diff --git a/Cargo.lock b/Cargo.lock index 94e695f1b24..921bd1d5b87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -856,9 +856,8 @@ dependencies = [ [[package]] name = "concread" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4539869aeea73afd414cc1750eceada0d042764f2d28873d74fbbd81610bffe" +version = "0.5.2" +source = "git+https://github.com/Erigara/concread.git?rev=bd80db2e85ed179f41aeabc6629e4eb3ec987ed9#bd80db2e85ed179f41aeabc6629e4eb3ec987ed9" dependencies = [ "ahash 0.8.11", "arc-swap", @@ -1600,6 +1599,7 @@ name = "executor_custom_data_model" version = "2.0.0-pre-rc.21" dependencies = [ "iroha_data_model", + "iroha_executor", "iroha_schema", "serde", "serde_json", @@ -3152,6 +3152,7 @@ dependencies = [ "iroha_data_model", "iroha_schema", "once_cell", + "parity-scale-codec", "serde", "serde_json", "test_samples", @@ -5440,7 +5441,7 @@ dependencies = [ [[package]] name = "storage" version = "0.1.0" -source = "git+https://github.com/Erigara/storage.git?rev=6bd9fdd95220da7626471d190b17b2f5b8815c47#6bd9fdd95220da7626471d190b17b2f5b8815c47" +source = "git+https://github.com/Erigara/storage.git?rev=cf82588d20494a1c1613ea2f4faa1e66bd827b5c#cf82588d20494a1c1613ea2f4faa1e66bd827b5c" dependencies = [ "concread", "serde", diff --git a/Cargo.toml b/Cargo.toml index 7ce64f1966f..4232dd4d1ec 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -134,7 +134,7 @@ parity-scale-codec = { version = "3.6.12", default-features = false } json5 = "0.4.1" toml = "0.8.14" -storage = { git = "https://github.com/Erigara/storage.git", rev = "6bd9fdd95220da7626471d190b17b2f5b8815c47" } +storage = { git = "https://github.com/Erigara/storage.git", rev = "cf82588d20494a1c1613ea2f4faa1e66bd827b5c" } [workspace.lints] rustdoc.private_doc_tests = "deny" diff --git a/cli/README.md b/cli/README.md index ee2f1d4582e..f94d8476938 100644 --- a/cli/README.md +++ b/cli/README.md @@ -103,18 +103,16 @@ You may 
deploy Iroha as a [native binary](#native-binary) or by using [Docker](# 3. Start an Iroha peer. - You can do this either with `--genesis` parameter to specify `genesis.json` location or without. Pay attention that for multi-peer setup only one peer should be started with `--genesis` parameter. - ```bash cd deploy - ./irohad --submit-genesis + ./irohad ``` ### Docker We provide a sample configuration for Docker in [`docker-compose.yml`](../configs/swarm/docker-compose.yml). We highly recommend that you adjust the `config.json` to include a set of new key pairs. -[Generate the keys](#generating-keys) and put them into `services.*.environment` in `docker-compose.yml`. Don't forget to update the public keys of `TRUSTED_PEERS`. +[Generate the keys](#generating-keys) and put them into `services.*.environment` in `docker-compose.yml`. Don't forget to update the public keys of `SUMERAGI_TRUSTED_PEERS`. - Build images: diff --git a/cli/src/lib.rs b/cli/src/lib.rs index b84c0ebbea8..ded6b11fbe0 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -294,8 +294,7 @@ impl Iroha { None } }.unwrap_or_else(|| { - State::from_config( - config.chain_wide, + State::new( world, Arc::clone(&kura), live_query_store_handle.clone(), @@ -614,7 +613,7 @@ pub fn read_config_and_genesis( None }; - validate_config(&config, args.submit_genesis)?; + validate_config(&config)?; let logger_config = LoggerInitConfig::new(config.logger, args.terminal_colors); @@ -628,7 +627,7 @@ fn read_genesis(path: &Path) -> Result { Ok(GenesisBlock(genesis)) } -fn validate_config(config: &Config, submit_genesis: bool) -> Result<(), ConfigError> { +fn validate_config(config: &Config) -> Result<(), ConfigError> { let mut emitter = Emitter::new(); // These cause race condition in tests, due to them actually binding TCP listeners @@ -643,13 +642,12 @@ fn validate_config(config: &Config, submit_genesis: bool) -> Result<(), ConfigEr // maybe validate only if snapshot mode is enabled validate_directory_path(&mut emitter, 
&config.snapshot.store_dir); - if !submit_genesis && !config.sumeragi.contains_other_trusted_peers() { + if config.genesis.signed_file.is_none() && !config.sumeragi.contains_other_trusted_peers() { emitter.emit(Report::new(ConfigError::LonePeer).attach_printable("\ Reason: the network consists from this one peer only (no `sumeragi.trusted_peers` provided).\n\ - Since `--submit-genesis` is not set, there is no way to receive the genesis block.\n\ - Either provide the genesis by setting `--submit-genesis` argument\n\ - and `genesis.signed_file` configuration parameter, or increase the number of trusted peers in\n\ - the network using `sumeragi.trusted_peers` configuration parameter.\ + Since `genesis.signed_file` is not set, there is no way to receive the genesis block.\n\ + Either provide the genesis by setting `genesis.signed_file` configuration parameter,\n\ + or increase the number of trusted peers in the network using `sumeragi.trusted_peers` configuration parameter.\ ").attach_printable(config.sumeragi.trusted_peers.clone().into_attachment().display_as_debug())); } @@ -771,19 +769,6 @@ pub struct Args { num_args(0..=1), )] pub terminal_colors: bool, - /// Whether the current peer should submit the genesis block or not - /// - /// Only one peer in the network should submit the genesis block. - /// - /// This argument must be set alongside with `genesis.signed_file` configuration option. - /// If not, Iroha will exit with an error. - /// - /// In case when the network consists only of this one peer, i.e. the amount of trusted - /// peers in the configuration (`sumeragi.trusted_peers`) is less than 2, this peer must - /// submit the genesis, since there are no other peers who can provide it. In this case, Iroha - /// will exit with an error if `--submit-genesis` is not set. 
- #[arg(long)] - pub submit_genesis: bool, } #[cfg(test)] @@ -879,7 +864,6 @@ mod tests { let (config, _logger, genesis) = read_config_and_genesis(&Args { config: Some(config_path), - submit_genesis: true, terminal_colors: false, trace_config: false, }) @@ -920,20 +904,11 @@ mod tests { // Given let genesis_key_pair = KeyPair::random(); - let genesis = GenesisBuilder::default().build_and_sign( - dummy_executor(), - ChainId::from("00000000-0000-0000-0000-000000000000"), - &genesis_key_pair, - vec![], - ); - let mut config = config_factory(genesis_key_pair.public_key()); - iroha_config::base::toml::Writer::new(&mut config) - .write(["genesis", "signed_file"], "./genesis.signed.scale"); + iroha_config::base::toml::Writer::new(&mut config); let dir = tempfile::tempdir()?; std::fs::write(dir.path().join("config.toml"), toml::to_string(&config)?)?; - std::fs::write(dir.path().join("genesis.signed.scale"), genesis.0.encode())?; std::fs::write(dir.path().join("executor.wasm"), "")?; let config_path = dir.path().join("config.toml"); @@ -941,7 +916,6 @@ mod tests { let report = read_config_and_genesis(&Args { config: Some(config_path), - submit_genesis: false, terminal_colors: false, trace_config: false, }) @@ -962,7 +936,6 @@ mod tests { let args = Args::try_parse_from(["test"]).unwrap(); assert_eq!(args.terminal_colors, is_coloring_supported()); - assert_eq!(args.submit_genesis, false); } #[test] diff --git a/cli/src/samples.rs b/cli/src/samples.rs index 5aaa54701a5..b03d96279bd 100644 --- a/cli/src/samples.rs +++ b/cli/src/samples.rs @@ -67,9 +67,8 @@ pub fn get_config_toml( .write(["sumeragi", "trusted_peers"], peers) .write(["network", "address"], DEFAULT_P2P_ADDR) .write(["network", "block_gossip_period_ms"], 500) - .write(["network", "block_gossip_max_size"], 1) + .write(["network", "block_gossip_size"], 1) .write(["torii", "address"], DEFAULT_TORII_ADDR) - .write(["chain_wide", "max_transactions_in_block"], 2) .write(["genesis", "public_key"], genesis_public_key) 
.write( ["genesis", "signed_file"], diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index c2d3c2e4875..cca409724ae 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -6,7 +6,7 @@ use iroha::{ crypto::KeyPair, data_model::{ events::pipeline::{BlockEventFilter, BlockStatus}, - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, + parameter::BlockParameter, prelude::*, }, }; @@ -22,7 +22,7 @@ pub struct Config { pub peers: u32, /// Interval in microseconds between transactions to reduce load pub interval_us_per_tx: u64, - pub max_txs_per_block: u32, + pub block_limits: BlockParameter, pub blocks: u32, pub sample_size: u32, pub genesis_max_retries: u32, @@ -33,11 +33,7 @@ impl fmt::Display for Config { write!( f, "{}peers-{}interval_µs-{}max_txs-{}blocks-{}samples", - self.peers, - self.interval_us_per_tx, - self.max_txs_per_block, - self.blocks, - self.sample_size, + self.peers, self.interval_us_per_tx, self.block_limits, self.blocks, self.sample_size, ) } } @@ -55,11 +51,7 @@ impl Config { let clients = network.clients(); wait_for_genesis_committed_with_max_retries(&clients, 0, self.genesis_max_retries); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, self.max_txs_per_block)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block(self.block_limits)))?; let unit_names = (UnitName::MIN..).take(self.peers as usize); let units = clients @@ -110,7 +102,7 @@ impl Config { let blocks_out_of_measure = 2 + MeasurerUnit::PREPARATION_BLOCKS_NUMBER * self.peers; let state_view = network - .genesis + .first_peer .irohad .as_ref() .expect("Must be some") diff --git a/client/examples/million_accounts_genesis.rs b/client/examples/million_accounts_genesis.rs index d547aff6781..d8033ec6392 100644 --- a/client/examples/million_accounts_genesis.rs +++ b/client/examples/million_accounts_genesis.rs @@ -29,7 +29,7 @@ fn generate_genesis( .account(signatory_alice.clone()) .asset( format!("xor-{i}").parse().expect("Valid"), - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), ) .finish_domain(); } diff --git a/client/examples/register_1000_triggers.rs b/client/examples/register_1000_triggers.rs index 567dd9d3317..a7f31bd2962 100644 --- a/client/examples/register_1000_triggers.rs +++ b/client/examples/register_1000_triggers.rs @@ -1,10 +1,14 @@ //! Example of registering multiple triggers //! 
Used to show Iroha's trigger deduplication capabilities +use std::num::NonZeroU64; + use iroha::{ client::Client, + crypto::KeyPair, data_model::{prelude::*, trigger::TriggerId}, }; +use iroha_data_model::parameter::{Parameter, SmartContractParameter}; use iroha_genesis::{GenesisBlock, GenesisBuilder}; use iroha_primitives::unique_vec; use irohad::samples::{construct_executor, get_config}; @@ -18,17 +22,24 @@ use tokio::runtime::Runtime; fn generate_genesis( num_triggers: u32, chain_id: ChainId, - genesis_key_pair: &iroha_crypto::KeyPair, + genesis_key_pair: &KeyPair, topology: Vec, ) -> Result> { - let builder = GenesisBuilder::default(); + let builder = GenesisBuilder::default() + .append_instruction(SetParameter::new(Parameter::Executor( + SmartContractParameter::Fuel(NonZeroU64::MAX), + ))) + .append_instruction(SetParameter::new(Parameter::Executor( + SmartContractParameter::Memory(NonZeroU64::MAX), + ))); - let wasm = - iroha_wasm_builder::Builder::new("tests/integration/smartcontracts/mint_rose_trigger") - .show_output() - .build()? - .optimize()? - .into_bytes()?; + let wasm = iroha_wasm_builder::Builder::new( + "client/tests/integration/smartcontracts/mint_rose_trigger", + ) + .show_output() + .build()? + .optimize()? 
+ .into_bytes()?; let wasm = WasmSmartContract::from_compiled(wasm); let (account_id, _account_keypair) = gen_account_in("wonderland"); @@ -54,7 +65,7 @@ fn generate_genesis( }) .fold(builder, GenesisBuilder::append_instruction); - let executor = construct_executor("../default_executor").expect("Failed to construct executor"); + let executor = construct_executor("default_executor").expect("Failed to construct executor"); Ok(builder.build_and_sign(executor, chain_id, genesis_key_pair, topology)) } @@ -64,17 +75,13 @@ fn main() -> Result<(), Box> { let chain_id = get_chain_id(); let genesis_key_pair = get_key_pair(test_network::Signatory::Genesis); let topology = vec![peer.id.clone()]; - let mut configuration = get_config( + let configuration = get_config( unique_vec![peer.id.clone()], chain_id.clone(), get_key_pair(test_network::Signatory::Peer), genesis_key_pair.public_key(), ); - // Increase executor limits for large genesis - configuration.chain_wide.executor_runtime.fuel_limit = u64::MAX; - configuration.chain_wide.executor_runtime.max_memory = u32::MAX.into(); - let genesis = generate_genesis(1_000_u32, chain_id, &genesis_key_pair, topology)?; let builder = PeerBuilder::new() diff --git a/client/examples/tutorial.rs b/client/examples/tutorial.rs index 1589b8d78ad..4718137ab0c 100644 --- a/client/examples/tutorial.rs +++ b/client/examples/tutorial.rs @@ -34,7 +34,7 @@ fn domain_registration_test(config: Config) -> Result<(), Error> { use iroha::{ client::Client, data_model::{ - metadata::UnlimitedMetadata, + metadata::Metadata, prelude::{Domain, DomainId, InstructionBox, Register}, }, }; @@ -57,7 +57,7 @@ fn domain_registration_test(config: Config) -> Result<(), Error> { // #region domain_register_example_prepare_tx // Prepare a transaction - let metadata = UnlimitedMetadata::default(); + let metadata = Metadata::default(); let instructions: Vec = vec![create_looking_glass.into()]; let tx = iroha.build_transaction(instructions, metadata); // #endregion 
domain_register_example_prepare_tx @@ -101,7 +101,7 @@ fn account_registration_test(config: Config) -> Result<(), Error> { client::Client, crypto::KeyPair, data_model::{ - metadata::UnlimitedMetadata, + metadata::Metadata, prelude::{Account, AccountId, InstructionBox, Register}, }, }; @@ -127,7 +127,7 @@ fn account_registration_test(config: Config) -> Result<(), Error> { // #region register_account_prepare_tx // Prepare a transaction using the // Account's RegisterBox - let metadata = UnlimitedMetadata::new(); + let metadata = Metadata::default(); let instructions: Vec = vec![create_account.into()]; let tx = iroha.build_transaction(instructions, metadata); // #endregion register_account_prepare_tx diff --git a/client/src/client.rs b/client/src/client.rs index c97eda480e9..624f454d82c 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -16,6 +16,7 @@ use futures_util::StreamExt; use http_default::{AsyncWebSocketStream, WebSocketStream}; pub use iroha_config::client_api::ConfigDTO; use iroha_logger::prelude::*; +use iroha_primitives::json; use iroha_telemetry::metrics::Status; use iroha_torii_const::uri as torii_uri; use iroha_version::prelude::*; @@ -326,11 +327,12 @@ impl_query_output! 
{ crate::data_model::account::Account, crate::data_model::domain::Domain, crate::data_model::block::BlockHeader, - crate::data_model::metadata::MetadataValueBox, + json::JsonString, crate::data_model::query::TransactionQueryOutput, crate::data_model::executor::ExecutorDataModel, crate::data_model::trigger::Trigger, crate::data_model::prelude::Numeric, + crate::data_model::parameter::Parameters, } /// Iroha client @@ -452,7 +454,7 @@ impl Client { pub fn build_transaction( &self, instructions: impl Into, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> SignedTransaction { let tx_builder = TransactionBuilder::new(self.chain.clone(), self.account.clone()); @@ -509,7 +511,7 @@ impl Client { &self, instructions: impl IntoIterator, ) -> Result> { - self.submit_all_with_metadata(instructions, UnlimitedMetadata::new()) + self.submit_all_with_metadata(instructions, Metadata::default()) } /// Instructions API entry point. Submits one Iroha Special Instruction to `Iroha` peers. @@ -521,7 +523,7 @@ impl Client { pub fn submit_with_metadata( &self, instruction: impl Instruction, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { self.submit_all_with_metadata([instruction], metadata) } @@ -535,7 +537,7 @@ impl Client { pub fn submit_all_with_metadata( &self, instructions: impl IntoIterator, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { self.submit_transaction(&self.build_transaction(instructions, metadata)) } @@ -718,7 +720,7 @@ impl Client { &self, instructions: impl IntoIterator, ) -> Result> { - self.submit_all_blocking_with_metadata(instructions, UnlimitedMetadata::new()) + self.submit_all_blocking_with_metadata(instructions, Metadata::default()) } /// Submits and waits until the transaction is either rejected or committed. 
@@ -730,7 +732,7 @@ impl Client { pub fn submit_blocking_with_metadata( &self, instruction: impl Instruction, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { self.submit_all_blocking_with_metadata(vec![instruction.into()], metadata) } @@ -744,7 +746,7 @@ impl Client { pub fn submit_all_blocking_with_metadata( &self, instructions: impl IntoIterator, - metadata: UnlimitedMetadata, + metadata: Metadata, ) -> Result> { let transaction = self.build_transaction(instructions, metadata); self.submit_transaction_blocking(&transaction) @@ -1620,7 +1622,7 @@ mod tests { }); let build_transaction = - || client.build_transaction(Vec::::new(), UnlimitedMetadata::new()); + || client.build_transaction(Vec::::new(), Metadata::default()); let tx1 = build_transaction(); let tx2 = build_transaction(); assert_ne!(tx1.hash(), tx2.hash()); diff --git a/client/src/config/user.rs b/client/src/config/user.rs index 000ab2a2dd8..71bf826d4d3 100644 --- a/client/src/config/user.rs +++ b/client/src/config/user.rs @@ -6,11 +6,13 @@ use iroha_config_base::{ util::{DurationMs, Emitter, EmitterResultExt}, ReadConfig, WithOrigin, }; -use iroha_crypto::{KeyPair, PrivateKey, PublicKey}; -use iroha_data_model::prelude::{AccountId, ChainId, DomainId}; use url::Url; -use crate::config::BasicAuth; +use crate::{ + config::BasicAuth, + crypto::{KeyPair, PrivateKey, PublicKey}, + data_model::prelude::{AccountId, ChainId, DomainId}, +}; /// Root of the user configuration #[derive(Clone, Debug, ReadConfig)] diff --git a/client/tests/integration/add_domain.rs b/client/tests/integration/add_domain.rs deleted file mode 100644 index 514e18b85d6..00000000000 --- a/client/tests/integration/add_domain.rs +++ /dev/null @@ -1,37 +0,0 @@ -use std::thread; - -use eyre::Result; -use iroha::{client, data_model::prelude::*}; -use iroha_config::parameters::actual::Root as Config; -use test_network::*; - -#[test] -// This test suite is also covered at the UI level in the iroha_cli tests -// in 
test_register_domains.py -fn client_add_domain_with_name_length_more_than_limit_should_not_commit_transaction() -> Result<()> -{ - let (_rt, _peer, test_client) = ::new().with_port(10_500).start_with_runtime(); - wait_for_genesis_committed(&vec![test_client.clone()], 0); - let pipeline_time = Config::pipeline_time(); - - // Given - - let normal_domain_id: DomainId = "sora".parse()?; - let create_domain = Register::domain(Domain::new(normal_domain_id.clone())); - test_client.submit(create_domain)?; - - let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(14)).parse()?; - let create_domain = Register::domain(Domain::new(too_long_domain_name.clone())); - test_client.submit(create_domain)?; - - thread::sleep(pipeline_time * 2); - - assert!(test_client - .request(client::domain::by_id(normal_domain_id)) - .is_ok()); - assert!(test_client - .request(client::domain::by_id(too_long_domain_name)) - .is_err()); - - Ok(()) -} diff --git a/client/tests/integration/asset.rs b/client/tests/integration/asset.rs index f0fe33e8fd9..a1f8a9d2115 100644 --- a/client/tests/integration/asset.rs +++ b/client/tests/integration/asset.rs @@ -5,7 +5,7 @@ use iroha::{ client::{self, QueryResult}, crypto::KeyPair, data_model::{ - asset::{AssetId, AssetValue, AssetValueType}, + asset::{AssetId, AssetType, AssetValue}, isi::error::{InstructionEvaluationError, InstructionExecutionError, Mismatch, TypeError}, prelude::*, transaction::error::TransactionRejectionReason, @@ -106,7 +106,7 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let metadata = iroha::data_model::metadata::UnlimitedMetadata::default(); + let metadata = iroha::data_model::metadata::Metadata::default(); //When let quantity = numeric!(200); let mint = Mint::asset_numeric( @@ -137,7 +137,7 @@ fn 
client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount( let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let metadata = iroha::data_model::metadata::UnlimitedMetadata::default(); + let metadata = iroha::data_model::metadata::Metadata::default(); //When let quantity = Numeric::new(2_u128.pow(65), 0); let mint = Mint::asset_numeric( @@ -168,7 +168,7 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> { let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); let create_asset = Register::asset_definition(asset_definition); - let metadata = iroha::data_model::metadata::UnlimitedMetadata::default(); + let metadata = iroha::data_model::metadata::Metadata::default(); //When let quantity = numeric!(123.456); @@ -402,7 +402,7 @@ fn fail_if_dont_satisfy_spec() { // Create asset definition which accepts only integers let asset_definition = AssetDefinition::new( asset_definition_id.clone(), - AssetValueType::Numeric(NumericSpec::integer()), + AssetType::Numeric(NumericSpec::integer()), ); test_client @@ -435,8 +435,8 @@ fn fail_if_dont_satisfy_spec() { &TransactionRejectionReason::Validation(ValidationFail::InstructionFailed( InstructionExecutionError::Evaluate(InstructionEvaluationError::Type( TypeError::from(Mismatch { - expected: AssetValueType::Numeric(NumericSpec::integer()), - actual: AssetValueType::Numeric(NumericSpec::fractional(2)) + expected: AssetType::Numeric(NumericSpec::integer()), + actual: AssetType::Numeric(NumericSpec::fractional(2)) }) )) )) diff --git a/client/tests/integration/asset_propagation.rs b/client/tests/integration/asset_propagation.rs index bcf99a5ca9c..8e6984ac0ca 100644 --- a/client/tests/integration/asset_propagation.rs +++ 
b/client/tests/integration/asset_propagation.rs @@ -3,12 +3,10 @@ use std::{str::FromStr as _, thread}; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -22,11 +20,9 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_a wait_for_genesis_committed(&network.clients(), 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? - .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; let create_domain: InstructionBox = Register::domain(Domain::new(DomainId::from_str("domain")?)).into(); diff --git a/client/tests/integration/domain_owner_permissions.rs b/client/tests/integration/domain_owner_permissions.rs index e23303089ea..a3e6fb81e07 100644 --- a/client/tests/integration/domain_owner_permissions.rs +++ b/client/tests/integration/domain_owner_permissions.rs @@ -1,5 +1,6 @@ use eyre::Result; use iroha::data_model::{prelude::*, transaction::error::TransactionRejectionReason}; +use iroha_primitives::json::JsonString; use serde_json::json; use test_network::*; use test_samples::{gen_account_in, ALICE_ID, BOB_ID}; @@ -58,7 +59,7 @@ fn domain_owner_domain_permissions() -> Result<()> { // check that "alice@wonderland" as owner of domain can edit metadata in her domain let key: Name = "key".parse()?; - let value: Name = "value".parse()?; + let value = JsonString::new("value"); test_client.submit_blocking(SetKeyValue::domain(kingdom_id.clone(), key.clone(), value))?; test_client.submit_blocking(RemoveKeyValue::domain(kingdom_id.clone(), key))?; @@ 
-93,7 +94,7 @@ fn domain_owner_account_permissions() -> Result<()> { // check that "alice@wonderland" as owner of domain can edit metadata of account in her domain let key: Name = "key".parse()?; - let value: Name = "value".parse()?; + let value = JsonString::new("value"); test_client.submit_blocking(SetKeyValue::account( mad_hatter_id.clone(), key.clone(), @@ -160,7 +161,7 @@ fn domain_owner_asset_definition_permissions() -> Result<()> { // check that "alice@wonderland" as owner of domain can edit metadata of asset definition in her domain let key: Name = "key".parse()?; - let value: Name = "value".parse()?; + let value = JsonString::new("value"); test_client.submit_blocking(SetKeyValue::asset_definition( coin_id.clone(), key.clone(), @@ -233,7 +234,7 @@ fn domain_owner_asset_permissions() -> Result<()> { // check that "alice@wonderland" as owner of domain can edit metadata of store asset in her domain let key: Name = "key".parse()?; - let value: Name = "value".parse()?; + let value = JsonString::new("value"); let bob_store_id = AssetId::new(store_id, bob_id.clone()); test_client.submit_blocking(SetKeyValue::asset(bob_store_id.clone(), key.clone(), value))?; test_client.submit_blocking(RemoveKeyValue::asset(bob_store_id.clone(), key))?; diff --git a/client/tests/integration/events/data.rs b/client/tests/integration/events/data.rs index 1e35a9819ed..d623bef784d 100644 --- a/client/tests/integration/events/data.rs +++ b/client/tests/integration/events/data.rs @@ -151,7 +151,7 @@ fn transaction_execution_should_produce_events( // submit transaction to produce events init_receiver.recv()?; - let transaction = client.build_transaction(executable, UnlimitedMetadata::new()); + let transaction = client.build_transaction(executable, Metadata::default()); client.submit_transaction_blocking(&transaction)?; // assertion @@ -240,13 +240,13 @@ fn produce_multiple_events() -> Result<()> { DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded( 
AccountPermissionChanged { account: bob_id.clone(), - permission: token_1.id.clone(), + permission: token_1.clone(), }, ))), DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionAdded( AccountPermissionChanged { account: bob_id.clone(), - permission: token_2.id.clone(), + permission: token_2.clone(), }, ))), DataEvent::Domain(DomainEvent::Account(AccountEvent::RoleGranted( @@ -258,13 +258,13 @@ fn produce_multiple_events() -> Result<()> { DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved( AccountPermissionChanged { account: bob_id.clone(), - permission: token_1.id, + permission: token_1, }, ))), DataEvent::Domain(DomainEvent::Account(AccountEvent::PermissionRemoved( AccountPermissionChanged { account: bob_id.clone(), - permission: token_2.id, + permission: token_2, }, ))), DataEvent::Domain(DomainEvent::Account(AccountEvent::RoleRevoked( diff --git a/client/tests/integration/events/notification.rs b/client/tests/integration/events/notification.rs index 90f04abdf30..d5f7bf9887b 100644 --- a/client/tests/integration/events/notification.rs +++ b/client/tests/integration/events/notification.rs @@ -59,11 +59,11 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> { let account_id = ALICE_ID.clone(); let trigger_id = TriggerId::from_str("fail_box")?; - let instruction = Fail::new("Fail box".to_owned()); + let fail_isi = Unregister::domain("dummy".parse().unwrap()); let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( - vec![InstructionBox::from(instruction)], + vec![InstructionBox::from(fail_isi)], Repeats::Indefinitely, account_id.clone(), ExecuteTriggerEventFilter::new() diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs index 6d78d8685ad..f57d4382563 100644 --- a/client/tests/integration/events/pipeline.rs +++ b/client/tests/integration/events/pipeline.rs @@ -1,7 +1,4 @@ -use std::{ - num::NonZeroUsize, - thread::{self, JoinHandle}, 
-}; +use std::thread::{self, JoinHandle}; use eyre::Result; use iroha::{ @@ -11,13 +8,15 @@ use iroha::{ BlockEvent, BlockEventFilter, BlockStatus, TransactionEventFilter, TransactionStatus, }, isi::error::InstructionExecutionError, - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, + parameter::BlockParameter, prelude::*, transaction::error::TransactionRejectionReason, ValidationFail, }, }; use iroha_config::parameters::actual::Root as Config; +use iroha_data_model::query::error::FindError; +use nonzero_ext::nonzero; use test_network::*; // Needed to re-enable ignored tests. @@ -33,13 +32,15 @@ fn transaction_with_no_instructions_should_be_committed() -> Result<()> { // #[ignore = "Experiment"] #[test] fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> { - let msg = "Should be rejected".to_owned(); + let unknown_domain_id = "dummy".parse::().unwrap(); + let fail_isi = Unregister::domain(unknown_domain_id.clone()); - let fail = Fail::new(msg.clone()); test_with_instruction_and_status_and_port( - Some(fail.into()), + Some(fail_isi.into()), &TransactionStatus::Rejected(Box::new(TransactionRejectionReason::Validation( - ValidationFail::InstructionFailed(InstructionExecutionError::Fail(msg)), + ValidationFail::InstructionFailed(InstructionExecutionError::Find(FindError::Domain( + unknown_domain_id, + ))), ))), 10_350, ) @@ -56,15 +57,13 @@ fn test_with_instruction_and_status_and_port( wait_for_genesis_committed(&clients, 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; // Given let submitter = client; - let transaction = submitter.build_transaction(instruction, UnlimitedMetadata::new()); + let transaction = submitter.build_transaction(instruction, Metadata::default()); let hash = transaction.hash(); let mut handles = Vec::new(); for listener in clients { @@ -116,7 +115,7 @@ fn applied_block_must_be_available_in_kura() { .expect("Failed to subscribe for events"); client - .submit(Fail::new("Dummy instruction".to_owned())) + .submit(Unregister::domain("dummy".parse().unwrap())) .expect("Failed to submit transaction"); let event: BlockEvent = event_iter @@ -130,8 +129,6 @@ fn applied_block_must_be_available_in_kura() { .as_ref() .expect("Must be some") .kura() - .get_block_by_height( - NonZeroUsize::new(event.header().height().try_into().unwrap()).unwrap(), - ) + .get_block_by_height(event.header().height().try_into().unwrap()) .expect("Block applied event was received earlier"); } diff --git a/client/tests/integration/extra_functional/genesis.rs b/client/tests/integration/extra_functional/genesis.rs new file mode 100644 index 00000000000..eb2da99b843 --- /dev/null +++ b/client/tests/integration/extra_functional/genesis.rs @@ -0,0 +1,33 @@ +use iroha::data_model::{ + domain::{Domain, DomainId}, + isi::Register, +}; +use test_network::{wait_for_genesis_committed, NetworkBuilder}; + +#[test] +fn all_peers_submit_genesis() { + multiple_genesis_peers(4, 4, 13_800); +} + +#[test] +fn multiple_genesis_4_peers_3_genesis() { + multiple_genesis_peers(4, 3, 13_820); +} + +#[test] +fn multiple_genesis_4_peers_2_genesis() { + multiple_genesis_peers(4, 2, 13_840); +} + +fn multiple_genesis_peers(n_peers: u32, n_genesis_peers: u32, port: u16) { + let (_rt, network, client) = NetworkBuilder::new(n_peers, Some(port)) + .with_genesis_peers(n_genesis_peers) + .create_with_runtime(); + 
wait_for_genesis_committed(&network.clients(), 0); + + let domain_id: DomainId = "foo".parse().expect("Valid"); + let create_domain = Register::domain(Domain::new(domain_id)); + client + .submit_blocking(create_domain) + .expect("Failed to register domain"); +} diff --git a/client/tests/integration/extra_functional/mod.rs b/client/tests/integration/extra_functional/mod.rs index ed3bef44f09..6e35d278cbd 100644 --- a/client/tests/integration/extra_functional/mod.rs +++ b/client/tests/integration/extra_functional/mod.rs @@ -1,4 +1,5 @@ mod connected_peers; +mod genesis; mod multiple_blocks_created; mod normal; mod offline_peers; diff --git a/client/tests/integration/extra_functional/multiple_blocks_created.rs b/client/tests/integration/extra_functional/multiple_blocks_created.rs index 458af606d10..f48fbf521f5 100644 --- a/client/tests/integration/extra_functional/multiple_blocks_created.rs +++ b/client/tests/integration/extra_functional/multiple_blocks_created.rs @@ -3,12 +3,10 @@ use std::thread; use eyre::Result; use iroha::{ client::{self, Client, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -22,11 +20,9 @@ fn long_multiple_blocks_created() -> Result<()> { wait_for_genesis_committed(&network.clients(), 0); let pipeline_time = Config::pipeline_time(); - client.submit_all_blocking( - ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
- .into_set_parameters(), - )?; + client.submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )))?; let create_domain: InstructionBox = Register::domain(Domain::new("domain".parse()?)).into(); let (account_id, _account_keypair) = gen_account_in("domain"); diff --git a/client/tests/integration/extra_functional/normal.rs b/client/tests/integration/extra_functional/normal.rs index c278deafa96..401d3b22626 100644 --- a/client/tests/integration/extra_functional/normal.rs +++ b/client/tests/integration/extra_functional/normal.rs @@ -1,25 +1,19 @@ -use std::num::NonZeroU32; - -use iroha::client::{self, Client}; -use iroha_config::parameters::actual::Root as Config; -use iroha_data_model::{asset::AssetDefinitionId, prelude::*}; +use iroha::{ + client, + data_model::{asset::AssetDefinitionId, parameter::BlockParameter, prelude::*}, +}; +use nonzero_ext::nonzero; use test_network::*; -use tokio::runtime::Runtime; #[test] fn tranasctions_should_be_applied() { - let rt = Runtime::test(); - let (network, iroha) = rt.block_on(async { - let mut configuration = Config::test(); - configuration.chain_wide.max_transactions_in_block = NonZeroU32::new(1).unwrap(); - let network = Network::new_with_offline_peers(Some(configuration), 4, 0, Some(11_300)) - .await - .unwrap(); - let iroha = Client::test(&network.genesis.api_address); - - (network, iroha) - }); + let (_rt, network, iroha) = NetworkBuilder::new(4, Some(11_300)).create_with_runtime(); wait_for_genesis_committed(&network.clients(), 0); + iroha + .submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + ))) + .unwrap(); let domain_id = "and".parse::().unwrap(); let account_id = "ed01201F803CB23B1AAFB958368DF2F67CB78A2D1DFB47FFFC3133718F165F54DFF677@and" diff --git a/client/tests/integration/extra_functional/offline_peers.rs b/client/tests/integration/extra_functional/offline_peers.rs index 163d8ce20ab..0e1e179d986 100644 --- 
a/client/tests/integration/extra_functional/offline_peers.rs +++ b/client/tests/integration/extra_functional/offline_peers.rs @@ -11,18 +11,13 @@ use iroha_config::parameters::actual::Root as Config; use iroha_primitives::addr::socket_addr; use test_network::*; use test_samples::ALICE_ID; -use tokio::runtime::Runtime; #[test] fn genesis_block_is_committed_with_some_offline_peers() -> Result<()> { // Given - let rt = Runtime::test(); - - let (network, client) = rt.block_on(Network::start_test_with_offline_and_set_n_shifts( - 4, - 1, - Some(10_560), - )); + let (_rt, network, client) = NetworkBuilder::new(4, Some(10_560)) + .with_offline_peers(1) + .create_with_runtime(); wait_for_genesis_committed(&network.clients(), 1); //When diff --git a/client/tests/integration/extra_functional/restart_peer.rs b/client/tests/integration/extra_functional/restart_peer.rs index 40540b7f549..03b1d6ba68a 100644 --- a/client/tests/integration/extra_functional/restart_peer.rs +++ b/client/tests/integration/extra_functional/restart_peer.rs @@ -55,7 +55,7 @@ fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> { .expect("Asset not found"); assert_eq!(AssetValue::Numeric(quantity), *asset.value()); - let mut all_peers: Vec<_> = core::iter::once(network.genesis) + let mut all_peers: Vec<_> = core::iter::once(network.first_peer) .chain(network.peers.into_values()) .collect(); let removed_peer_idx = rand::thread_rng().gen_range(0..all_peers.len()); diff --git a/client/tests/integration/extra_functional/unregister_peer.rs b/client/tests/integration/extra_functional/unregister_peer.rs index ade2324d525..5fa97a5f231 100644 --- a/client/tests/integration/extra_functional/unregister_peer.rs +++ b/client/tests/integration/extra_functional/unregister_peer.rs @@ -3,12 +3,10 @@ use std::thread; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{ - parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, - prelude::*, - }, + 
data_model::{parameter::BlockParameter, prelude::*}, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use test_network::*; use test_samples::gen_account_in; @@ -117,20 +115,23 @@ fn init() -> Result<( let (rt, network, client) = Network::start_test_with_runtime(4, Some(10_925)); let pipeline_time = Config::pipeline_time(); iroha_logger::info!("Started"); - let parameters = ParametersBuilder::new() - .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? - .into_set_parameters(); + + let set_max_txns_in_block = SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(1_u64)), + )); + let create_domain = Register::domain(Domain::new("domain".parse()?)); let (account_id, _account_keypair) = gen_account_in("domain"); let create_account = Register::account(Account::new(account_id.clone())); let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let instructions = parameters.into_iter().chain([ + let instructions: [InstructionBox; 4] = [ + set_max_txns_in_block.into(), create_domain.into(), create_account.into(), create_asset.into(), - ]); + ]; client.submit_all_blocking(instructions)?; iroha_logger::info!("Init"); Ok(( diff --git a/client/tests/integration/extra_functional/unstable_network.rs b/client/tests/integration/extra_functional/unstable_network.rs index 558991f9c64..c917b85580d 100644 --- a/client/tests/integration/extra_functional/unstable_network.rs +++ b/client/tests/integration/extra_functional/unstable_network.rs @@ -1,16 +1,17 @@ use std::thread; use iroha::{ - client::{self, Client, QueryResult}, - data_model::prelude::*, + client::{self, QueryResult}, + data_model::{ + parameter::{BlockParameter, Parameter}, + prelude::*, + }, }; use iroha_config::parameters::actual::Root as Config; +use nonzero_ext::nonzero; use rand::seq::SliceRandom; use test_network::*; use test_samples::ALICE_ID; -use 
tokio::runtime::Runtime; - -const MAX_TRANSACTIONS_IN_BLOCK: u32 = 5; #[test] fn unstable_network_5_peers_1_fault() { @@ -49,28 +50,24 @@ fn unstable_network( if let Err(error) = iroha_logger::install_panic_hook() { eprintln!("Installing panic hook failed: {error}"); } - let rt = Runtime::test(); + // Given - let (network, iroha) = rt.block_on(async { - let mut configuration = Config::test(); - configuration.chain_wide.max_transactions_in_block = - MAX_TRANSACTIONS_IN_BLOCK.try_into().unwrap(); - #[cfg(debug_assertions)] - { - configuration.sumeragi.debug_force_soft_fork = force_soft_fork; - } - let network = Network::new_with_offline_peers( - Some(configuration), - n_peers + n_offline_peers, - 0, - Some(port), - ) - .await - .expect("Failed to init peers"); - let client = Client::test(&network.genesis.api_address); - (network, client) - }); + let mut configuration = Config::test(); + #[cfg(debug_assertions)] + { + configuration.sumeragi.debug_force_soft_fork = force_soft_fork; + } + let (_rt, network, iroha) = NetworkBuilder::new(n_peers + n_offline_peers, Some(port)) + .with_config(configuration) + // Note: it is strange that we have `n_offline_peers` but don't set it as offline + .with_offline_peers(0) + .create_with_runtime(); wait_for_genesis_committed(&network.clients(), n_offline_peers); + iroha + .submit_blocking(SetParameter::new(Parameter::Block( + BlockParameter::MaxTransactions(nonzero!(5_u64)), + ))) + .unwrap(); let pipeline_time = Config::pipeline_time(); diff --git a/client/tests/integration/mod.rs b/client/tests/integration/mod.rs index 37299969665..13b8bd2528c 100644 --- a/client/tests/integration/mod.rs +++ b/client/tests/integration/mod.rs @@ -1,4 +1,3 @@ -mod add_domain; mod asset; mod asset_propagation; mod domain_owner_permissions; diff --git a/client/tests/integration/non_mintable.rs b/client/tests/integration/non_mintable.rs index 4f65579a9be..d976fce1eb9 100644 --- a/client/tests/integration/non_mintable.rs +++ 
b/client/tests/integration/non_mintable.rs @@ -3,7 +3,7 @@ use std::str::FromStr as _; use eyre::Result; use iroha::{ client::{self, QueryResult}, - data_model::{isi::InstructionBox, metadata::UnlimitedMetadata, prelude::*}, + data_model::{isi::InstructionBox, prelude::*}, }; use test_network::*; use test_samples::ALICE_ID; @@ -20,7 +20,7 @@ fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { AssetDefinition::numeric(asset_definition_id.clone()).mintable_once(), ); - let metadata = UnlimitedMetadata::default(); + let metadata = Metadata::default(); let mint = Mint::asset_numeric( 200_u32, diff --git a/client/tests/integration/permissions.rs b/client/tests/integration/permissions.rs index 6ff29a7e74d..4e9669b37e1 100644 --- a/client/tests/integration/permissions.rs +++ b/client/tests/integration/permissions.rs @@ -6,7 +6,7 @@ use iroha::{ crypto::KeyPair, data_model::{ permission::Permission, prelude::*, role::RoleId, - transaction::error::TransactionRejectionReason, JsonString, + transaction::error::TransactionRejectionReason, }, }; use iroha_genesis::GenesisBlock; @@ -251,7 +251,7 @@ fn permissions_differ_not_only_by_names() { .submit_blocking(SetKeyValue::asset( mouse_hat_id, Name::from_str("color").expect("Valid"), - "red".to_owned(), + "red".parse::().expect("Valid"), )) .expect("Failed to modify Mouse's hats"); @@ -260,7 +260,7 @@ fn permissions_differ_not_only_by_names() { let set_shoes_color = SetKeyValue::asset( mouse_shoes_id.clone(), Name::from_str("color").expect("Valid"), - "yellow".to_owned(), + "yellow".parse::().expect("Valid"), ); let _err = client .submit_blocking(set_shoes_color.clone()) @@ -315,16 +315,15 @@ fn stored_vs_granted_token_payload() -> Result<()> { .expect("Failed to register mouse"); // Allow alice to mint mouse asset and mint initial value + let value_json = JsonString::from_string_unchecked(format!( + // Introducing some whitespaces + // This way, if the executor compares just JSON strings, this test would fail 
+ r##"{{ "asset" : "xor#wonderland#{mouse_id}" }}"## + )); + let mouse_asset = AssetId::new(asset_definition_id, mouse_id.clone()); let allow_alice_to_set_key_value_in_mouse_asset = Grant::permission( - Permission::new( - "CanSetKeyValueInUserAsset".parse().unwrap(), - JsonString::from_string_unchecked(format!( - // Introducing some whitespaces - // This way, if the executor compares just JSON strings, this test would fail - r##"{{ "asset" : "xor#wonderland#{mouse_id}" }}"## - )), - ), + Permission::new("CanSetKeyValueInUserAsset".parse().unwrap(), value_json), alice_id, ); @@ -336,7 +335,11 @@ fn stored_vs_granted_token_payload() -> Result<()> { .expect("Failed to grant permission to alice."); // Check that alice can indeed mint mouse asset - let set_key_value = SetKeyValue::asset(mouse_asset, Name::from_str("color")?, "red".to_owned()); + let set_key_value = SetKeyValue::asset( + mouse_asset, + Name::from_str("color")?, + "red".parse::().expect("Valid"), + ); iroha .submit_blocking(set_key_value) .expect("Failed to mint asset for mouse."); diff --git a/client/tests/integration/queries/account.rs b/client/tests/integration/queries/account.rs index d89d74b8ac0..1f6addaf7f9 100644 --- a/client/tests/integration/queries/account.rs +++ b/client/tests/integration/queries/account.rs @@ -24,8 +24,8 @@ fn find_accounts_with_asset() -> Result<()> { assert_eq!(received_asset_definition.id(), asset_definition.id()); assert!(matches!( - received_asset_definition.value_type(), - AssetValueType::Numeric(_) + received_asset_definition.type_(), + AssetType::Numeric(_) )); let accounts: [AccountId; 5] = [ @@ -61,8 +61,8 @@ fn find_accounts_with_asset() -> Result<()> { assert_eq!(received_asset_definition.id(), asset_definition.id()); assert_eq!( - received_asset_definition.value_type(), - AssetValueType::Numeric(NumericSpec::default()), + received_asset_definition.type_(), + AssetType::Numeric(NumericSpec::default()), ); let found_accounts = test_client diff --git 
a/client/tests/integration/queries/asset.rs b/client/tests/integration/queries/asset.rs index d306f5b9540..c7591c9c290 100644 --- a/client/tests/integration/queries/asset.rs +++ b/client/tests/integration/queries/asset.rs @@ -44,7 +44,7 @@ fn find_asset_total_quantity() -> Result<()> { &test_client, &accounts, "quantity#wonderland", - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), numeric!(1), numeric!(10), numeric!(5), @@ -56,7 +56,7 @@ fn find_asset_total_quantity() -> Result<()> { &test_client, &accounts, "fixed#wonderland", - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), numeric!(1.0), numeric!(10.0), numeric!(5.0), @@ -131,7 +131,7 @@ fn test_total_quantity( test_client: &Client, accounts: &[AccountId; 5], definition: &str, - asset_value_type: AssetValueType, + asset_type: AssetType, initial_value: T, to_mint: T, to_burn: T, @@ -147,7 +147,7 @@ where // Registering new asset definition let definition_id: AssetDefinitionId = definition.parse().expect("Failed to parse `definition_id`"); - let asset_definition = AssetDefinition::new(definition_id.clone(), asset_value_type); + let asset_definition = AssetDefinition::new(definition_id.clone(), asset_type); test_client.submit_blocking(Register::asset_definition(asset_definition))?; let asset_ids = accounts diff --git a/client/tests/integration/queries/smart_contract.rs b/client/tests/integration/queries/smart_contract.rs index 649f4550146..e41c4bd985a 100644 --- a/client/tests/integration/queries/smart_contract.rs +++ b/client/tests/integration/queries/smart_contract.rs @@ -3,10 +3,7 @@ use std::str::FromStr as _; use eyre::Result; use iroha::{ client::ClientQueryError, - data_model::{ - prelude::*, - query::{cursor::ForwardCursor, error::QueryExecutionFail}, - }, + data_model::{prelude::*, query::error::QueryExecutionFail}, }; use test_network::*; @@ -23,18 +20,15 @@ fn live_query_is_dropped_after_smart_contract_end() 
-> Result<()> { .optimize()? .into_bytes()?; - let transaction = client.build_transaction( - WasmSmartContract::from_compiled(wasm), - UnlimitedMetadata::default(), - ); + let transaction = + client.build_transaction(WasmSmartContract::from_compiled(wasm), Metadata::default()); client.submit_transaction_blocking(&transaction)?; - let metadata_value = client.request(FindAccountKeyValueByIdAndKey::new( + let metadata_value: JsonString = client.request(FindAccountKeyValueByIdAndKey::new( client.account.clone(), Name::from_str("cursor").unwrap(), ))?; - let cursor: String = metadata_value.try_into()?; - let asset_cursor = serde_json::from_str::(&cursor)?; + let asset_cursor = metadata_value.try_into_any()?; let err = client .request_with_cursor::>(asset_cursor) @@ -63,10 +57,8 @@ fn smart_contract_can_filter_queries() -> Result<()> { .optimize()? .into_bytes()?; - let transaction = client.build_transaction( - WasmSmartContract::from_compiled(wasm), - UnlimitedMetadata::default(), - ); + let transaction = + client.build_transaction(WasmSmartContract::from_compiled(wasm), Metadata::default()); client.submit_transaction_blocking(&transaction)?; Ok(()) diff --git a/client/tests/integration/roles.rs b/client/tests/integration/roles.rs index 13cf7afb95f..c4822a464d2 100644 --- a/client/tests/integration/roles.rs +++ b/client/tests/integration/roles.rs @@ -1,5 +1,3 @@ -use std::str::FromStr as _; - use eyre::Result; use iroha::{ client::{self, QueryResult}, @@ -59,7 +57,7 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> { test_client.submit_blocking(register_mouse)?; // Registering role - let role_id = RoleId::from_str("ACCESS_TO_MOUSE_METADATA")?; + let role_id = "ACCESS_TO_MOUSE_METADATA".parse::()?; let role = Role::new(role_id.clone()) .add_permission(Permission::new( "CanSetKeyValueInAccount".parse()?, @@ -82,8 +80,8 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> { // Alice modifies Mouse's metadata let set_key_value = 
SetKeyValue::account( mouse_id, - Name::from_str("key").expect("Valid"), - "value".to_owned(), + "key".parse::()?, + "value".parse::()?, ); test_client.submit_blocking(set_key_value)?; @@ -144,7 +142,7 @@ fn role_with_invalid_permissions_is_not_accepted() -> Result<()> { let (_rt, _peer, test_client) = ::new().with_port(11_025).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let role_id = RoleId::from_str("ACCESS_TO_ACCOUNT_METADATA")?; + let role_id = "ACCESS_TO_ACCOUNT_METADATA".parse()?; let rose_asset_id: AssetId = format!("rose##{}", ALICE_ID.clone()) .parse() .expect("should be valid"); @@ -222,13 +220,13 @@ fn grant_revoke_role_permissions() -> Result<()> { test_client.submit_blocking(register_mouse)?; // Registering role - let role_id = RoleId::from_str("ACCESS_TO_MOUSE_METADATA")?; + let role_id = "ACCESS_TO_MOUSE_METADATA".parse::()?; let role = Role::new(role_id.clone()); let register_role = Register::role(role); test_client.submit_blocking(register_role)?; // Transfer domain ownership to Mouse - let domain_id = DomainId::from_str("wonderland")?; + let domain_id = "wonderland".parse::()?; let transfer_domain = Transfer::domain(alice_id.clone(), domain_id, mouse_id.clone()); test_client.submit_blocking(transfer_domain)?; @@ -241,8 +239,8 @@ fn grant_revoke_role_permissions() -> Result<()> { let set_key_value = SetKeyValue::account( mouse_id.clone(), - Name::from_str("key").expect("Valid"), - "value".to_owned(), + "key".parse()?, + "value".parse::()?, ); let permission = Permission::new( "CanSetKeyValueInAccount".parse()?, diff --git a/client/tests/integration/set_parameter.rs b/client/tests/integration/set_parameter.rs index cf2fa11accb..ec376d3f817 100644 --- a/client/tests/integration/set_parameter.rs +++ b/client/tests/integration/set_parameter.rs @@ -1,9 +1,12 @@ -use std::str::FromStr; +use std::time::Duration; use eyre::Result; use iroha::{ - client::{self, QueryResult}, - data_model::prelude::*, + client, + 
data_model::{ + parameter::{Parameter, Parameters, SumeragiParameter, SumeragiParameters}, + prelude::*, + }, }; use test_network::*; @@ -12,51 +15,22 @@ fn can_change_parameter_value() -> Result<()> { let (_rt, _peer, test_client) = ::new().with_port(11_135).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let parameter = Parameter::from_str("?BlockTime=4000")?; - let parameter_id = ParameterId::from_str("BlockTime")?; - let param_box = SetParameter::new(parameter); + let old_params: Parameters = test_client.request(client::parameter::all())?; + assert_eq!( + old_params.sumeragi().block_time(), + SumeragiParameters::default().block_time() + ); - let old_params = test_client - .request(client::parameter::all())? - .collect::>>()?; - let param_val_old = old_params - .iter() - .find(|param| param.id() == ¶meter_id) - .expect("Parameter should exist") - .val(); + let block_time = 40_000; + let parameter = Parameter::Sumeragi(SumeragiParameter::BlockTimeMs(block_time)); + let set_param_isi = SetParameter::new(parameter); + test_client.submit_blocking(set_param_isi)?; - test_client.submit_blocking(param_box)?; + let sumeragi_params = test_client.request(client::parameter::all())?.sumeragi; + assert_eq!( + sumeragi_params.block_time(), + Duration::from_millis(block_time) + ); - let new_params = test_client - .request(client::parameter::all())? 
- .collect::>>()?; - let param_val_new = new_params - .iter() - .find(|param| param.id() == ¶meter_id) - .expect("Parameter should exist") - .val(); - - assert_ne!(param_val_old, param_val_new); - Ok(()) -} - -#[test] -fn parameter_propagated() -> Result<()> { - let (_rt, _peer, test_client) = ::new().with_port(10_985).start_with_runtime(); - wait_for_genesis_committed(&vec![test_client.clone()], 0); - - let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(8)).parse()?; - let create_domain = Register::domain(Domain::new(too_long_domain_name)); - let _ = test_client - .submit_blocking(create_domain.clone()) - .expect_err("Should fail before ident length limits update"); - - let parameter = Parameter::from_str("?WSVIdentLengthLimits=1,256_LL")?; - let param_box = SetParameter::new(parameter); - test_client.submit_blocking(param_box)?; - - test_client - .submit_blocking(create_domain) - .expect("Should work after ident length limits update"); Ok(()) } diff --git a/client/tests/integration/smartcontracts/Cargo.toml b/client/tests/integration/smartcontracts/Cargo.toml index 5004748a0c0..e6fb9bcaf40 100644 --- a/client/tests/integration/smartcontracts/Cargo.toml +++ b/client/tests/integration/smartcontracts/Cargo.toml @@ -13,6 +13,7 @@ members = [ "mint_rose_trigger", "executor_with_admin", "executor_with_custom_permission", + "executor_with_custom_parameter", "executor_remove_permission", "executor_with_migration_fail", "executor_custom_instructions_simple", diff --git a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs index 811e64fbbb8..83fb83970b0 100644 --- a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs +++ b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs @@ -21,8 +21,6 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { let accounts_cursor = 
FindAllAccounts.execute().dbg_unwrap(); - let limits = MetadataLimits::new(256, 256); - let bad_domain_ids: [DomainId; 2] = [ "genesis".parse().dbg_unwrap(), "garden_of_live_flowers".parse().dbg_unwrap(), @@ -35,7 +33,7 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { continue; } - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); let name = format!( "nft_for_{}_in_{}", account.id().signatory(), @@ -43,14 +41,14 @@ fn main(_id: TriggerId, _owner: AccountId, _event: EventBox) { ) .parse() .dbg_unwrap(); - metadata.insert_with_limits(name, true, limits).dbg_unwrap(); + metadata.insert(name, true); let nft_id = generate_new_nft_id(account.id()); let nft_definition = AssetDefinition::store(nft_id.clone()) .mintable_once() .with_metadata(metadata); let account_nft_id = AssetId::new(nft_id, account.id().clone()); - let account_nft = Asset::new(account_nft_id, Metadata::new()); + let account_nft = Asset::new(account_nft_id, Metadata::default()); Register::asset_definition(nft_definition) .execute() diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml b/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml index 6ce6deb6833..be8ba50c7f6 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/Cargo.toml @@ -8,6 +8,9 @@ authors.workspace = true license.workspace = true [dependencies] +# TODO: Cargo complains if I take it from workspace +iroha_executor = { version = "=2.0.0-pre-rc.21", path = "../../../../../smart_contract/executor", features = ["debug"] } + iroha_data_model.workspace = true iroha_schema.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/complex.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/complex_isi.rs similarity index 87% rename from 
client/tests/integration/smartcontracts/executor_custom_data_model/src/complex.rs rename to client/tests/integration/smartcontracts/executor_custom_data_model/src/complex_isi.rs index 66fe1c54c71..c025c27b022 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/src/complex.rs +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/complex_isi.rs @@ -11,13 +11,13 @@ mod isi { use alloc::{boxed::Box, format, string::String, vec::Vec}; use iroha_data_model::{ - isi::{Custom, InstructionBox}, - JsonString, + isi::{CustomInstruction, Instruction, InstructionBox}, + prelude::JsonString, }; use iroha_schema::IntoSchema; use serde::{Deserialize, Serialize}; - use crate::complex::expression::EvaluatesTo; + use crate::complex_isi::expression::EvaluatesTo; #[derive(Debug, Deserialize, Serialize, IntoSchema)] pub enum CustomInstructionExpr { @@ -26,7 +26,23 @@ mod isi { // Other custom instructions } - impl From for Custom { + impl From for CustomInstructionExpr { + fn from(isi: CoreExpr) -> Self { + Self::Core(isi) + } + } + + impl From for CustomInstructionExpr { + fn from(isi: ConditionalExpr) -> Self { + Self::If(Box::new(isi)) + } + } + + impl Instruction for CustomInstructionExpr {} + impl Instruction for ConditionalExpr {} + impl Instruction for CoreExpr {} + + impl From for CustomInstruction { fn from(isi: CustomInstructionExpr) -> Self { let payload = serde_json::to_value(&isi) .expect("INTERNAL BUG: Couldn't serialize custom instruction"); @@ -35,9 +51,21 @@ mod isi { } } - impl CustomInstructionExpr { - pub fn into_instruction(self) -> InstructionBox { - InstructionBox::Custom(self.into()) + impl From for InstructionBox { + fn from(isi: CustomInstructionExpr) -> Self { + Self::Custom(isi.into()) + } + } + + impl From for InstructionBox { + fn from(isi: CoreExpr) -> Self { + Self::Custom(CustomInstructionExpr::from(isi).into()) + } + } + + impl From for InstructionBox { + fn from(isi: ConditionalExpr) -> Self { + 
Self::Custom(CustomInstructionExpr::from(isi).into()) } } @@ -107,14 +135,14 @@ mod expression { pub struct EvaluatesTo { #[serde(flatten)] pub(crate) expression: Box, - _value_type: PhantomData, + type_: PhantomData, } impl EvaluatesTo { pub fn new_unchecked(expression: impl Into) -> Self { Self { expression: Box::new(expression.into()), - _value_type: PhantomData, + type_: PhantomData, } } } @@ -250,7 +278,7 @@ mod evaluate { isi::error::InstructionExecutionError, query::QueryBox, ValidationFail, }; - use crate::complex::expression::{EvaluatesTo, Expression, Greater, Value}; + use crate::complex_isi::expression::{EvaluatesTo, Expression, Greater, Value}; pub trait Evaluate { /// The resulting type of the expression. diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs index 62405125d13..d245d6f5290 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/lib.rs @@ -1,8 +1,9 @@ -//! Example of custom instructions which can be used in custom executor +//! Example of data model which can be used in custom executor #![no_std] extern crate alloc; -pub mod complex; -pub mod simple; +pub mod complex_isi; +pub mod parameters; +pub mod simple_isi; diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs new file mode 100644 index 00000000000..621e646a963 --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/parameters.rs @@ -0,0 +1,21 @@ +//! 
Module with custom parameters +use alloc::{format, string::String, vec::Vec}; + +use iroha_executor::prelude::*; +use iroha_schema::IntoSchema; +use serde::{Deserialize, Serialize}; + +/// Parameter that controls domain limits +#[derive(PartialEq, Eq, Parameter, Serialize, Deserialize, IntoSchema)] +pub struct DomainLimits { + /// Length of domain id in bytes + pub id_len: u32, +} + +impl Default for DomainLimits { + fn default() -> Self { + Self { + id_len: 2_u32.pow(4), + } + } +} diff --git a/client/tests/integration/smartcontracts/executor_custom_data_model/src/simple.rs b/client/tests/integration/smartcontracts/executor_custom_data_model/src/simple_isi.rs similarity index 59% rename from client/tests/integration/smartcontracts/executor_custom_data_model/src/simple.rs rename to client/tests/integration/smartcontracts/executor_custom_data_model/src/simple_isi.rs index 2f4b5d9528f..e82e5b0a97f 100644 --- a/client/tests/integration/smartcontracts/executor_custom_data_model/src/simple.rs +++ b/client/tests/integration/smartcontracts/executor_custom_data_model/src/simple_isi.rs @@ -5,9 +5,8 @@ use alloc::{format, string::String, vec::Vec}; use iroha_data_model::{ asset::AssetDefinitionId, - isi::{Custom, InstructionBox}, - prelude::Numeric, - JsonString, + isi::{CustomInstruction, Instruction, InstructionBox}, + prelude::{JsonString, Numeric}, }; use iroha_schema::IntoSchema; use serde::{Deserialize, Serialize}; @@ -24,7 +23,16 @@ pub struct MintAssetForAllAccounts { pub quantity: Numeric, } -impl From for Custom { +impl From for CustomInstructionBox { + fn from(isi: MintAssetForAllAccounts) -> Self { + Self::MintAssetForAllAccounts(isi) + } +} + +impl Instruction for CustomInstructionBox {} +impl Instruction for MintAssetForAllAccounts {} + +impl From for CustomInstruction { fn from(isi: CustomInstructionBox) -> Self { let payload = serde_json::to_value(&isi) .expect("INTERNAL BUG: Couldn't serialize custom instruction"); @@ -33,9 +41,15 @@ impl From for Custom { } 
} -impl CustomInstructionBox { - pub fn into_instruction(self) -> InstructionBox { - InstructionBox::Custom(self.into()) +impl From for InstructionBox { + fn from(isi: MintAssetForAllAccounts) -> Self { + Self::Custom(CustomInstructionBox::from(isi).into()) + } +} + +impl From for InstructionBox { + fn from(isi: CustomInstructionBox) -> Self { + Self::Custom(isi.into()) } } diff --git a/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs index 13cf48b8fab..a9ae532107f 100644 --- a/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_instructions_complex/src/lib.rs @@ -10,11 +10,11 @@ extern crate alloc; #[cfg(not(test))] extern crate panic_halt; -use executor_custom_data_model::complex::{ +use executor_custom_data_model::complex_isi::{ ConditionalExpr, CoreExpr, CustomInstructionExpr, Evaluate, Value, }; use iroha_executor::{ - data_model::{isi::Custom, query::QueryOutputBox}, + data_model::{isi::CustomInstruction, query::QueryOutputBox}, prelude::*, }; use lol_alloc::{FreeListAllocator, LockedAllocator}; @@ -31,7 +31,7 @@ struct Executor { block_height: u64, } -fn visit_custom(executor: &mut Executor, _authority: &AccountId, isi: &Custom) { +fn visit_custom(executor: &mut Executor, _authority: &AccountId, isi: &CustomInstruction) { let Ok(isi) = CustomInstructionExpr::try_from(isi.payload()) else { deny!(executor, "Failed to parse custom instruction"); }; @@ -66,7 +66,7 @@ fn execute_if(isi: ConditionalExpr) -> Result<(), ValidationFail> { struct Context; -impl executor_custom_data_model::complex::Context for Context { +impl executor_custom_data_model::complex_isi::Context for Context { fn query(&self, query: &QueryBox) -> Result { // Note: supported only queries which return numeric result match query.execute()?.into_inner() { @@ -77,9 +77,11 
@@ impl executor_custom_data_model::complex::Context for Context { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { DataModelBuilder::with_default_permissions() - .with_custom_instruction::() + .add_instruction::() + .add_instruction::() + .add_instruction::() .build_and_set(); Ok(()) diff --git a/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs b/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs index a10b0ea9fc6..f1dfafe37d1 100644 --- a/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_custom_instructions_simple/src/lib.rs @@ -8,8 +8,8 @@ extern crate alloc; #[cfg(not(test))] extern crate panic_halt; -use executor_custom_data_model::simple::{CustomInstructionBox, MintAssetForAllAccounts}; -use iroha_executor::{data_model::isi::Custom, debug::DebugExpectExt, prelude::*}; +use executor_custom_data_model::simple_isi::{CustomInstructionBox, MintAssetForAllAccounts}; +use iroha_executor::{data_model::isi::CustomInstruction, debug::DebugExpectExt, prelude::*}; use lol_alloc::{FreeListAllocator, LockedAllocator}; #[global_allocator] @@ -24,7 +24,7 @@ struct Executor { block_height: u64, } -fn visit_custom(executor: &mut Executor, _authority: &AccountId, isi: &Custom) { +fn visit_custom(executor: &mut Executor, _authority: &AccountId, isi: &CustomInstruction) { let Ok(isi) = CustomInstructionBox::try_from(isi.payload()) else { deny!(executor, "Failed to parse custom instruction"); }; @@ -55,9 +55,10 @@ fn execute_mint_asset_for_all_accounts(isi: MintAssetForAllAccounts) -> Result<( } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { DataModelBuilder::with_default_permissions() - .with_custom_instruction::() + .add_instruction::() + .add_instruction::() 
.build_and_set(); Ok(()) diff --git a/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs b/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs index a88a34fd123..83583d2cec0 100644 --- a/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_remove_permission/src/lib.rs @@ -23,7 +23,7 @@ struct Executor { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { // Note that actually migration will reset token schema to default (minus `CanUnregisterDomain`) // So any added custom permission tokens will be also removed DataModelBuilder::with_default_permissions() diff --git a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs index f34d4f2eb57..0f6b152a16b 100644 --- a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs @@ -32,6 +32,6 @@ fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &Instr } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { Ok(()) } diff --git a/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml b/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml new file mode 100644 index 00000000000..fe61791b90e --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_custom_parameter/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "executor_with_custom_parameter" + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lib] +crate-type = ['cdylib'] + +[dependencies] +executor_custom_data_model.workspace = true +iroha_executor.workspace = true 
+iroha_schema.workspace = true + +parity-scale-codec.workspace = true +anyhow.workspace = true +serde_json.workspace = true +serde.workspace = true + +panic-halt.workspace = true +lol_alloc.workspace = true +getrandom.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs new file mode 100644 index 00000000000..401ce4a17cf --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_custom_parameter/src/lib.rs @@ -0,0 +1,51 @@ +//! Runtime Executor which allows domains whose id satisfies the length limit +#![no_std] + +extern crate alloc; +#[cfg(not(test))] +extern crate panic_halt; + +use alloc::format; + +use executor_custom_data_model::parameters::DomainLimits; +use iroha_executor::{prelude::*, DataModelBuilder}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator = LockedAllocator::new(FreeListAllocator::new()); + +getrandom::register_custom_getrandom!(iroha_executor::stub_getrandom); + +#[derive(Constructor, ValidateEntrypoints, Validate, Visit)] +#[visit(custom(visit_register_domain))] +struct Executor { + verdict: Result, + block_height: u64, +} + +fn visit_register_domain(executor: &mut Executor, _authority: &AccountId, isi: &Register) { + let parameters = FindAllParameters.execute().unwrap().into_inner(); + + let domain_limits: DomainLimits = parameters + .custom() + .get(&DomainLimits::id()) + .unwrap() + .try_into() + .expect("INTERNAL BUG: Failed to deserialize json as `DomainLimits`"); + + iroha_executor::log::info!(&format!("Registering domain: {}", isi.object().id())); + if isi.object().id().name().as_ref().len() > domain_limits.id_len as usize { + deny!(executor, "Domain id exceeds the limit"); + } + + execute!(executor, isi); +} + +#[entrypoint] +fn migrate(_block_height: u64) -> MigrationResult { + 
DataModelBuilder::with_default_permissions() + .add_parameter(DomainLimits::default()) + .build_and_set(); + + Ok(()) +} diff --git a/client/tests/integration/smartcontracts/executor_with_custom_permission/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_custom_permission/src/lib.rs index c59333e5e8d..b04daa68ba1 100644 --- a/client/tests/integration/smartcontracts/executor_with_custom_permission/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_with_custom_permission/src/lib.rs @@ -110,9 +110,9 @@ impl Executor { fn replace_token(accounts: &[Account]) -> MigrationResult { let can_unregister_domain_definition_id = - iroha_executor::default::permissions::domain::CanUnregisterDomain::id(); + iroha_executor::default::permissions::domain::CanUnregisterDomain::name(); - let can_control_domain_lives_definition_id = token::CanControlDomainLives::id(); + let can_control_domain_lives_definition_id = token::CanControlDomainLives::name(); accounts .iter() diff --git a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs index 0aaa7907707..98d517d084c 100644 --- a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs +++ b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs @@ -24,7 +24,7 @@ struct Executor { } #[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { +fn migrate(_block_height: u64) -> MigrationResult { // Performing side-effects to check in the test that it won't be applied after failure // Registering a new domain (using ISI) diff --git a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs index 1420b7fd394..c4cd864b96f 100644 --- a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs +++ 
b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs @@ -26,7 +26,7 @@ fn main(id: TriggerId, owner: AccountId, _event: EventBox) { .execute() .dbg_unwrap() .into_inner() - .try_into() + .try_into_any() .dbg_unwrap(); Mint::asset_numeric(val, rose_id) diff --git a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs index 74ee7b4377d..716725f12e2 100644 --- a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs +++ b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs @@ -7,13 +7,7 @@ extern crate panic_halt; extern crate alloc; -use alloc::string::ToString as _; - -use iroha_smart_contract::{ - data_model::{metadata::MetadataValueBox, query::cursor::ForwardCursor}, - parse, - prelude::*, -}; +use iroha_smart_contract::{data_model::query::cursor::ForwardCursor, parse, prelude::*}; use lol_alloc::{FreeListAllocator, LockedAllocator}; use nonzero_ext::nonzero; use parity_scale_codec::{Decode, DecodeAll, Encode}; @@ -47,11 +41,7 @@ fn main(owner: AccountId) { SetKeyValue::account( owner, parse!(Name, "cursor"), - MetadataValueBox::String( - serde_json::to_value(&asset_cursor.cursor) - .dbg_expect("Failed to convert cursor to JSON") - .to_string(), - ), + JsonString::new(asset_cursor.cursor), ) .execute() .dbg_expect("Failed to save cursor to the owner's metadata"); diff --git a/client/tests/integration/smartcontracts/smart_contract_can_filter_queries/src/lib.rs b/client/tests/integration/smartcontracts/smart_contract_can_filter_queries/src/lib.rs index 420ff477e67..0e45690ca93 100644 --- a/client/tests/integration/smartcontracts/smart_contract_can_filter_queries/src/lib.rs +++ b/client/tests/integration/smartcontracts/smart_contract_can_filter_queries/src/lib.rs @@ -35,14 +35,14 @@ fn main(_owner: AccountId) { Register::asset_definition(AssetDefinition::new( time_id.clone(), - 
AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), )) .execute() .dbg_unwrap(); Register::asset_definition(AssetDefinition::new( space_id.clone(), - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), )) .execute() .dbg_unwrap(); diff --git a/client/tests/integration/sorting.rs b/client/tests/integration/sorting.rs index dd95bba5579..bae12a3ed30 100644 --- a/client/tests/integration/sorting.rs +++ b/client/tests/integration/sorting.rs @@ -19,6 +19,7 @@ use test_network::*; use test_samples::ALICE_ID; #[test] +#[ignore] #[allow(clippy::cast_possible_truncation)] fn correct_pagination_assets_after_creating_new_one() { // FIXME transaction is rejected for more than a certain number of instructions @@ -51,14 +52,8 @@ fn correct_pagination_assets_after_creating_new_one() { let asset_definition_id = AssetDefinitionId::from_str(&format!("xor{i}#wonderland")).expect("Valid"); let asset_definition = AssetDefinition::store(asset_definition_id.clone()); - let mut asset_metadata = Metadata::new(); - asset_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - i as u32, - MetadataLimits::new(10, 23), - ) - .expect("Valid"); + let mut asset_metadata = Metadata::default(); + asset_metadata.insert(sort_by_metadata_key.clone(), i as u32); let asset = Asset::new( AssetId::new(asset_definition_id, account_id.clone()), AssetValue::Store(asset_metadata), @@ -146,14 +141,8 @@ fn correct_sorting_of_entities() { for i in 0..n { let asset_definition_id = AssetDefinitionId::from_str(&format!("xor_{i}#wonderland")).expect("Valid"); - let mut asset_metadata = Metadata::new(); - asset_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut asset_metadata = Metadata::default(); + asset_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let asset_definition = 
AssetDefinition::numeric(asset_definition_id.clone()) .with_metadata(asset_metadata.clone()); @@ -207,14 +196,8 @@ fn correct_sorting_of_entities() { public_keys.sort_unstable(); for i in 0..n { let account_id = AccountId::new(domain_id.clone(), public_keys[i as usize].clone()); - let mut account_metadata = Metadata::new(); - account_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut account_metadata = Metadata::default(); + account_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let account = Account::new(account_id.clone()).with_metadata(account_metadata.clone()); accounts.push(account_id); @@ -255,14 +238,8 @@ fn correct_sorting_of_entities() { let n = 10u32; for i in 0..n { let domain_id = DomainId::from_str(&format!("neverland{i}")).expect("Valid"); - let mut domain_metadata = Metadata::new(); - domain_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut domain_metadata = Metadata::default(); + domain_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); @@ -302,14 +279,8 @@ fn correct_sorting_of_entities() { let mut instructions = vec![]; for (idx, val) in input { let domain_id = DomainId::from_str(&format!("neverland_{idx}")).expect("Valid"); - let mut domain_metadata = Metadata::new(); - domain_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - val, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut domain_metadata = Metadata::default(); + domain_metadata.insert(sort_by_metadata_key.clone(), val); let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); @@ -376,14 +347,8 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { accounts_b.push(account_id); account } else { 
- let mut account_metadata = Metadata::new(); - account_metadata - .insert_with_limits( - sort_by_metadata_key.clone(), - n - i - 1, - MetadataLimits::new(10, 28), - ) - .expect("Valid"); + let mut account_metadata = Metadata::default(); + account_metadata.insert(sort_by_metadata_key.clone(), n - i - 1); let account = Account::new(account_id.clone()).with_metadata(account_metadata); accounts_a.push(account_id); account diff --git a/client/tests/integration/triggers/by_call_trigger.rs b/client/tests/integration/triggers/by_call_trigger.rs index 868cbb161d8..2df35ff4aad 100644 --- a/client/tests/integration/triggers/by_call_trigger.rs +++ b/client/tests/integration/triggers/by_call_trigger.rs @@ -13,8 +13,9 @@ use iroha::{ use iroha_genesis::GenesisBlock; use iroha_logger::info; use serde_json::json; -use test_network::*; +use test_network::{Peer as TestPeer, *}; use test_samples::ALICE_ID; +use tokio::runtime::Runtime; const TRIGGER_NAME: &str = "mint_rose"; @@ -117,7 +118,8 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { // Registering trigger that should fail on execution let bad_trigger_id = TriggerId::from_str("bad_trigger")?; // Invalid instruction - let bad_trigger_instructions = vec![Fail::new("Bad trigger".to_owned())]; + let fail_isi = Unregister::domain("dummy".parse()?); + let bad_trigger_instructions = vec![fail_isi]; let register_bad_trigger = Register::trigger(Trigger::new( bad_trigger_id.clone(), Action::new( @@ -439,13 +441,17 @@ fn trigger_in_genesis_using_base64() -> Result<()> { ), ); + let mut peer = TestPeer::new().expect("Failed to create peer"); + let topology = vec![peer.id.clone()]; + // Registering trigger in genesis - let genesis = GenesisBlock::test_with_instructions([Register::trigger(trigger).into()], vec![]); + let genesis = + GenesisBlock::test_with_instructions([Register::trigger(trigger).into()], topology); - let (_rt, _peer, mut test_client) = ::new() - .with_genesis(genesis) - .with_port(10_045) 
- .start_with_runtime(); + let rt = Runtime::test(); + let builder = PeerBuilder::new().with_genesis(genesis).with_port(10_045); + rt.block_on(builder.start_with_peer(&mut peer)); + let mut test_client = Client::test(&peer.api_address); wait_for_genesis_committed(&vec![test_client.clone()], 0); let asset_definition_id = "rose#wonderland".parse()?; @@ -457,7 +463,7 @@ fn trigger_in_genesis_using_base64() -> Result<()> { .submit_blocking(SetKeyValue::trigger( trigger_id.clone(), "VAL".parse()?, - numeric!(1), + 1_u32, )) .unwrap(); let call_trigger = ExecuteTrigger::new(trigger_id); diff --git a/client/tests/integration/triggers/orphans.rs b/client/tests/integration/triggers/orphans.rs index 37602773cfd..4e49d3a12f8 100644 --- a/client/tests/integration/triggers/orphans.rs +++ b/client/tests/integration/triggers/orphans.rs @@ -28,10 +28,11 @@ fn set_up_trigger( Register::account(Account::new(the_one_who_fails.clone())).into(); let fail_on_account_events = "fail".parse::()?; + let fail_isi = Unregister::domain("dummy".parse().unwrap()); let register_fail_on_account_events: InstructionBox = Register::trigger(Trigger::new( fail_on_account_events.clone(), Action::new( - [Fail::new(":(".to_owned())], + [fail_isi], Repeats::Indefinitely, the_one_who_fails.clone(), AccountEventFilter::new(), diff --git a/client/tests/integration/triggers/time_trigger.rs b/client/tests/integration/triggers/time_trigger.rs index b44acc1611a..c77ca97eea9 100644 --- a/client/tests/integration/triggers/time_trigger.rs +++ b/client/tests/integration/triggers/time_trigger.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr as _, time::Duration}; +use std::time::Duration; use eyre::Result; use iroha::{ @@ -6,16 +6,31 @@ use iroha::{ data_model::{ asset::AssetId, events::pipeline::{BlockEventFilter, BlockStatus}, + parameter::SumeragiParameters, prelude::*, transaction::WasmSmartContract, Level, }, }; -use iroha_config::parameters::defaults::chain_wide::CONSENSUS_ESTIMATION as DEFAULT_CONSENSUS_ESTIMATION; 
use iroha_logger::info; use test_network::*; use test_samples::{gen_account_in, ALICE_ID}; +/// Default estimation of consensus duration. +pub fn default_consensus_estimation() -> Duration { + let default_parameters = SumeragiParameters::default(); + + default_parameters + .block_time() + .checked_add( + default_parameters + .commit_time() + .checked_div(2) + .map_or_else(|| unreachable!(), |x| x), + ) + .map_or_else(|| unreachable!(), |x| x) +} + fn curr_time() -> core::time::Duration { use std::time::SystemTime; @@ -41,7 +56,7 @@ macro_rules! const_assert { fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result<()> { const PERIOD: Duration = Duration::from_millis(100); const ACCEPTABLE_ERROR_PERCENT: u8 = 15; - const_assert!(PERIOD.as_millis() < DEFAULT_CONSENSUS_ESTIMATION.as_millis()); + assert!(PERIOD.as_millis() < default_consensus_estimation().as_millis()); const_assert!(ACCEPTABLE_ERROR_PERCENT <= 100); let (_rt, _peer, mut test_client) = ::new().with_port(10_775).start_with_runtime(); @@ -77,7 +92,7 @@ fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result Duration::from_secs(1), 3, )?; - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); let finish_time = curr_time(); let average_count = finish_time.saturating_sub(start_time).as_millis() / PERIOD.as_millis(); @@ -101,12 +116,14 @@ fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result #[test] fn mint_asset_after_3_sec() -> Result<()> { - let (_rt, _peer, test_client) = ::new().with_port(10_660).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_665).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); // Sleep to certainly bypass time interval analyzed by genesis - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); - let asset_definition_id = 
AssetDefinitionId::from_str("rose#wonderland").expect("Valid"); + let asset_definition_id = "rose#wonderland" + .parse::() + .expect("Valid"); let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); @@ -137,7 +154,7 @@ fn mint_asset_after_3_sec() -> Result<()> { assert_eq!(init_quantity, after_registration_quantity); // Sleep long enough that trigger start is in the past - std::thread::sleep(DEFAULT_CONSENSUS_ESTIMATION); + std::thread::sleep(default_consensus_estimation()); test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?; let after_wait_quantity = test_client.request(FindAssetQuantityById { @@ -189,7 +206,7 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> { let sample_isi = SetKeyValue::account( account_id.clone(), "key".parse::()?, - String::from("value"), + "value".parse::()?, ); test_client.submit(sample_isi)?; } @@ -202,6 +219,17 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { const TRIGGER_PERIOD: Duration = Duration::from_millis(1000); const EXPECTED_COUNT: u64 = 4; + info!("Building trigger"); + let wasm = iroha_wasm_builder::Builder::new( + "tests/integration/smartcontracts/create_nft_for_every_user_trigger", + ) + .show_output() + .build()? + .optimize()? + .into_bytes()?; + + info!("WASM size is {} bytes", wasm.len()); + let (_rt, _peer, mut test_client) = ::new().with_port(10_780).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); @@ -224,19 +252,6 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { .collect::>(); test_client.submit_all_blocking(register_accounts)?; - // Building trigger - info!("Building trigger"); - - let wasm = iroha_wasm_builder::Builder::new( - "tests/integration/smartcontracts/create_nft_for_every_user_trigger", - ) - .show_output() - .build()? - .optimize()? 
- .into_bytes()?; - - info!("WASM size is {} bytes", wasm.len()); - // Start listening BEFORE submitting any transaction not to miss any block committed event let event_listener = get_block_committed_event_listener(&test_client)?; @@ -245,13 +260,15 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { let offset = Duration::from_secs(10); let start_time = curr_time() + offset; let schedule = TimeSchedule::starting_at(start_time).with_period(TRIGGER_PERIOD); + + let filter = TimeEventFilter(ExecutionTime::Schedule(schedule)); let register_trigger = Register::trigger(Trigger::new( "mint_nft_for_all".parse()?, Action::new( WasmSmartContract::from_compiled(wasm), Repeats::Indefinitely, alice_id.clone(), - TimeEventFilter::new(ExecutionTime::Schedule(schedule)), + filter, ), )); test_client.submit_blocking(register_trigger)?; @@ -325,7 +342,7 @@ fn submit_sample_isi_on_every_block_commit( let sample_isi = SetKeyValue::account( account_id.clone(), "key".parse::()?, - String::from("value"), + JsonString::new("value"), ); test_client.submit(sample_isi)?; } diff --git a/client/tests/integration/triggers/trigger_rollback.rs b/client/tests/integration/triggers/trigger_rollback.rs index 36c9d40cd70..754b19e0118 100644 --- a/client/tests/integration/triggers/trigger_rollback.rs +++ b/client/tests/integration/triggers/trigger_rollback.rs @@ -19,10 +19,8 @@ fn failed_trigger_revert() -> Result<()> { let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); - let instructions: [InstructionBox; 2] = [ - create_asset.into(), - Fail::new("Always fail".to_owned()).into(), - ]; + let fail_isi = Unregister::domain("dummy".parse().unwrap()); + let instructions: [InstructionBox; 2] = [create_asset.into(), fail_isi.into()]; let register_trigger = Register::trigger(Trigger::new( trigger_id.clone(), Action::new( diff --git a/client/tests/integration/tx_history.rs 
b/client/tests/integration/tx_history.rs index 77ee2fcfa2e..be415f90eae 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -46,7 +46,7 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> &mint_not_existed_asset }; let instructions: Vec = vec![mint_asset.clone().into()]; - let transaction = client.build_transaction(instructions, UnlimitedMetadata::new()); + let transaction = client.build_transaction(instructions, Metadata::default()); client.submit_transaction(&transaction)?; } thread::sleep(pipeline_time * 5); diff --git a/client/tests/integration/upgrade.rs b/client/tests/integration/upgrade.rs index 2d06f4a8a75..242bf039412 100644 --- a/client/tests/integration/upgrade.rs +++ b/client/tests/integration/upgrade.rs @@ -4,14 +4,16 @@ use eyre::Result; use futures_util::TryStreamExt as _; use iroha::{ client::{self, Client, QueryResult}, - data_model::prelude::*, + data_model::{ + parameter::{Parameter, SmartContractParameter}, + prelude::*, + }, }; -use iroha_data_model::parameter::{default::EXECUTOR_FUEL_LIMIT, ParametersBuilder}; use iroha_logger::info; +use nonzero_ext::nonzero; use serde_json::json; use test_network::*; use test_samples::{ALICE_ID, BOB_ID}; -use tokio::sync::mpsc; const ADMIN_PUBLIC_KEY_MULTIHASH: &str = "ed012076E5CA9698296AF9BE2CA45F525CB3BCFDEB7EE068BA56F973E9DD90564EF4FC"; @@ -73,14 +75,14 @@ fn executor_upgrade_should_run_migration() -> Result<()> { let (_rt, _peer, client) = ::new().with_port(10_990).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); - let can_unregister_domain_token_id = "CanUnregisterDomain".parse().unwrap(); + let can_unregister_domain_token_id = "CanUnregisterDomain"; // Check that `CanUnregisterDomain` exists assert!(client .request(FindExecutorDataModel)? 
.permissions() .iter() - .any(|id| id == &can_unregister_domain_token_id)); + .any(|id| id == can_unregister_domain_token_id)); // Check that Alice has permission to unregister Wonderland let alice_id = ALICE_ID.clone(); @@ -89,7 +91,7 @@ fn executor_upgrade_should_run_migration() -> Result<()> { .collect::>>() .expect("Valid"); assert!(alice_tokens.contains(&Permission::new( - can_unregister_domain_token_id.clone(), + can_unregister_domain_token_id.parse().unwrap(), json!({ "domain": DomainId::from_str("wonderland").unwrap() }), ))); @@ -103,14 +105,14 @@ fn executor_upgrade_should_run_migration() -> Result<()> { assert!(!data_model .permissions() .iter() - .any(|id| id == &can_unregister_domain_token_id)); + .any(|id| id == can_unregister_domain_token_id)); - let can_control_domain_lives_token_id = "CanControlDomainLives".parse().unwrap(); + let can_control_domain_lives_token_id = "CanControlDomainLives"; assert!(data_model .permissions() .iter() - .any(|id| id == &can_control_domain_lives_token_id)); + .any(|id| id == can_control_domain_lives_token_id)); // Check that Alice has `can_control_domain_lives` permission let alice_tokens = client @@ -118,7 +120,7 @@ fn executor_upgrade_should_run_migration() -> Result<()> { .collect::>>() .expect("Valid"); assert!(alice_tokens.contains(&Permission::new( - can_control_domain_lives_token_id, + can_control_domain_lives_token_id.parse().unwrap(), json!(null), ))); @@ -146,7 +148,7 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> { assert!(client .request(FindExecutorDataModel)? .permissions() - .contains(&can_unregister_domain_token.id)); + .contains(can_unregister_domain_token.name())); // Check that `TEST_ROLE` has permission assert!(client @@ -174,7 +176,7 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> { assert!(!client .request(FindExecutorDataModel)? 
.permissions() - .contains(&can_unregister_domain_token.id)); + .contains(can_unregister_domain_token.name())); // Check that `TEST_ROLE` doesn't have permission assert!(!client @@ -197,7 +199,7 @@ fn executor_upgrade_should_revoke_removed_permissions() -> Result<()> { #[test] fn executor_custom_instructions_simple() -> Result<()> { - use executor_custom_data_model::simple::{CustomInstructionBox, MintAssetForAllAccounts}; + use executor_custom_data_model::simple_isi::MintAssetForAllAccounts; let (_rt, _peer, client) = ::new().with_port(11_270).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); @@ -224,8 +226,7 @@ fn executor_custom_instructions_simple() -> Result<()> { asset_definition: asset_definition_id, quantity: Numeric::from(1u32), }; - let isi = CustomInstructionBox::MintAssetForAllAccounts(isi); - client.submit_blocking(isi.into_instruction())?; + client.submit_blocking(isi)?; // Check that bob has 2 roses assert_eq!( @@ -238,28 +239,17 @@ fn executor_custom_instructions_simple() -> Result<()> { #[test] fn executor_custom_instructions_complex() -> Result<()> { - use executor_custom_data_model::complex::{ - ConditionalExpr, CoreExpr, CustomInstructionExpr, EvaluatesTo, Expression, Greater, + use executor_custom_data_model::complex_isi::{ + ConditionalExpr, CoreExpr, EvaluatesTo, Expression, Greater, }; - use iroha_config::parameters::actual::Root as Config; - let mut config = Config::test(); - // Note that this value will be overwritten by genesis block with NewParameter ISI - // But it will be needed after NewParameter removal in #4597 - config.chain_wide.executor_runtime.fuel_limit = 1_000_000_000; - - let (_rt, _peer, client) = PeerBuilder::new() - .with_port(11_275) - .with_config(config) - .start_with_runtime(); + let (_rt, _peer, client) = PeerBuilder::new().with_port(11_275).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); - // Remove this after #4597 - config value will be used (see above) - let 
parameters = ParametersBuilder::new() - .add_parameter(EXECUTOR_FUEL_LIMIT, Numeric::from(1_000_000_000_u32))? - .into_set_parameters(); - client.submit_all_blocking(parameters)?; - + let executor_fuel_limit = SetParameter::new(Parameter::Executor(SmartContractParameter::Fuel( + nonzero!(1_000_000_000_u64), + ))); + client.submit_blocking(executor_fuel_limit)?; upgrade_executor( &client, "tests/integration/smartcontracts/executor_custom_instructions_complex", @@ -286,9 +276,9 @@ fn executor_custom_instructions_complex() -> Result<()> { ); let then = Burn::asset_numeric(Numeric::from(1u32), bob_rose.clone()); let then: InstructionBox = then.into(); - let then = CustomInstructionExpr::Core(CoreExpr::new(then)); - let isi = CustomInstructionExpr::If(Box::new(ConditionalExpr::new(condition, then))); - client.submit_blocking(isi.into_instruction())?; + let then = CoreExpr::new(then); + let isi = ConditionalExpr::new(condition, then); + client.submit_blocking(isi)?; Ok(()) }; burn_bob_rose_if_more_then_5()?; @@ -345,10 +335,8 @@ fn migration_should_cause_upgrade_event() { let (rt, _peer, client) = ::new().with_port(10_996).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); - let (sender, mut receiver) = mpsc::channel(1); let events_client = client.clone(); - - let _handle = rt.spawn(async move { + let task = rt.spawn(async move { let mut stream = events_client .listen_for_events_async([ExecutorEventFilter::new()]) .await @@ -358,7 +346,8 @@ fn migration_should_cause_upgrade_event() { new_data_model, }))) = event { - let _ = sender.send(new_data_model).await; + assert!(!new_data_model.permissions.is_empty()); + break; } } }); @@ -369,15 +358,43 @@ fn migration_should_cause_upgrade_event() { ) .unwrap(); - let data_model = rt - .block_on(async { - tokio::time::timeout(std::time::Duration::from_secs(60), receiver.recv()).await - }) - .ok() - .flatten() - .expect("should receive upgraded event immediately after upgrade"); + rt.block_on(async { + 
tokio::time::timeout(core::time::Duration::from_secs(60), task) + .await + .unwrap() + }) + .expect("should receive upgraded event immediately after upgrade"); +} + +#[test] +fn define_custom_parameter() -> Result<()> { + use executor_custom_data_model::parameters::DomainLimits; + + let (_rt, _peer, client) = ::new().with_port(10_996).start_with_runtime(); + wait_for_genesis_committed(&vec![client.clone()], 0); - assert!(!data_model.permissions.is_empty()); + let long_domain_name = "0".repeat(2_usize.pow(5)).parse::()?; + let create_domain = Register::domain(Domain::new(long_domain_name)); + client.submit_blocking(create_domain)?; + + upgrade_executor( + &client, + "tests/integration/smartcontracts/executor_with_custom_parameter", + ) + .unwrap(); + + let too_long_domain_name = "1".repeat(2_usize.pow(5)).parse::()?; + let create_domain = Register::domain(Domain::new(too_long_domain_name)); + let _err = client.submit_blocking(create_domain.clone()).unwrap_err(); + + let parameter = DomainLimits { + id_len: 2_u32.pow(6), + } + .into(); + let set_param_isi: InstructionBox = SetParameter::new(parameter).into(); + client.submit_all_blocking([set_param_isi, create_domain.into()])?; + + Ok(()) } fn upgrade_executor(client: &Client, executor: impl AsRef) -> Result<()> { diff --git a/client_cli/README.md b/client_cli/README.md index 869fe9b3763..08c4494f3d2 100644 --- a/client_cli/README.md +++ b/client_cli/README.md @@ -61,7 +61,7 @@ Check the [Bash guide in Iroha Tutorial](https://hyperledger.github.io/iroha-2-d ```bash ./iroha domain register --id="Soramitsu" ./iroha account register --id="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" -./iroha asset register --id="XOR#Soramitsu" --value-type=Numeric +./iroha asset register --id="XOR#Soramitsu" --type=Numeric ./iroha asset mint --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" --quantity=1010 ./iroha asset get 
--account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" ``` @@ -111,7 +111,7 @@ To do so, you must first register an Asset Definition and only then add some Ass Every asset has its own value spec. In this example, it is defined as `Numeric`, a 96-bit unsigned decimal. We also support `Store` for key-value structured data. ```bash -./iroha asset register --id="XOR#Soramitsu" --value-type=Numeric +./iroha asset register --id="XOR#Soramitsu" --type=Numeric ./iroha asset mint --account="ed01204A3C5A6B77BBE439969F95F0AA4E01AE31EC45A0D68C131B2C622751FCC5E3B6@Soramitsu" --asset="XOR#Soramitsu" --quantity=1010 ``` @@ -186,8 +186,8 @@ To test transactions in the JSON format (used in the genesis block and by other cat /path/to/file.json | ./iroha json transaction ``` -### Request arbitrary query +### Request arbitrary query -```bash +```bash echo '{ "FindAllParameters": null }' | ./iroha --config client.toml json query ``` diff --git a/client_cli/pytests/common/consts.py b/client_cli/pytests/common/consts.py index c46337ff847..837c9eb388e 100644 --- a/client_cli/pytests/common/consts.py +++ b/client_cli/pytests/common/consts.py @@ -20,7 +20,7 @@ class Stderr(Enum): TOO_LONG = "Name length violation" FAILED_TO_FIND_DOMAIN = "Failed to find domain" INVALID_CHARACTER = "Failed to parse" - INVALID_VALUE_TYPE = "should be either `Store` or `Numeric`" + INVALID_TYPE = "should be either `Store` or `Numeric`" RESERVED_CHARACTER = ( "The `@` character is reserved for `account@domain` constructs, " "and `#` — for `asset#domain`." diff --git a/client_cli/pytests/models/asset.py b/client_cli/pytests/models/asset.py index c3decd39da4..ae4d55f2c07 100644 --- a/client_cli/pytests/models/asset.py +++ b/client_cli/pytests/models/asset.py @@ -14,13 +14,13 @@ class AssetDefinition: :type name: str :param domain: The domain of the asset definition. :type domain: str - :param value_type: The value type of the asset definition. 
- :type value_type: str + :param type_: The value type of the asset definition. + :type type_: str """ name: str domain: str - value_type: str + type_: str def __repr__(self): return f"{self.name}#{self.domain}" diff --git a/client_cli/pytests/poetry.lock b/client_cli/pytests/poetry.lock index a252cadf742..037bbaf9e3f 100644 --- a/client_cli/pytests/poetry.lock +++ b/client_cli/pytests/poetry.lock @@ -278,13 +278,13 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "faker" -version = "25.9.1" +version = "26.0.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"}, - {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"}, + {file = "Faker-26.0.0-py3-none-any.whl", hash = "sha256:886ee28219be96949cd21ecc96c4c742ee1680e77f687b095202c8def1a08f06"}, + {file = "Faker-26.0.0.tar.gz", hash = "sha256:0f60978314973de02c00474c2ae899785a42b2cf4f41b7987e93c132a2b8a4a9"}, ] [package.dependencies] @@ -486,13 +486,13 @@ files = [ [[package]] name = "pylint" -version = "3.2.3" +version = "3.2.4" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, - {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, + {file = "pylint-3.2.4-py3-none-any.whl", hash = "sha256:43b8ffdf1578e4e4439fa1f6ace402281f5dd61999192280fa12fe411bef2999"}, + {file = "pylint-3.2.4.tar.gz", hash = "sha256:5753d27e49a658b12a48c2883452751a2ecfc7f38594e0980beb03a6e77e6f86"}, ] [package.dependencies] @@ -615,4 +615,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = 
"e328c585cf1eb261f37eab6b738f9e85cc6d9472177694ac6474e328173c3cd7" +content-hash = "be29b6aaac04ee65ef9b245b42c8cfa028d74326fcc58efb874a8b647ae013a2" diff --git a/client_cli/pytests/pyproject.toml b/client_cli/pytests/pyproject.toml index 388f6809a12..6fb170f12df 100644 --- a/client_cli/pytests/pyproject.toml +++ b/client_cli/pytests/pyproject.toml @@ -22,7 +22,7 @@ black = "*" flake8 = "*" [tool.poetry.group.dev.dependencies] -pylint = "^3.2.3" +pylint = "^3.2.4" isort = "^5.13.2" [build-system] diff --git a/client_cli/pytests/src/client_cli/client_cli.py b/client_cli/pytests/src/client_cli/client_cli.py index 6ebcdadafba..4ecde368a26 100644 --- a/client_cli/pytests/src/client_cli/client_cli.py +++ b/client_cli/pytests/src/client_cli/client_cli.py @@ -150,7 +150,7 @@ def account(self, signatory: str, domain: str): self.execute() return self - def asset(self, asset_definition=None, account=None, value_of_value_type=None): + def asset(self, asset_definition=None, account=None, value_of_type=None): """ Executes the 'asset' command with the given asset definition, account, and value. @@ -158,13 +158,13 @@ def asset(self, asset_definition=None, account=None, value_of_value_type=None): :type asset_definition: AssetDefinition :param account: The account to be queried, defaults to None. :type account: Account - :param value_of_value_type: The value of the value type, defaults to None. - :type value_of_value_type: str, optional + :param value_of_type: The value of the asset type, defaults to None. + :type value_of_type: str, optional :return: The current ClientCli object. 
:rtype: ClientCli """ self.command.insert(2, "asset") - if asset_definition and account and value_of_value_type: + if asset_definition and account and value_of_type: self.command.append( "--asset-id=" + asset_definition.name @@ -175,7 +175,7 @@ def asset(self, asset_definition=None, account=None, value_of_value_type=None): + "@" + account.domain ) - self.command.append("--quantity=" + value_of_value_type) + self.command.append("--quantity=" + value_of_type) self.execute() return self @@ -239,7 +239,7 @@ def burn(self, account, asset, quantity: str): self.execute() return self - def definition(self, asset: str, domain: str, value_type: str): + def definition(self, asset: str, domain: str, type_: str): """ Executes the 'definition' command for the given asset, domain, and value type. @@ -247,13 +247,13 @@ def definition(self, asset: str, domain: str, value_type: str): :type asset: str :param domain: The domain of the asset. :type domain: str - :param value_type: The value type of the asset. - :type value_type: str + :param type_: The value type of the asset. + :type type_: str :return: The current ClientCli object. 
:rtype: ClientCli """ self.command.append("--definition-id=" + asset + "#" + domain) - self.command.append("--value-type=" + value_type) + self.command.append("--type=" + type_) self.execute() return self diff --git a/client_cli/pytests/src/client_cli/iroha.py b/client_cli/pytests/src/client_cli/iroha.py index 7bfa0ba3af6..756a056dc6a 100644 --- a/client_cli/pytests/src/client_cli/iroha.py +++ b/client_cli/pytests/src/client_cli/iroha.py @@ -113,9 +113,9 @@ def asset_definitions(self) -> Dict[str, str]: for domain in domains: asset_defs = domain.get("asset_definitions") for asset_def in asset_defs.values(): - value_type = asset_def.get("value_type") - if value_type: - asset_definitions[asset_def["id"]] = value_type + type_ = asset_def.get("type_") + if type_: + asset_definitions[asset_def["id"]] = type_ return asset_definitions else: return {} diff --git a/client_cli/pytests/test/__init__.py b/client_cli/pytests/test/__init__.py index 1ca1a357dae..567088bd974 100644 --- a/client_cli/pytests/test/__init__.py +++ b/client_cli/pytests/test/__init__.py @@ -15,14 +15,14 @@ GIVEN_public_key, GIVEN_numeric_asset_for_account, GIVEN_numeric_value, - GIVEN_numeric_value_type, + GIVEN_numeric_type, GIVEN_random_character, GIVEN_registered_account, - GIVEN_registered_asset_definition_with_numeric_value_type, - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_numeric_type, + GIVEN_registered_asset_definition_with_store_type, GIVEN_registered_domain, GIVEN_registered_domain_with_uppercase_letter, - GIVEN_store_value_type, + GIVEN_store_type, GIVEN_string_with_reserved_character, GIVEN_string_with_whitespaces, before_all, diff --git a/client_cli/pytests/test/accounts/test_set_key_value_pair.py b/client_cli/pytests/test/accounts/test_set_key_value_pair.py index 86a4d9b9b90..01c426fb975 100644 --- a/client_cli/pytests/test/accounts/test_set_key_value_pair.py +++ b/client_cli/pytests/test/accounts/test_set_key_value_pair.py @@ -12,7 
+12,7 @@ def story_client_change_account_metadata(): def test_set_key_value_in_foreign_asset_after_granting_role( GIVEN_currently_authorized_account, GIVEN_registered_account, - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_store_type, ): assert 0 @@ -22,6 +22,6 @@ def test_set_key_value_in_foreign_asset_after_granting_role( def test_set_key_value_pair_for_another_account_asset_definition( GIVEN_currently_authorized_account, GIVEN_registered_account, - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_store_type, ): assert 0 diff --git a/client_cli/pytests/test/assets/conftest.py b/client_cli/pytests/test/assets/conftest.py index aa67951891e..5bc69f5b2fc 100644 --- a/client_cli/pytests/test/assets/conftest.py +++ b/client_cli/pytests/test/assets/conftest.py @@ -9,12 +9,12 @@ GIVEN_public_key, GIVEN_numeric_asset_for_account, GIVEN_numeric_value, - GIVEN_numeric_value_type, + GIVEN_numeric_type, GIVEN_registered_account, - GIVEN_registered_asset_definition_with_numeric_value_type, - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_numeric_type, + GIVEN_registered_asset_definition_with_store_type, GIVEN_registered_domain, - GIVEN_store_value_type, + GIVEN_store_type, before_all, before_each, ) diff --git a/client_cli/pytests/test/assets/test_burn_assets.py b/client_cli/pytests/test/assets/test_burn_assets.py index b7d22f3ab3b..97b832e693b 100644 --- a/client_cli/pytests/test/assets/test_burn_assets.py +++ b/client_cli/pytests/test/assets/test_burn_assets.py @@ -41,7 +41,7 @@ def test_burn_asset_for_account_in_same_domain( @allure.label("permission", "can_burn_assets_with_definition") @pytest.mark.xfail(reason="TO DO") def test_burn_other_user_asset( - GIVEN_registered_asset_definition_with_numeric_value_type, + GIVEN_registered_asset_definition_with_numeric_type, GIVEN_registered_account, GIVEN_numeric_value, ): diff 
--git a/client_cli/pytests/test/assets/test_mint_assets.py b/client_cli/pytests/test/assets/test_mint_assets.py index ef83e794df0..5cfdf0f1e60 100644 --- a/client_cli/pytests/test/assets/test_mint_assets.py +++ b/client_cli/pytests/test/assets/test_mint_assets.py @@ -11,34 +11,34 @@ def story_account_mint_asset(): @allure.label("sdk_test_id", "mint_asset_for_account_in_same_domain") def test_mint_asset_for_account_in_same_domain( - GIVEN_registered_asset_definition_with_numeric_value_type, + GIVEN_registered_asset_definition_with_numeric_type, GIVEN_registered_account, GIVEN_numeric_value, ): with allure.step( f'WHEN client_cli mint "{GIVEN_numeric_value}" of ' - f'"{GIVEN_registered_asset_definition_with_numeric_value_type}" ' + f'"{GIVEN_registered_asset_definition_with_numeric_type}" ' f'for the "{GIVEN_registered_account}"' ): client_cli.mint().asset( account=GIVEN_registered_account, - asset_definition=GIVEN_registered_asset_definition_with_numeric_value_type, - value_of_value_type=GIVEN_numeric_value, + asset_definition=GIVEN_registered_asset_definition_with_numeric_type, + value_of_type=GIVEN_numeric_value, ) with allure.step( f'THEN "{GIVEN_registered_account}" ' f'should have the "{GIVEN_numeric_value}" of ' - f'"{GIVEN_registered_asset_definition_with_numeric_value_type}"' + f'"{GIVEN_registered_asset_definition_with_numeric_type}"' ): iroha.should( have.asset( - f"{GIVEN_registered_asset_definition_with_numeric_value_type.name}##" + f"{GIVEN_registered_asset_definition_with_numeric_type.name}##" f"{GIVEN_registered_account}" ) ) iroha.should( have.asset_has_quantity( - f"{GIVEN_registered_asset_definition_with_numeric_value_type.name}##" + f"{GIVEN_registered_asset_definition_with_numeric_type.name}##" f"{GIVEN_registered_account}", GIVEN_numeric_value, ) @@ -55,7 +55,7 @@ def test_mint_asset_quantity_after_minting(GIVEN_minted_asset_quantity): client_cli.mint().asset( account=GIVEN_minted_asset_quantity.account, 
asset_definition=GIVEN_minted_asset_quantity.definition, - value_of_value_type="1", + value_of_type="1", ) expected_quantity = int(GIVEN_minted_asset_quantity.value) + 1 with allure.step( diff --git a/client_cli/pytests/test/assets/test_register_asset_definitions.py b/client_cli/pytests/test/assets/test_register_asset_definitions.py index 03233ccc373..6ef4c4f9a6f 100644 --- a/client_cli/pytests/test/assets/test_register_asset_definitions.py +++ b/client_cli/pytests/test/assets/test_register_asset_definitions.py @@ -11,19 +11,19 @@ def story_account_registers_asset_definitions(): allure.dynamic.label("permission", "no_permission_required") -@allure.label("sdk_test_id", "register_asset_definition_with_numeric_value_type") -def test_register_asset_definition_with_numeric_value_type( - GIVEN_fake_asset_name, GIVEN_registered_domain, GIVEN_numeric_value_type +@allure.label("sdk_test_id", "register_asset_definition_with_numeric_type") +def test_register_asset_definition_with_numeric_type( + GIVEN_fake_asset_name, GIVEN_registered_domain, GIVEN_numeric_type ): with allure.step( f'WHEN client_cli registers the asset_definition "{GIVEN_fake_asset_name}" ' - f'with "{GIVEN_numeric_value_type}" value type' + f'with "{GIVEN_numeric_type}" value type' f'in the "{GIVEN_registered_domain.name}" domain' ): client_cli.register().asset().definition( asset=GIVEN_fake_asset_name, domain=GIVEN_registered_domain.name, - value_type=GIVEN_numeric_value_type, + type_=GIVEN_numeric_type, ) with allure.step(f'THEN Iroha should have the asset "{GIVEN_fake_asset_name}"'): iroha.should( @@ -33,37 +33,19 @@ def test_register_asset_definition_with_numeric_value_type( ) -@allure.label("sdk_test_id", "register_asset_definition_with_too_long_name") -def test_register_asset_definition_with_too_long_name( - GIVEN_129_length_name, GIVEN_registered_domain, GIVEN_numeric_value_type -): - with allure.step( - f'WHEN client_cli registers the asset_definition "{GIVEN_129_length_name}" ' - f'with 
"{GIVEN_numeric_value_type}" value type' - f'in the "{GIVEN_registered_domain.name}" domain' - ): - client_cli.register().asset().definition( - asset=GIVEN_129_length_name, - domain=GIVEN_registered_domain.name, - value_type=GIVEN_numeric_value_type, - ) - with allure.step(f'THEN Iroha should have the asset "{GIVEN_129_length_name}"'): - client_cli.should(have.error(Stderr.TOO_LONG.value)) - - -@allure.label("sdk_test_id", "register_asset_definition_with_store_value_type") -def test_register_asset_definition_with_store_value_type( - GIVEN_fake_asset_name, GIVEN_registered_domain, GIVEN_store_value_type +@allure.label("sdk_test_id", "register_asset_definition_with_store_type") +def test_register_asset_definition_with_store_type( + GIVEN_fake_asset_name, GIVEN_registered_domain, GIVEN_store_type ): with allure.step( f'WHEN client_cli registers the asset_definition "{GIVEN_fake_asset_name}" ' - f'with "{GIVEN_store_value_type}" value type' + f'with "{GIVEN_store_type}" value type' f'in the "{GIVEN_registered_domain.name}" domain' ): client_cli.register().asset().definition( asset=GIVEN_fake_asset_name, domain=GIVEN_registered_domain.name, - value_type=GIVEN_store_value_type, + type_=GIVEN_store_type, ) with allure.step(f'THEN Iroha should have the asset "{GIVEN_fake_asset_name}"'): iroha.should( @@ -91,21 +73,21 @@ def test_register_fixed_asset_definition( @allure.label("sdk_test_id", "register_asset_with_existing_name") def test_register_asset_with_existing_name( - GIVEN_registered_asset_definition_with_numeric_value_type, + GIVEN_registered_asset_definition_with_numeric_type, ): with allure.step( f"WHEN account tries to register an asset definition " - f'with the same name "{GIVEN_registered_asset_definition_with_numeric_value_type.name}"' - f'in the "{GIVEN_registered_asset_definition_with_numeric_value_type.domain}" domain' + f'with the same name "{GIVEN_registered_asset_definition_with_numeric_type.name}"' + f'in the 
"{GIVEN_registered_asset_definition_with_numeric_type.domain}" domain' ): client_cli.register().asset().definition( - asset=GIVEN_registered_asset_definition_with_numeric_value_type.name, - domain=GIVEN_registered_asset_definition_with_numeric_value_type.domain, - value_type=GIVEN_registered_asset_definition_with_numeric_value_type.value_type, + asset=GIVEN_registered_asset_definition_with_numeric_type.name, + domain=GIVEN_registered_asset_definition_with_numeric_type.domain, + type_=GIVEN_registered_asset_definition_with_numeric_type.type_, ) with allure.step( f'THEN client_cli should have the asset definition error: "' - f'{GIVEN_registered_asset_definition_with_numeric_value_type.__repr__()}"' + f'{GIVEN_registered_asset_definition_with_numeric_type.__repr__()}"' ): client_cli.should(have.error(Stderr.REPETITION.value)) @@ -117,7 +99,7 @@ def test_register_asset_with_empty_name(GIVEN_registered_domain): f'in the "{GIVEN_registered_domain.name}" domain' ): client_cli.register().asset().definition( - asset="", domain=GIVEN_registered_domain.name, value_type="Numeric" + asset="", domain=GIVEN_registered_domain.name, type_="Numeric" ) with allure.step(f'THEN сlient_cli should have the asset error: "{Stderr.EMPTY}"'): client_cli.should(have.error(Stderr.EMPTY.value)) @@ -125,7 +107,7 @@ def test_register_asset_with_empty_name(GIVEN_registered_domain): @allure.label("sdk_test_id", "register_asset_with_not_existing_domain") def test_register_asset_with_not_existing_domain( - GIVEN_not_existing_name, GIVEN_numeric_value_type, GIVEN_fake_asset_name + GIVEN_not_existing_name, GIVEN_numeric_type, GIVEN_fake_asset_name ): with allure.step( "WHEN client_cli tries to register an asset definition with not existing domain" @@ -133,14 +115,14 @@ def test_register_asset_with_not_existing_domain( client_cli.register().asset().definition( asset=GIVEN_fake_asset_name, domain=GIVEN_not_existing_name, - value_type=GIVEN_numeric_value_type, + type_=GIVEN_numeric_type, ) with 
allure.step("THEN client_cli should have the error"): client_cli.should(have.error(Stderr.FAILED_TO_FIND_DOMAIN.value)) -@allure.label("sdk_test_id", "register_asset_with_too_long_value_type") -def test_register_asset_with_too_long_value_type( +@allure.label("sdk_test_id", "register_asset_with_too_long_type") +def test_register_asset_with_too_long_type( GIVEN_fake_asset_name, GIVEN_registered_domain ): with allure.step( @@ -149,7 +131,7 @@ def test_register_asset_with_too_long_value_type( client_cli.register().asset().definition( asset=GIVEN_fake_asset_name, domain=GIVEN_registered_domain.name, - value_type="coin", + type_="coin", ) with allure.step("THEN client_cli should have the error"): - client_cli.should(have.error(Stderr.INVALID_VALUE_TYPE.value)) + client_cli.should(have.error(Stderr.INVALID_TYPE.value)) diff --git a/client_cli/pytests/test/conftest.py b/client_cli/pytests/test/conftest.py index 364456252ff..4a4dfaacde9 100644 --- a/client_cli/pytests/test/conftest.py +++ b/client_cli/pytests/test/conftest.py @@ -93,13 +93,13 @@ def GIVEN_currently_authorized_account(): @pytest.fixture() def GIVEN_currently_account_quantity_with_two_quantity_of_asset( - GIVEN_currently_authorized_account, GIVEN_numeric_value_type, GIVEN_fake_asset_name + GIVEN_currently_authorized_account, GIVEN_numeric_type, GIVEN_fake_asset_name ): """Fixture to get the currently authorized account asset""" asset_def = AssetDefinition( name=GIVEN_fake_asset_name, domain=GIVEN_currently_authorized_account.domain, - value_type=GIVEN_numeric_value_type, + type_=GIVEN_numeric_type, ) asset = Asset( definition=asset_def, value="2", account=GIVEN_currently_authorized_account @@ -112,26 +112,26 @@ def GIVEN_currently_account_quantity_with_two_quantity_of_asset( client_cli.register().asset().definition( asset=asset.definition.name, domain=asset.definition.domain, - value_type=asset.definition.value_type, + type_=asset.definition.type_, ) client_cli.mint().asset( 
account=GIVEN_currently_authorized_account, asset_definition=asset.definition, - value_of_value_type=asset.value, + value_of_type=asset.value, ) return asset @pytest.fixture() def GIVEN_numeric_asset_for_account( - request, GIVEN_numeric_value_type, GIVEN_fake_asset_name, GIVEN_numeric_value + request, GIVEN_numeric_type, GIVEN_fake_asset_name, GIVEN_numeric_value ): """Fixture to get an asset for a given account and domain with specified quantity.""" account, domain = request.param.split("@") account = Account(signatory=account, domain=domain) asset_def = AssetDefinition( - name=GIVEN_fake_asset_name, domain=domain, value_type=GIVEN_numeric_value_type + name=GIVEN_fake_asset_name, domain=domain, type_=GIVEN_numeric_type ) asset = Asset( definition=asset_def, value=GIVEN_numeric_value, account=account.signatory @@ -143,26 +143,26 @@ def GIVEN_numeric_asset_for_account( client_cli.register().asset().definition( asset=asset.definition.name, domain=asset.definition.domain, - value_type=asset.definition.value_type, + type_=asset.definition.type_, ) client_cli.mint().asset( account=account, asset_definition=asset.definition, - value_of_value_type=asset.value, + value_of_type=asset.value, ) return asset @pytest.fixture() -def GIVEN_registered_asset_definition_with_numeric_value_type( - GIVEN_registered_domain, GIVEN_numeric_value_type, GIVEN_fake_asset_name +def GIVEN_registered_asset_definition_with_numeric_type( + GIVEN_registered_domain, GIVEN_numeric_type, GIVEN_fake_asset_name ): """Fixture to create and register an asset definition with numeric value type.""" asset_def = AssetDefinition( name=GIVEN_fake_asset_name, domain=GIVEN_registered_domain.name, - value_type=GIVEN_numeric_value_type, + type_=GIVEN_numeric_type, ) with allure.step( f'GIVEN the asset_definition "{GIVEN_fake_asset_name}" ' @@ -171,14 +171,14 @@ def GIVEN_registered_asset_definition_with_numeric_value_type( client_cli.register().asset().definition( asset=asset_def.name, domain=asset_def.domain, - 
value_type=asset_def.value_type, + type_=asset_def.type_, ) return asset_def @pytest.fixture() def GIVEN_minted_asset_quantity( - GIVEN_registered_asset_definition_with_numeric_value_type, + GIVEN_registered_asset_definition_with_numeric_type, GIVEN_registered_account, GIVEN_numeric_value, ): @@ -187,26 +187,26 @@ def GIVEN_minted_asset_quantity( """ asset = Asset( account=GIVEN_registered_account, - definition=GIVEN_registered_asset_definition_with_numeric_value_type, + definition=GIVEN_registered_asset_definition_with_numeric_type, value=GIVEN_numeric_value, ) client_cli.mint().asset( account=asset.account, asset_definition=asset.definition, - value_of_value_type=asset.value, + value_of_type=asset.value, ) return asset @pytest.fixture() -def GIVEN_registered_asset_definition_with_store_value_type( - GIVEN_registered_domain, GIVEN_store_value_type, GIVEN_fake_asset_name +def GIVEN_registered_asset_definition_with_store_type( + GIVEN_registered_domain, GIVEN_store_type, GIVEN_fake_asset_name ): """Fixture to create and register an asset definition with store value type.""" asset_def = AssetDefinition( name=GIVEN_fake_asset_name, domain=GIVEN_registered_domain.name, - value_type=GIVEN_store_value_type, + type_=GIVEN_store_type, ) with allure.step( f'GIVEN the asset_definition "{GIVEN_fake_asset_name}" ' @@ -215,7 +215,7 @@ def GIVEN_registered_asset_definition_with_store_value_type( client_cli.register().asset().definition( asset=asset_def.name, domain=asset_def.domain, - value_type=asset_def.value_type, + type_=asset_def.type_, ) return asset_def @@ -290,19 +290,19 @@ def GIVEN_key_with_invalid_character_in_key( @pytest.fixture() -def GIVEN_numeric_value_type(): +def GIVEN_numeric_type(): """Fixture to provide a numeric value type.""" - value_type = ValueTypes.NUMERIC.value - with allure.step(f'GIVEN a "{value_type}" value type'): - return value_type + type_ = ValueTypes.NUMERIC.value + with allure.step(f'GIVEN a "{type_}" value type'): + return type_
@pytest.fixture() -def GIVEN_store_value_type(): +def GIVEN_store_type(): """Fixture to provide a store value type.""" - value_type = ValueTypes.STORE.value - with allure.step(f'GIVEN a "{value_type}" value type'): - return value_type + type_ = ValueTypes.STORE.value + with allure.step(f'GIVEN a "{type_}" value type'): + return type_ @pytest.fixture() diff --git a/client_cli/pytests/test/domains/test_register_domains.py b/client_cli/pytests/test/domains/test_register_domains.py index 7f01073934e..4b9752c9b5d 100644 --- a/client_cli/pytests/test/domains/test_register_domains.py +++ b/client_cli/pytests/test/domains/test_register_domains.py @@ -66,30 +66,6 @@ def test_register_one_letter_domain(GIVEN_random_character): iroha.should(have.domain(GIVEN_random_character)) -@allure.label("sdk_test_id", "register_max_length_domain") -def test_register_max_length_domain(GIVEN_128_length_name): - with allure.step( - f'WHEN client_cli registers the longest domain "{GIVEN_128_length_name}"' - ): - client_cli.register().domain(GIVEN_128_length_name) - with allure.step( - f'THEN Iroha should have the longest domain "{GIVEN_128_length_name}"' - ): - iroha.should(have.domain(GIVEN_128_length_name)) - - -@allure.label("sdk_test_id", "register_domain_with_too_long_name") -def test_register_domain_with_too_long_name(GIVEN_129_length_name): - with allure.step( - f'WHEN client_cli registers the domain "{GIVEN_129_length_name}" with too long name' - ): - client_cli.register().domain(GIVEN_129_length_name) - with allure.step( - f'THEN client_cli should have the too long domain error: "{Stderr.TOO_LONG}"' - ): - client_cli.should(have.error(Stderr.TOO_LONG.value)) - - @allure.label("sdk_test_id", "register_domain_with_reserved_character") def test_register_domain_with_reserved_character(GIVEN_string_with_reserved_character): with allure.step( diff --git a/client_cli/pytests/test/roles/test_register_roles.py b/client_cli/pytests/test/roles/test_register_roles.py index 
583976956a6..de84cf1603d 100644 --- a/client_cli/pytests/test/roles/test_register_roles.py +++ b/client_cli/pytests/test/roles/test_register_roles.py @@ -16,7 +16,7 @@ def test_register_role(GIVEN_fake_name): @allure.label("sdk_test_id", "attach_permissions_to_role") @pytest.mark.xfail(reason="TO DO") def test_attach_permissions_to_role( - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_store_type, ): assert 0 @@ -26,6 +26,6 @@ def test_attach_permissions_to_role( def test_grant_role_to_account( GIVEN_currently_authorized_account, GIVEN_registered_account, - GIVEN_registered_asset_definition_with_store_value_type, + GIVEN_registered_asset_definition_with_store_type, ): assert 0 diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs index 3a2ea0caa3e..3ccf2ddea90 100644 --- a/client_cli/src/main.rs +++ b/client_cli/src/main.rs @@ -13,9 +13,9 @@ use eyre::{eyre, Error, Result, WrapErr}; use iroha::{ client::{Client, QueryResult}, config::Config, - data_model::{metadata::MetadataValueBox, prelude::*}, + data_model::prelude::*, }; -use iroha_primitives::addr::SocketAddr; +use iroha_primitives::{addr::SocketAddr, json::JsonString}; use thiserror::Error; /// Re-usable clap `--metadata ` (`-m`) argument. 
@@ -30,20 +30,19 @@ pub struct MetadataArgs { } impl MetadataArgs { - fn load(self) -> Result { - let value: Option = self + fn load(self) -> Result { + let value: Option = self .metadata .map(|path| { let content = fs::read_to_string(&path).wrap_err_with(|| { eyre!("Failed to read the metadata file `{}`", path.display()) })?; - let metadata: UnlimitedMetadata = - json5::from_str(&content).wrap_err_with(|| { - eyre!( - "Failed to deserialize metadata from file `{}`", - path.display() - ) - })?; + let metadata: Metadata = json5::from_str(&content).wrap_err_with(|| { + eyre!( + "Failed to deserialize metadata from file `{}`", + path.display() + ) + })?; Ok::<_, eyre::Report>(metadata) }) .transpose()?; @@ -61,20 +60,18 @@ pub struct MetadataValueArg { /// The following types are supported: /// Numbers: decimal with optional point /// Booleans: false/true - /// JSON: e.g. {"Vec":[{"String":"a"},{"String":"b"}]} + /// Objects: e.g. {"Vec":[{"String":"a"},{"String":"b"}]} #[arg(short, long)] - value: MetadataValueBox, + value: JsonString, } impl FromStr for MetadataValueArg { type Err = Error; fn from_str(s: &str) -> Result { - s.parse::() - .map(MetadataValueBox::Bool) - .or_else(|_| s.parse::().map(MetadataValueBox::Numeric)) - .or_else(|_| serde_json::from_str::(s).map_err(Into::into)) - .map(|value| MetadataValueArg { value }) + Ok(MetadataValueArg { + value: JsonString::from_str(s)?, + }) } } @@ -237,7 +234,7 @@ fn color_mode() -> ColorMode { #[allow(clippy::shadow_unrelated)] fn submit( instructions: impl Into, - metadata: UnlimitedMetadata, + metadata: Metadata, context: &mut dyn RunContext, ) -> Result<()> { let iroha = context.client_from_config(); @@ -490,7 +487,7 @@ mod domain { value: MetadataValueArg { value }, } = self; let set_key_value = SetKeyValue::domain(id, key, value); - submit([set_key_value], UnlimitedMetadata::new(), context) + submit([set_key_value], Metadata::default(), context) .wrap_err("Failed to submit Set instruction") } } @@ -510,7 +507,7 
@@ mod domain { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { id, key } = self; let remove_key_value = RemoveKeyValue::domain(id, key); - submit([remove_key_value], UnlimitedMetadata::new(), context) + submit([remove_key_value], Metadata::default(), context) .wrap_err("Failed to submit Remove instruction") } } @@ -712,7 +709,7 @@ mod asset { pub unmintable: bool, /// Value type stored in asset #[arg(short, long)] - pub value_type: AssetValueType, + pub r#type: AssetType, #[command(flatten)] pub metadata: MetadataArgs, } @@ -721,11 +718,11 @@ mod asset { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { definition_id, - value_type, + r#type, unmintable, metadata, } = self; - let mut asset_definition = AssetDefinition::new(definition_id, value_type); + let mut asset_definition = AssetDefinition::new(definition_id, r#type); if unmintable { asset_definition = asset_definition.mintable_once(); } @@ -887,7 +884,7 @@ mod asset { } = self; let set = iroha::data_model::isi::SetKeyValue::asset(asset_id, key, value); - submit([set], UnlimitedMetadata::default(), context)?; + submit([set], Metadata::default(), context)?; Ok(()) } } @@ -905,7 +902,7 @@ mod asset { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { asset_id, key } = self; let remove = iroha::data_model::isi::RemoveKeyValue::asset(asset_id, key); - submit([remove], UnlimitedMetadata::default(), context)?; + submit([remove], Metadata::default(), context)?; Ok(()) } } @@ -1036,7 +1033,7 @@ mod wasm { submit( WasmSmartContract::from_compiled(raw_data), - UnlimitedMetadata::new(), + Metadata::default(), context, ) .wrap_err("Failed to submit a Wasm smart contract") @@ -1076,7 +1073,7 @@ mod json { match self.variant { Variant::Transaction => { let instructions: Vec = json5::from_str(&string_content)?; - submit(instructions, UnlimitedMetadata::new(), context) + submit(instructions, Metadata::default(), context) .wrap_err("Failed to submit parsed 
instructions") } Variant::Query => { @@ -1110,22 +1107,15 @@ mod tests { } // Boolean values - case!("true", true.into()); - case!("false", false.into()); + case!("true", JsonString::new(true)); + case!("false", JsonString::new(false)); // Numeric values - case!("123", numeric!(123).into()); - case!("123.0", numeric!(123.0).into()); + case!("\"123\"", JsonString::new(numeric!(123))); + case!("\"123.0\"", JsonString::new(numeric!(123.0))); // JSON Value let json_str = r#"{"Vec":[{"String":"a"},{"String":"b"}]}"#; case!(json_str, serde_json::from_str(json_str).unwrap()); } - - #[test] - fn error_parse_invalid_value() { - let invalid_str = "not_a_valid_value"; - let _invalid_value = MetadataValueArg::from_str(invalid_str) - .expect_err("Should fail invalid type from string but passed"); - } } diff --git a/config/src/parameters/actual.rs b/config/src/parameters/actual.rs index 0bea7dd1b13..8e51f814c44 100644 --- a/config/src/parameters/actual.rs +++ b/config/src/parameters/actual.rs @@ -10,12 +10,8 @@ use std::{ use error_stack::{Result, ResultExt}; use iroha_config_base::{read::ConfigReader, toml::TomlSource, util::Bytes, WithOrigin}; use iroha_crypto::{KeyPair, PublicKey}; -use iroha_data_model::{ - metadata::Limits as MetadataLimits, peer::PeerId, transaction::TransactionLimits, ChainId, - LengthLimits, -}; +use iroha_data_model::{peer::PeerId, ChainId}; use iroha_primitives::{addr::SocketAddr, unique_vec::UniqueVec}; -use serde::{Deserialize, Serialize}; use url::Url; pub use user::{DevTelemetry, Logger, Snapshot}; @@ -42,7 +38,6 @@ pub struct Root { pub snapshot: Snapshot, pub telemetry: Option, pub dev_telemetry: DevTelemetry, - pub chain_wide: ChainWide, } /// See [`Root::from_toml_source`] @@ -168,78 +163,14 @@ impl Default for LiveQueryStore { #[derive(Debug, Clone, Copy)] pub struct BlockSync { pub gossip_period: Duration, - pub gossip_max_size: NonZeroU32, + pub gossip_size: NonZeroU32, } #[derive(Debug, Clone, Copy)] #[allow(missing_docs)] pub struct 
TransactionGossiper { pub gossip_period: Duration, - pub gossip_max_size: NonZeroU32, -} - -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -#[allow(missing_docs)] -pub struct ChainWide { - pub max_transactions_in_block: NonZeroU32, - pub block_time: Duration, - pub commit_time: Duration, - pub transaction_limits: TransactionLimits, - pub domain_metadata_limits: MetadataLimits, - pub asset_definition_metadata_limits: MetadataLimits, - pub account_metadata_limits: MetadataLimits, - pub asset_metadata_limits: MetadataLimits, - pub trigger_metadata_limits: MetadataLimits, - pub ident_length_limits: LengthLimits, - pub executor_runtime: WasmRuntime, - pub wasm_runtime: WasmRuntime, -} - -impl ChainWide { - /// Calculate pipeline time based on the block time and commit time - pub fn pipeline_time(&self) -> Duration { - self.block_time + self.commit_time - } - - /// Estimates as `block_time + commit_time / 2` - pub fn consensus_estimation(&self) -> Duration { - self.block_time + (self.commit_time / 2) - } -} - -impl Default for ChainWide { - fn default() -> Self { - Self { - max_transactions_in_block: defaults::chain_wide::MAX_TXS, - block_time: defaults::chain_wide::BLOCK_TIME, - commit_time: defaults::chain_wide::COMMIT_TIME, - transaction_limits: defaults::chain_wide::TRANSACTION_LIMITS, - domain_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - account_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - asset_definition_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - asset_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - trigger_metadata_limits: defaults::chain_wide::METADATA_LIMITS, - ident_length_limits: defaults::chain_wide::IDENT_LENGTH_LIMITS, - executor_runtime: WasmRuntime::default(), - wasm_runtime: WasmRuntime::default(), - } - } -} - -#[allow(missing_docs)] -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct WasmRuntime { - pub fuel_limit: u64, - pub max_memory: Bytes, -} - -impl Default for WasmRuntime 
{ - fn default() -> Self { - Self { - fuel_limit: defaults::chain_wide::WASM_FUEL_LIMIT, - max_memory: defaults::chain_wide::WASM_MAX_MEMORY, - } - } + pub gossip_size: NonZeroU32, } #[derive(Debug, Clone)] diff --git a/config/src/parameters/defaults.rs b/config/src/parameters/defaults.rs index e68bfbf1c73..aa5e96ce6a9 100644 --- a/config/src/parameters/defaults.rs +++ b/config/src/parameters/defaults.rs @@ -8,7 +8,6 @@ use std::{ time::Duration, }; -use iroha_data_model::{prelude::MetadataLimits, transaction::TransactionLimits, LengthLimits}; use nonzero_ext::nonzero; pub mod queue { @@ -29,10 +28,10 @@ pub mod network { use super::*; pub const TRANSACTION_GOSSIP_PERIOD: Duration = Duration::from_secs(1); - pub const TRANSACTION_GOSSIP_MAX_SIZE: NonZeroU32 = nonzero!(500u32); + pub const TRANSACTION_GOSSIP_SIZE: NonZeroU32 = nonzero!(500u32); pub const BLOCK_GOSSIP_PERIOD: Duration = Duration::from_secs(10); - pub const BLOCK_GOSSIP_MAX_SIZE: NonZeroU32 = nonzero!(4u32); + pub const BLOCK_GOSSIP_SIZE: NonZeroU32 = nonzero!(4u32); pub const IDLE_TIMEOUT: Duration = Duration::from_secs(60); } @@ -45,41 +44,6 @@ pub mod snapshot { pub const CREATE_EVERY: Duration = Duration::from_secs(60); } -pub mod chain_wide { - use iroha_config_base::util::Bytes; - - use super::*; - - pub const MAX_TXS: NonZeroU32 = nonzero!(2_u32.pow(9)); - pub const BLOCK_TIME: Duration = Duration::from_secs(2); - pub const COMMIT_TIME: Duration = Duration::from_secs(4); - pub const WASM_FUEL_LIMIT: u64 = 55_000_000; - pub const WASM_MAX_MEMORY: Bytes = Bytes(500 * 2_u32.pow(20)); - - /// Default estimation of consensus duration. 
- pub const CONSENSUS_ESTIMATION: Duration = - match BLOCK_TIME.checked_add(match COMMIT_TIME.checked_div(2) { - Some(x) => x, - None => unreachable!(), - }) { - Some(x) => x, - None => unreachable!(), - }; - - /// Default limits for metadata - pub const METADATA_LIMITS: MetadataLimits = MetadataLimits::new(2_u32.pow(20), 2_u32.pow(12)); - /// Default limits for ident length - pub const IDENT_LENGTH_LIMITS: LengthLimits = LengthLimits::new(1, 2_u32.pow(7)); - /// Default maximum number of instructions and expressions per transaction - pub const MAX_INSTRUCTION_NUMBER: u64 = 2_u64.pow(12); - /// Default maximum number of instructions and expressions per transaction - pub const MAX_WASM_SIZE_BYTES: u64 = 4 * 2_u64.pow(20); - - /// Default transaction limits - pub const TRANSACTION_LIMITS: TransactionLimits = - TransactionLimits::new(MAX_INSTRUCTION_NUMBER, MAX_WASM_SIZE_BYTES); -} - pub mod torii { use std::time::Duration; diff --git a/config/src/parameters/user.rs b/config/src/parameters/user.rs index a619626e896..c3fd636ec8f 100644 --- a/config/src/parameters/user.rs +++ b/config/src/parameters/user.rs @@ -25,10 +25,7 @@ use iroha_config_base::{ ReadConfig, WithOrigin, }; use iroha_crypto::{PrivateKey, PublicKey}; -use iroha_data_model::{ - metadata::Limits as MetadataLimits, peer::PeerId, transaction::TransactionLimits, ChainId, - LengthLimits, Level, -}; +use iroha_data_model::{peer::PeerId, ChainId, Level}; use iroha_primitives::{addr::SocketAddr, unique_vec::UniqueVec}; use serde::Deserialize; use url::Url; @@ -81,8 +78,6 @@ pub struct Root { dev_telemetry: DevTelemetry, #[config(nested)] torii: Torii, - #[config(nested)] - chain_wide: ChainWide, } #[derive(thiserror::Error, Debug, Copy, Clone)] @@ -119,7 +114,6 @@ impl Root { let dev_telemetry = self.dev_telemetry; let (torii, live_query_store) = self.torii.parse(); let telemetry = self.telemetry.map(actual::Telemetry::from); - let chain_wide = self.chain_wide.parse(); let peer_id = 
key_pair.as_ref().map(|key_pair| { PeerId::new( @@ -156,7 +150,6 @@ impl Root { snapshot, telemetry, dev_telemetry, - chain_wide, }) } } @@ -272,12 +265,12 @@ pub struct Network { /// Peer-to-peer address #[config(env = "P2P_ADDRESS")] pub address: WithOrigin, - #[config(default = "defaults::network::BLOCK_GOSSIP_MAX_SIZE")] - pub block_gossip_max_size: NonZeroU32, + #[config(default = "defaults::network::BLOCK_GOSSIP_SIZE")] + pub block_gossip_size: NonZeroU32, #[config(default = "defaults::network::BLOCK_GOSSIP_PERIOD.into()")] pub block_gossip_period_ms: DurationMs, - #[config(default = "defaults::network::TRANSACTION_GOSSIP_MAX_SIZE")] - pub transaction_gossip_max_size: NonZeroU32, + #[config(default = "defaults::network::TRANSACTION_GOSSIP_SIZE")] + pub transaction_gossip_size: NonZeroU32, #[config(default = "defaults::network::TRANSACTION_GOSSIP_PERIOD.into()")] pub transaction_gossip_period_ms: DurationMs, /// Duration of time after which connection with peer is terminated if peer is idle @@ -295,9 +288,9 @@ impl Network { ) { let Self { address, - block_gossip_max_size, + block_gossip_size, block_gossip_period_ms: block_gossip_period, - transaction_gossip_max_size, + transaction_gossip_size, transaction_gossip_period_ms: transaction_gossip_period, idle_timeout_ms: idle_timeout, } = self; @@ -309,11 +302,11 @@ impl Network { }, actual::BlockSync { gossip_period: block_gossip_period.get(), - gossip_max_size: block_gossip_max_size, + gossip_size: block_gossip_size, }, actual::TransactionGossiper { gossip_period: transaction_gossip_period.get(), - gossip_max_size: transaction_gossip_max_size, + gossip_size: transaction_gossip_size, }, ) } @@ -433,81 +426,6 @@ pub struct Snapshot { pub store_dir: WithOrigin, } -// TODO: make serde -#[derive(Debug, Copy, Clone, ReadConfig)] -pub struct ChainWide { - #[config(default = "defaults::chain_wide::MAX_TXS")] - pub max_transactions_in_block: NonZeroU32, - #[config(default = "defaults::chain_wide::BLOCK_TIME.into()")] - 
pub block_time_ms: DurationMs, - #[config(default = "defaults::chain_wide::COMMIT_TIME.into()")] - pub commit_time_ms: DurationMs, - #[config(default = "defaults::chain_wide::TRANSACTION_LIMITS")] - pub transaction_limits: TransactionLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub domain_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub asset_definition_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub account_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub asset_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::METADATA_LIMITS")] - pub trigger_metadata_limits: MetadataLimits, - #[config(default = "defaults::chain_wide::IDENT_LENGTH_LIMITS")] - pub ident_length_limits: LengthLimits, - #[config(default = "defaults::chain_wide::WASM_FUEL_LIMIT")] - pub executor_fuel_limit: u64, - #[config(default = "defaults::chain_wide::WASM_MAX_MEMORY")] - pub executor_max_memory: Bytes, - #[config(default = "defaults::chain_wide::WASM_FUEL_LIMIT")] - pub wasm_fuel_limit: u64, - #[config(default = "defaults::chain_wide::WASM_MAX_MEMORY")] - pub wasm_max_memory: Bytes, -} - -impl ChainWide { - fn parse(self) -> actual::ChainWide { - let Self { - max_transactions_in_block, - block_time_ms: DurationMs(block_time), - commit_time_ms: DurationMs(commit_time), - transaction_limits, - asset_metadata_limits, - trigger_metadata_limits, - asset_definition_metadata_limits, - account_metadata_limits, - domain_metadata_limits, - ident_length_limits, - executor_fuel_limit, - executor_max_memory, - wasm_fuel_limit, - wasm_max_memory, - } = self; - - actual::ChainWide { - max_transactions_in_block, - block_time, - commit_time, - transaction_limits, - asset_metadata_limits, - trigger_metadata_limits, - asset_definition_metadata_limits, - account_metadata_limits, - domain_metadata_limits, - 
ident_length_limits, - executor_runtime: actual::WasmRuntime { - fuel_limit: executor_fuel_limit, - max_memory: executor_max_memory, - }, - wasm_runtime: actual::WasmRuntime { - fuel_limit: wasm_fuel_limit, - max_memory: wasm_max_memory, - }, - } - } -} - #[derive(Debug, ReadConfig)] pub struct Torii { #[config(env = "API_ADDRESS")] diff --git a/config/tests/fixtures.rs b/config/tests/fixtures.rs index ae29cb5ba15..411b1460fd8 100644 --- a/config/tests/fixtures.rs +++ b/config/tests/fixtures.rs @@ -162,11 +162,11 @@ fn minimal_config_snapshot() { }, block_sync: BlockSync { gossip_period: 10s, - gossip_max_size: 4, + gossip_size: 4, }, transaction_gossiper: TransactionGossiper { gossip_period: 1s, - gossip_max_size: 500, + gossip_size: 500, }, live_query_store: LiveQueryStore { idle_time: 30s, @@ -197,51 +197,6 @@ fn minimal_config_snapshot() { dev_telemetry: DevTelemetry { out_file: None, }, - chain_wide: ChainWide { - max_transactions_in_block: 512, - block_time: 2s, - commit_time: 4s, - transaction_limits: TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 4194304, - }, - domain_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - asset_definition_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - account_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - asset_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - trigger_metadata_limits: Limits { - capacity: 1048576, - max_entry_len: 4096, - }, - ident_length_limits: LengthLimits { - min: 1, - max: 128, - }, - executor_runtime: WasmRuntime { - fuel_limit: 55000000, - max_memory: Bytes( - 524288000, - ), - }, - wasm_runtime: WasmRuntime { - fuel_limit: 55000000, - max_memory: Bytes( - 524288000, - ), - }, - }, }"#]].assert_eq(&format!("{config:#?}")); } diff --git a/config/tests/fixtures/full.toml b/config/tests/fixtures/full.toml index 02404b6cf3d..22aa92459af 100644 --- 
a/config/tests/fixtures/full.toml +++ b/config/tests/fixtures/full.toml @@ -11,9 +11,9 @@ signed_file = "genesis.signed.scale" [network] address = "localhost:3840" block_gossip_period_ms = 10_000 -block_gossip_max_size = 4 +block_gossip_size = 4 transaction_gossip_period_ms = 1_000 -transaction_gossip_max_size = 500 +transaction_gossip_size = 500 idle_timeout_ms = 10_000 [torii] @@ -40,8 +40,8 @@ level = "TRACE" format = "compact" [queue] -capacity = 65536 -capacity_per_user = 65536 +capacity = 65_536 +capacity_per_user = 65_536 transaction_time_to_live_ms = 100 future_threshold_ms = 50 @@ -58,16 +58,3 @@ max_retry_delay_exponent = 4 [dev_telemetry] out_file = "./dev_telemetry.json" - -[chain_wide] -max_transactions_in_block = 512 -block_time_ms = 2_000 -commit_time_ms = 4_000 -transaction_limits = { max_instruction_number = 4096, max_wasm_size_bytes = 4194304 } -asset_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -asset_definition_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -account_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -domain_metadata_limits = { capacity = 1048576, max_entry_len = 4096 } -ident_length_limits = { min = 1, max = 128 } -wasm_fuel_limit = 55000000 -wasm_max_memory = 524288000 diff --git a/configs/peer.template.toml b/configs/peer.template.toml index 2c32102420d..0dcfb679abb 100644 --- a/configs/peer.template.toml +++ b/configs/peer.template.toml @@ -20,9 +20,9 @@ [network] # address = # block_gossip_period_ms = 10_000 -# block_gossip_max_size = 4 +# block_gossip_size = 4 # transaction_gossip_period_ms = 1_000 -# transaction_gossip_max_size = 500 +# transaction_gossip_size = 500 # idle_timeout_ms = 60_000 [torii] diff --git a/configs/swarm/docker-compose.local.yml b/configs/swarm/docker-compose.local.yml index 16142fa6b39..6049d4e1339 100644 --- a/configs/swarm/docker-compose.local.yml +++ b/configs/swarm/docker-compose.local.yml @@ -50,7 +50,7 @@ services: --private-key 
$$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " irohad1: depends_on: diff --git a/configs/swarm/docker-compose.single.yml b/configs/swarm/docker-compose.single.yml index 49dbec866de..75dffcb1b0a 100644 --- a/configs/swarm/docker-compose.single.yml +++ b/configs/swarm/docker-compose.single.yml @@ -49,5 +49,5 @@ services: --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " diff --git a/configs/swarm/docker-compose.yml b/configs/swarm/docker-compose.yml index e96c7f31aef..f54d50da59e 100644 --- a/configs/swarm/docker-compose.yml +++ b/configs/swarm/docker-compose.yml @@ -42,7 +42,7 @@ services: --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " irohad1: image: hyperledger/iroha:dev diff --git a/configs/swarm/executor.wasm b/configs/swarm/executor.wasm index 6d82237ec4e..8cf4a4e0d8c 100644 Binary files a/configs/swarm/executor.wasm and b/configs/swarm/executor.wasm differ diff --git a/configs/swarm/genesis.json b/configs/swarm/genesis.json index b4dc7488a7d..c952e7dd08a 100644 --- a/configs/swarm/genesis.json +++ b/configs/swarm/genesis.json @@ -8,9 +8,7 @@ "id": "wonderland", "logo": null, "metadata": { - "key": { - "String": "value" - } + "key": "value" } } } @@ -20,9 +18,7 @@ "Account": { "id": "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland", "metadata": { - "key": { - "String": "value" - } + "key": "value" } } } @@ -32,9 +28,7 @@ "Account": { "id": "ed012004FF5B81046DDCCF19E2E451C45DFB6F53759D4EB30FA2EFA807284D1CC33016@wonderland", "metadata": { - "key": { - "String": "value" - } + "key": "value" } } } @@ -43,7 +37,7 @@ "Register": { "AssetDefinition": { "id": "rose#wonderland", - "value_type": "Numeric", + "type_": "Numeric", "mintable": "Infinitely", "logo": null, "metadata": {} @@ -71,7 +65,7 @@ "Register": { "AssetDefinition": { "id": 
"cabbage#garden_of_live_flowers", - "value_type": "Numeric", + "type_": "Numeric", "mintable": "Infinitely", "logo": null, "metadata": {} @@ -116,68 +110,26 @@ "Grant": { "Permission": { "object": { - "id": "CanSetParameters", + "name": "CanSetParameters", "payload": null }, "destination": "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland" } } }, - { - "NewParameter": "?MaxTransactionsInBlock=512" - }, - { - "NewParameter": "?BlockTime=2000" - }, - { - "NewParameter": "?CommitTimeLimit=4000" - }, - { - "NewParameter": "?TransactionLimits=4096,4194304_TL" - }, - { - "NewParameter": "?WSVDomainMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVAssetDefinitionMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVAccountMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVAssetMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVTriggerMetadataLimits=1048576,4096_ML" - }, - { - "NewParameter": "?WSVIdentLengthLimits=1,128_LL" - }, - { - "NewParameter": "?ExecutorFuelLimit=55000000" - }, - { - "NewParameter": "?ExecutorMaxMemory=524288000" - }, - { - "NewParameter": "?WASMFuelLimit=55000000" - }, - { - "NewParameter": "?WASMMaxMemory=524288000" - }, { "Register": { "Role": { "id": "ALICE_METADATA_ACCESS", "permissions": [ { - "id": "CanRemoveKeyValueInAccount", + "name": "CanRemoveKeyValueInAccount", "payload": { "account": "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland" } }, { - "id": "CanSetKeyValueInAccount", + "name": "CanSetKeyValueInAccount", "payload": { "account": "ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland" } diff --git a/core/benches/blocks/apply_blocks.rs b/core/benches/blocks/apply_blocks.rs index eef0b25d7dd..b8ea2bd46a0 100644 --- a/core/benches/blocks/apply_blocks.rs +++ b/core/benches/blocks/apply_blocks.rs @@ -1,5 +1,8 @@ use eyre::Result; -use iroha_core::{block::CommittedBlock, prelude::*, 
state::State}; +use iroha_core::{ + block::CommittedBlock, prelude::*, state::State, sumeragi::network_topology::Topology, +}; +use iroha_data_model::peer::PeerId; use test_samples::gen_account_in; #[path = "./common.rs"] @@ -20,17 +23,22 @@ impl StateApplyBlocks { /// - Failed to generate [`KeyPair`] /// - Failed to create instructions for block pub fn setup(rt: &tokio::runtime::Handle) -> Self { - let domains = 100; - let accounts_per_domain = 1000; - let assets_per_domain = 1000; + let domains = 10; + let accounts_per_domain = 100; + let assets_per_domain = 100; + let (domain_ids, account_ids, asset_definition_ids) = + generate_ids(domains, accounts_per_domain, assets_per_domain); + let (peer_public_key, peer_private_key) = KeyPair::random().into_parts(); + let peer_id = PeerId::new("127.0.0.1:8080".parse().unwrap(), peer_public_key); + let topology = Topology::new(vec![peer_id]); let (alice_id, alice_keypair) = gen_account_in("wonderland"); let state = build_state(rt, &alice_id); - let nth = 100; + let nth = 10; let instructions = [ - populate_state(domains, accounts_per_domain, assets_per_domain, &alice_id), - delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + populate_state(&domain_ids, &account_ids, &asset_definition_ids, &alice_id), + delete_every_nth(&domain_ids, &account_ids, &asset_definition_ids, nth), + restore_every_nth(&domain_ids, &account_ids, &asset_definition_ids, nth), ]; let blocks = { @@ -45,6 +53,8 @@ impl StateApplyBlocks { instructions, alice_id.clone(), alice_keypair.private_key(), + &topology, + &peer_private_key, ); let _events = state_block.apply_without_execution(&block); state_block.commit(); diff --git a/core/benches/blocks/apply_blocks_oneshot.rs b/core/benches/blocks/apply_blocks_oneshot.rs index 6492d17e2c7..6275f9ec573 100644 --- a/core/benches/blocks/apply_blocks_oneshot.rs +++ b/core/benches/blocks/apply_blocks_oneshot.rs @@ -8,6 
+8,8 @@ mod apply_blocks; use apply_blocks::StateApplyBlocks; +use iroha_config::base::{env::std_env, read::ConfigReader}; +use iroha_logger::InitConfig; fn main() { let rt = tokio::runtime::Builder::new_multi_thread() @@ -16,7 +18,12 @@ fn main() { .expect("Failed building the Runtime"); { let _guard = rt.enter(); - iroha_logger::test_logger(); + let config = ConfigReader::new() + .with_env(std_env) + .read_and_complete() + .expect("Failed to load config"); + let config = InitConfig::new(config, true); + let _ = iroha_logger::init_global(config).expect("Failed to initialize logger"); } iroha_logger::info!("Starting..."); let bench = StateApplyBlocks::setup(rt.handle()); diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs index 30ac642aee4..6ec0bcf3055 100644 --- a/core/benches/blocks/common.rs +++ b/core/benches/blocks/common.rs @@ -1,4 +1,4 @@ -use std::str::FromStr as _; +use std::{num::NonZeroU64, str::FromStr as _}; use iroha_core::{ block::{BlockBuilder, CommittedBlock}, @@ -13,11 +13,11 @@ use iroha_data_model::{ asset::{AssetDefinition, AssetDefinitionId}, domain::Domain, isi::InstructionBox, + parameter::TransactionParameters, prelude::*, - transaction::TransactionLimits, - ChainId, JsonString, + ChainId, }; -use iroha_primitives::unique_vec::UniqueVec; +use iroha_primitives::{json::JsonString, unique_vec::UniqueVec}; use serde_json::json; /// Create block @@ -25,27 +25,26 @@ pub fn create_block( state: &mut StateBlock<'_>, instructions: Vec, account_id: AccountId, - private_key: &PrivateKey, + account_private_key: &PrivateKey, + topology: &Topology, + peer_private_key: &PrivateKey, ) -> CommittedBlock { let chain_id = ChainId::from("00000000-0000-0000-0000-000000000000"); let transaction = TransactionBuilder::new(chain_id.clone(), account_id) .with_instructions(instructions) - .sign(private_key); - let limits = state.transaction_executor().transaction_limits; + .sign(account_private_key); + let limits = 
state.transaction_executor().limits; - let (peer_public_key, _) = KeyPair::random().into_parts(); - let peer_id = PeerId::new("127.0.0.1:8080".parse().unwrap(), peer_public_key); - let topology = Topology::new(vec![peer_id]); let block = BlockBuilder::new( vec![AcceptedTransaction::accept(transaction, &chain_id, limits).unwrap()], topology.clone(), Vec::new(), ) .chain(0, state) - .sign(private_key) + .sign(peer_private_key) .unpack(|_| {}) - .commit(&topology) + .commit(topology) .unpack(|_| {}) .unwrap(); @@ -58,76 +57,83 @@ pub fn create_block( } pub fn populate_state( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, + domains: &[DomainId], + accounts: &[AccountId], + asset_definitions: &[AssetDefinitionId], owner_id: &AccountId, ) -> Vec { let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = construct_domain_id(i); + + for domain_id in domains { let domain = Domain::new(domain_id.clone()); instructions.push(Register::domain(domain).into()); let can_unregister_domain = Grant::permission( Permission::new( "CanUnregisterDomain".parse().unwrap(), - JsonString::from(&json!({ "domain_id": domain_id.clone() })), + JsonString::from(&json!({ "domain": domain_id.clone() })), ), owner_id.clone(), ); instructions.push(can_unregister_domain.into()); - for _ in 0..accounts_per_domain { - let account_id = generate_account_id(domain_id.clone()); - let account = Account::new(account_id.clone()); - instructions.push(Register::account(account).into()); - let can_unregister_account = Grant::permission( - Permission::new( - "CanUnregisterAccount".parse().unwrap(), - JsonString::from(&json!({ "account_id": account_id.clone() })), - ), - owner_id.clone(), - ); - instructions.push(can_unregister_account.into()); - } - for k in 0..assets_per_domain { - let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); - let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); - 
instructions.push(Register::asset_definition(asset_definition).into()); - let can_unregister_asset_definition = Grant::permission( - Permission::new( - "CanUnregisterAssetDefinition".parse().unwrap(), - JsonString::from(&json!({ "asset_definition_id": asset_definition_id })), - ), - owner_id.clone(), - ); - instructions.push(can_unregister_asset_definition.into()); - } } + + for account_id in accounts { + let account = Account::new(account_id.clone()); + instructions.push(Register::account(account).into()); + let can_unregister_account = Grant::permission( + Permission::new( + "CanUnregisterAccount".parse().unwrap(), + JsonString::from(&json!({ "account": account_id.clone() })), + ), + owner_id.clone(), + ); + instructions.push(can_unregister_account.into()); + } + + for asset_definition_id in asset_definitions { + let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); + instructions.push(Register::asset_definition(asset_definition).into()); + let can_unregister_asset_definition = Grant::permission( + Permission::new( + "CanUnregisterAssetDefinition".parse().unwrap(), + JsonString::from(&json!({ "asset_definition": asset_definition_id })), + ), + owner_id.clone(), + ); + instructions.push(can_unregister_asset_definition.into()); + } + instructions } pub fn delete_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, + domains: &[DomainId], + accounts: &[AccountId], + asset_definitions: &[AssetDefinitionId], nth: usize, ) -> Vec { let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = construct_domain_id(i); + for (i, domain_id) in domains.iter().enumerate() { if i % nth == 0 { instructions.push(Unregister::domain(domain_id.clone()).into()); } else { - for j in 0..accounts_per_domain { + for (j, account_id) in accounts + .iter() + .filter(|account_id| account_id.domain() == domain_id) + .enumerate() + { if j % nth == 0 { - let account_id = generate_account_id(domain_id.clone()); 
instructions.push(Unregister::account(account_id.clone()).into()); } } - for k in 0..assets_per_domain { + for (k, asset_definition_id) in asset_definitions + .iter() + .filter(|asset_definition_id| asset_definition_id.domain() == domain_id) + .enumerate() + { if k % nth == 0 { - let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); - instructions.push(Unregister::asset_definition(asset_definition_id).into()); + instructions + .push(Unregister::asset_definition(asset_definition_id.clone()).into()); } } } @@ -136,29 +142,34 @@ pub fn delete_every_nth( } pub fn restore_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, + domains: &[DomainId], + accounts: &[AccountId], + asset_definitions: &[AssetDefinitionId], nth: usize, ) -> Vec { let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = construct_domain_id(i); + for (i, domain_id) in domains.iter().enumerate() { if i % nth == 0 { let domain = Domain::new(domain_id.clone()); instructions.push(Register::domain(domain).into()); } - for j in 0..accounts_per_domain { + for (j, account_id) in accounts + .iter() + .filter(|account_id| account_id.domain() == domain_id) + .enumerate() + { if j % nth == 0 || i % nth == 0 { - let account_id = generate_account_id(domain_id.clone()); let account = Account::new(account_id.clone()); instructions.push(Register::account(account).into()); } } - for k in 0..assets_per_domain { + for (k, asset_definition_id) in asset_definitions + .iter() + .filter(|asset_definition_id| asset_definition_id.domain() == domain_id) + .enumerate() + { if k % nth == 0 || i % nth == 0 { - let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); - let asset_definition = AssetDefinition::numeric(asset_definition_id); + let asset_definition = AssetDefinition::numeric(asset_definition_id.clone()); instructions.push(Register::asset_definition(asset_definition).into()); } } @@ -186,9 +197,10 @@ pub fn 
build_state(rt: &tokio::runtime::Handle, account_id: &AccountId) -> State { let mut state_block = state.block(); - state_block.config.transaction_limits = TransactionLimits::new(u64::MAX, u64::MAX); - state_block.config.executor_runtime.fuel_limit = u64::MAX; - state_block.config.executor_runtime.max_memory = u32::MAX.into(); + state_block.world.parameters.transaction = + TransactionParameters::new(NonZeroU64::MAX, NonZeroU64::MAX); + state_block.world.parameters.executor.fuel = NonZeroU64::MAX; + state_block.world.parameters.executor.memory = NonZeroU64::MAX; let mut state_transaction = state_block.transaction(); let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) @@ -221,3 +233,28 @@ fn construct_asset_definition_id(i: usize, domain_id: DomainId) -> AssetDefiniti Name::from_str(&format!("non_inlinable_asset_definition_name_{i}")).unwrap(), ) } + +pub fn generate_ids( + domains: usize, + accounts_per_domain: usize, + assets_per_domain: usize, +) -> (Vec, Vec, Vec) { + let mut domain_ids = Vec::new(); + let mut account_ids = Vec::new(); + let mut asset_definition_ids = Vec::new(); + + for i in 0..domains { + let domain_id = construct_domain_id(i); + domain_ids.push(domain_id.clone()); + for _ in 0..accounts_per_domain { + let account_id = generate_account_id(domain_id.clone()); + account_ids.push(account_id) + } + for k in 0..assets_per_domain { + let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); + asset_definition_ids.push(asset_definition_id); + } + } + + (domain_ids, account_ids, asset_definition_ids) +} diff --git a/core/benches/blocks/validate_blocks.rs b/core/benches/blocks/validate_blocks.rs index c9e7e082ab4..47a495dbe40 100644 --- a/core/benches/blocks/validate_blocks.rs +++ b/core/benches/blocks/validate_blocks.rs @@ -1,4 +1,4 @@ -use iroha_core::{prelude::*, state::State}; +use iroha_core::{prelude::*, state::State, sumeragi::network_topology::Topology}; use iroha_data_model::{isi::InstructionBox, 
prelude::*}; use test_samples::gen_account_in; @@ -10,8 +10,10 @@ use common::*; pub struct StateValidateBlocks { state: State, instructions: Vec>, - private_key: PrivateKey, + account_private_key: PrivateKey, account_id: AccountId, + topology: Topology, + peer_private_key: PrivateKey, } impl StateValidateBlocks { @@ -23,17 +25,22 @@ impl StateValidateBlocks { /// - Failed to generate [`KeyPair`] /// - Failed to create instructions for block pub fn setup(rt: &tokio::runtime::Handle) -> Self { - let domains = 100; - let accounts_per_domain = 1000; - let assets_per_domain = 1000; + let domains = 10; + let accounts_per_domain = 100; + let assets_per_domain = 100; + let (domain_ids, account_ids, asset_definition_ids) = + generate_ids(domains, accounts_per_domain, assets_per_domain); + let (peer_public_key, peer_private_key) = KeyPair::random().into_parts(); + let peer_id = PeerId::new("127.0.0.1:8080".parse().unwrap(), peer_public_key); + let topology = Topology::new(vec![peer_id]); let (alice_id, alice_keypair) = gen_account_in("wonderland"); let state = build_state(rt, &alice_id); - let nth = 100; + let nth = 10; let instructions = [ - populate_state(domains, accounts_per_domain, assets_per_domain, &alice_id), - delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + populate_state(&domain_ids, &account_ids, &asset_definition_ids, &alice_id), + delete_every_nth(&domain_ids, &account_ids, &asset_definition_ids, nth), + restore_every_nth(&domain_ids, &account_ids, &asset_definition_ids, nth), ] .into_iter() .collect::>(); @@ -41,8 +48,10 @@ impl StateValidateBlocks { Self { state, instructions, - private_key: alice_keypair.private_key().clone(), + account_private_key: alice_keypair.private_key().clone(), account_id: alice_id, + topology, + peer_private_key, } } @@ -58,8 +67,10 @@ impl StateValidateBlocks { Self { state, instructions, - private_key, + account_private_key, 
account_id, + topology, + peer_private_key, }: Self, ) { for (instructions, i) in instructions.into_iter().zip(1..) { @@ -68,7 +79,9 @@ impl StateValidateBlocks { &mut state_block, instructions, account_id.clone(), - &private_key, + &account_private_key, + &topology, + &peer_private_key, ); let _events = state_block.apply_without_execution(&block); assert_eq!(state_block.height(), i); diff --git a/core/benches/blocks/validate_blocks_oneshot.rs b/core/benches/blocks/validate_blocks_oneshot.rs index 8c8b20b1343..95fa30fc102 100644 --- a/core/benches/blocks/validate_blocks_oneshot.rs +++ b/core/benches/blocks/validate_blocks_oneshot.rs @@ -7,6 +7,8 @@ mod validate_blocks; +use iroha_config::base::{env::std_env, read::ConfigReader}; +use iroha_logger::InitConfig; use validate_blocks::StateValidateBlocks; fn main() { @@ -16,7 +18,12 @@ fn main() { .expect("Failed building the Runtime"); { let _guard = rt.enter(); - iroha_logger::test_logger(); + let config = ConfigReader::new() + .with_env(std_env) + .read_and_complete() + .expect("Failed to load config"); + let config = InitConfig::new(config, true); + let _ = iroha_logger::init_global(config).expect("Failed to initialize logger"); } iroha_logger::test_logger(); iroha_logger::info!("Starting..."); diff --git a/core/benches/kura.rs b/core/benches/kura.rs index bc3b55a49e6..ad00f123533 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -14,7 +14,8 @@ use iroha_core::{ sumeragi::network_topology::Topology, }; use iroha_crypto::KeyPair; -use iroha_data_model::{prelude::*, transaction::TransactionLimits}; +use iroha_data_model::{parameter::TransactionParameters, prelude::*}; +use nonzero_ext::nonzero; use test_samples::gen_account_in; use tokio::{fs, runtime::Runtime}; @@ -29,11 +30,11 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions([transfer]) .sign(alice_keypair.private_key()); - let transaction_limits = 
TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let txn_limits = TransactionParameters { + max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1_u64), }; - let tx = AcceptedTransaction::accept(tx, &chain_id, transaction_limits) + let tx = AcceptedTransaction::accept(tx, &chain_id, txn_limits) .expect("Failed to accept Transaction."); let dir = tempfile::tempdir().expect("Could not create tempfile."); let cfg = Config { diff --git a/core/benches/validation.rs b/core/benches/validation.rs index 4814d0dd2ca..91feaa7a9da 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -11,12 +11,11 @@ use iroha_core::{ tx::TransactionExecutor, }; use iroha_data_model::{ - account::AccountId, - isi::InstructionBox, - prelude::*, - transaction::{TransactionBuilder, TransactionLimits}, + account::AccountId, isi::InstructionBox, parameter::TransactionParameters, prelude::*, + transaction::TransactionBuilder, }; use iroha_primitives::unique_vec::UniqueVec; +use nonzero_ext::nonzero; use once_cell::sync::Lazy; use test_samples::gen_account_in; @@ -25,10 +24,8 @@ static STARTER_KEYPAIR: Lazy = Lazy::new(KeyPair::random); static STARTER_ID: Lazy = Lazy::new(|| AccountId::new(STARTER_DOMAIN.clone(), STARTER_KEYPAIR.public_key().clone())); -const TRANSACTION_LIMITS: TransactionLimits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, -}; +const TRANSACTION_LIMITS: TransactionParameters = + TransactionParameters::new(nonzero!(4096_u64), nonzero!(1_u64)); fn build_test_transaction(chain_id: ChainId) -> TransactionBuilder { let domain_id: DomainId = "domain".parse().unwrap(); diff --git a/core/src/block.rs b/core/src/block.rs index f0aadb019a7..179b5d6024c 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -107,7 +107,10 @@ pub enum InvalidGenesisError { pub struct BlockBuilder(B); mod pending { - use std::time::{Duration, SystemTime}; + use std::{ + num::NonZeroUsize, + 
time::{Duration, SystemTime}, + }; use iroha_data_model::transaction::CommittedTransaction; @@ -156,11 +159,14 @@ mod pending { consensus_estimation: Duration, ) -> BlockHeader { BlockHeader { - height: prev_height - .checked_add(1) - .expect("INTERNAL BUG: Blockchain height exceeds usize::MAX") - .try_into() - .expect("INTERNAL BUG: Number of blocks exceeds u64::MAX"), + height: NonZeroUsize::new( + prev_height + .checked_add(1) + .expect("INTERNAL BUG: Blockchain height exceeds usize::MAX"), + ) + .expect("INTERNAL BUG: block height must not be 0") + .try_into() + .expect("INTERNAL BUG: Number of blocks exceeds u64::MAX"), prev_block_hash, transactions_hash: transactions .iter() @@ -168,7 +174,7 @@ mod pending { .collect::>() .hash() .expect("INTERNAL BUG: Empty block created"), - timestamp_ms: SystemTime::now() + creation_time_ms: SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .expect("INTERNAL BUG: Failed to get the current system time") .as_millis() @@ -228,7 +234,7 @@ mod pending { state.latest_block_hash(), view_change_index, &transactions, - state.config.consensus_estimation(), + state.world.parameters().sumeragi.consensus_estimation(), ), transactions, commit_topology: self.0.commit_topology.into_iter().collect(), @@ -414,10 +420,14 @@ mod valid { genesis_account: &AccountId, state_block: &mut StateBlock<'_>, ) -> WithEvents> { - let expected_block_height = state_block.height() + 1; + let expected_block_height = state_block + .height() + .checked_add(1) + .expect("INTERNAL BUG: Block height exceeds usize::MAX"); let actual_height = block .header() .height + .get() .try_into() .expect("INTERNAL BUG: Block height exceeds usize::MAX"); @@ -516,7 +526,7 @@ mod valid { AcceptedTransaction::accept( value, expected_chain_id, - transaction_executor.transaction_limits, + transaction_executor.limits, ) }?; @@ -638,14 +648,16 @@ mod valid { leader_private_key: &PrivateKey, f: impl FnOnce(&mut BlockPayload), ) -> Self { + use nonzero_ext::nonzero; + let mut 
payload = BlockPayload { header: BlockHeader { - height: 2, + height: nonzero!(2_u64), prev_block_hash: None, transactions_hash: HashOf::from_untyped_unchecked(Hash::prehashed( [1; Hash::LENGTH], )), - timestamp_ms: 0, + creation_time_ms: 0, view_change_index: 0, consensus_estimation_ms: 4_000, }, @@ -997,7 +1009,7 @@ mod tests { Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); // Making two transactions that have the same instruction - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let tx = TransactionBuilder::new(chain_id.clone(), alice_id) .with_instructions([create_asset_definition]) .sign(alice_keypair.private_key()); @@ -1053,7 +1065,7 @@ mod tests { Register::asset_definition(AssetDefinition::numeric(asset_definition_id.clone())); // Making two transactions that have the same instruction - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions([create_asset_definition]) .sign(alice_keypair.private_key()); @@ -1120,17 +1132,16 @@ mod tests { let query_handle = LiveQueryStore::test().start(); let state = State::new(world, kura, query_handle); let mut state_block = state.block(); - let transaction_limits = state_block.transaction_executor().transaction_limits; + let transaction_limits = state_block.transaction_executor().limits; let domain_id = DomainId::from_str("domain").expect("Valid"); let create_domain = Register::domain(Domain::new(domain_id)); let asset_definition_id = AssetDefinitionId::from_str("coin#domain").expect("Valid"); let create_asset = Register::asset_definition(AssetDefinition::numeric(asset_definition_id)); - let instructions_fail: [InstructionBox; 2] = [ - create_domain.clone().into(), - Fail::new("Always fail".to_owned()).into(), - 
]; + let fail_isi = Unregister::domain("dummy".parse().unwrap()); + let instructions_fail: [InstructionBox; 2] = + [create_domain.clone().into(), fail_isi.into()]; let instructions_accept: [InstructionBox; 2] = [create_domain.into(), create_asset.into()]; let tx_fail = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) .with_instructions(instructions_fail) diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs index 85dd12ff289..99553729f92 100644 --- a/core/src/block_sync.rs +++ b/core/src/block_sync.rs @@ -41,7 +41,7 @@ pub struct BlockSynchronizer { kura: Arc, peer_id: PeerId, gossip_period: Duration, - gossip_max_size: NonZeroU32, + gossip_size: NonZeroU32, network: IrohaNetwork, state: Arc, } @@ -118,7 +118,7 @@ impl BlockSynchronizer { sumeragi, kura, gossip_period: config.gossip_period, - gossip_max_size: config.gossip_max_size, + gossip_size: config.gossip_size, network, state, } @@ -219,7 +219,7 @@ pub mod message { }; let blocks = (start_height.get()..) - .take(block_sync.gossip_max_size.get() as usize + 1) + .take(block_sync.gossip_size.get() as usize + 1) .map_while(|height| { NonZeroUsize::new(height) .and_then(|height| block_sync.kura.get_block_by_height(height)) diff --git a/core/src/executor.rs b/core/src/executor.rs index 28c2ff9f7a0..ab0bd0b2b12 100644 --- a/core/src/executor.rs +++ b/core/src/executor.rs @@ -18,6 +18,7 @@ use serde::{ use crate::{ smartcontracts::{wasm, Execute as _}, state::{deserialize::WasmSeed, StateReadOnly, StateTransaction}, + WorldReadOnly as _, }; impl From for ValidationFail { @@ -151,7 +152,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world.parameters().executor) .build()?; runtime.execute_executor_validate_transaction( @@ -187,7 +188,7 @@ impl Executor { let runtime = 
wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world.parameters().executor) .build()?; runtime.execute_executor_validate_instruction( @@ -221,7 +222,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::>::new() .with_engine(state_ro.engine().clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_ro.config().executor_runtime) + .with_config(state_ro.world().parameters().executor) .build()?; runtime.execute_executor_validate_query( @@ -256,7 +257,7 @@ impl Executor { let runtime = wasm::RuntimeBuilder::::new() .with_engine(state_transaction.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs - .with_config(state_transaction.config.executor_runtime) + .with_config(state_transaction.world().parameters().executor) .build()?; runtime diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs index 4f5018aa9f6..4a08606108e 100644 --- a/core/src/gossiper.rs +++ b/core/src/gossiper.rs @@ -8,7 +8,10 @@ use iroha_p2p::Broadcast; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; -use crate::{queue::Queue, state::State, tx::AcceptedTransaction, IrohaNetwork, NetworkMessage}; +use crate::{ + queue::Queue, state::State, tx::AcceptedTransaction, IrohaNetwork, NetworkMessage, + StateReadOnly, WorldReadOnly, +}; /// [`Gossiper`] actor handle. #[derive(Clone)] @@ -26,21 +29,18 @@ impl TransactionGossiperHandle { } } -/// Actor to gossip transactions and receive transaction gossips +/// Actor which gossips transactions and receives transaction gossips pub struct TransactionGossiper { /// Unique id of the blockchain. Used for simple replay attack protection. chain_id: ChainId, - /// The size of batch that is being gossiped. Smaller size leads - /// to longer time to synchronise, useful if you have high packet loss. 
- gossip_max_size: NonZeroU32, - /// The time between gossiping. More frequent gossiping shortens + /// The time between gossip messages. More frequent gossiping shortens /// the time to sync, but can overload the network. gossip_period: Duration, - /// Address of queue - queue: Arc, - /// [`iroha_p2p::Network`] actor handle + /// Maximum size of a batch that is being gossiped. Smaller size leads + /// to longer time to synchronise, useful if you have high packet loss. + gossip_size: NonZeroU32, network: IrohaNetwork, - /// [`WorldState`] + queue: Arc, state: Arc, } @@ -57,7 +57,7 @@ impl TransactionGossiper { chain_id: ChainId, Config { gossip_period, - gossip_max_size, + gossip_size, }: Config, network: IrohaNetwork, queue: Arc, @@ -65,10 +65,10 @@ impl TransactionGossiper { ) -> Self { Self { chain_id, - gossip_max_size, gossip_period, - queue, + gossip_size, network, + queue, state, } } @@ -93,7 +93,7 @@ impl TransactionGossiper { fn gossip_transactions(&self) { let txs = self .queue - .n_random_transactions(self.gossip_max_size.get(), &self.state.view()); + .n_random_transactions(self.gossip_size.get(), &self.state.view()); if txs.is_empty() { return; @@ -110,7 +110,7 @@ impl TransactionGossiper { let state_view = self.state.view(); for tx in txs { - let transaction_limits = state_view.config.transaction_limits; + let transaction_limits = state_view.world().parameters().transaction; match AcceptedTransaction::accept(tx, &self.chain_id, transaction_limits) { Ok(tx) => match self.queue.push(tx, &state_view) { diff --git a/core/src/lib.rs b/core/src/lib.rs index f1104d47163..0d25bf98ca7 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -18,7 +18,6 @@ pub mod tx; use core::time::Duration; use gossiper::TransactionGossip; -use indexmap::IndexSet; use iroha_data_model::{events::EventBox, prelude::*}; use iroha_primitives::unique_vec::UniqueVec; use parity_scale_codec::{Decode, Encode}; @@ -39,9 +38,6 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle; /// Ids 
of peers. pub type PeersIds = UniqueVec; -/// Parameters set. -pub type Parameters = IndexSet; - /// Type of `Sender` which should be used for channels of `Event` messages. pub type EventsSender = broadcast::Sender; diff --git a/core/src/query/store.rs b/core/src/query/store.rs index be9da50dbd3..8e2574a0fd0 100644 --- a/core/src/query/store.rs +++ b/core/src/query/store.rs @@ -8,7 +8,11 @@ use std::{ use indexmap::IndexMap; use iroha_config::parameters::actual::LiveQueryStore as Config; use iroha_data_model::{ - query::{cursor::ForwardCursor, error::QueryExecutionFail, QueryId, QueryOutputBox}, + query::{ + cursor::{ForwardCursor, QueryId}, + error::QueryExecutionFail, + QueryOutputBox, + }, BatchedResponse, BatchedResponseV1, ValidationFail, }; use iroha_logger::trace; @@ -184,7 +188,7 @@ impl LiveQueryStoreHandle { &self, cursor: ForwardCursor, ) -> Result> { - let query_id = cursor.query_id.ok_or(UnknownCursor)?; + let query_id = cursor.query.ok_or(UnknownCursor)?; let live_query = self.remove(query_id.clone())?.ok_or(UnknownCursor)?; self.construct_query_response(query_id, cursor.cursor.map(NonZeroU64::get), live_query) @@ -235,7 +239,7 @@ impl LiveQueryStoreHandle { let query_response = BatchedResponseV1 { batch: QueryOutputBox::Vec(batch), cursor: ForwardCursor { - query_id: Some(query_id), + query: Some(query_id), cursor: next_cursor, }, }; @@ -246,10 +250,8 @@ impl LiveQueryStoreHandle { #[cfg(test)] mod tests { - use iroha_data_model::{ - metadata::MetadataValueBox, - query::{predicate::PredicateBox, FetchSize, Pagination, Sorting}, - }; + use iroha_data_model::query::{predicate::PredicateBox, FetchSize, Pagination, Sorting}; + use iroha_primitives::json::JsonString; use nonzero_ext::nonzero; use super::*; @@ -269,9 +271,8 @@ mod tests { }; let sorting = Sorting::default(); - let query_output = LazyQueryOutput::Iter(Box::new( - (0..100).map(|_| MetadataValueBox::from(false).into()), - )); + let query_output = + 
LazyQueryOutput::Iter(Box::new((0..100).map(|_| JsonString::from(false).into()))); let mut counter = 0; diff --git a/core/src/queue.rs b/core/src/queue.rs index 658ffafe405..d3ac864685b 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -284,9 +284,9 @@ impl Queue { fn collect_transactions_for_block( &self, state_view: &StateView, - max_txs_in_block: usize, + max_txs_in_block: NonZeroUsize, ) -> Vec { - let mut transactions = Vec::with_capacity(max_txs_in_block); + let mut transactions = Vec::with_capacity(max_txs_in_block.get()); self.get_transactions_for_block(state_view, max_txs_in_block, &mut transactions); transactions } @@ -297,10 +297,10 @@ impl Queue { pub fn get_transactions_for_block( &self, state_view: &StateView, - max_txs_in_block: usize, + max_txs_in_block: NonZeroUsize, transactions: &mut Vec, ) { - if transactions.len() >= max_txs_in_block { + if transactions.len() >= max_txs_in_block.get() { return; } @@ -315,7 +315,7 @@ impl Queue { transactions.iter().map(|tx| tx.as_ref().hash()).collect(); let txs = txs_from_queue .filter(|tx| !transactions_hashes.contains(&tx.as_ref().hash())) - .take(max_txs_in_block - transactions.len()); + .take(max_txs_in_block.get() - transactions.len()); transactions.extend(txs); seen_queue @@ -377,7 +377,7 @@ impl Queue { pub mod tests { use std::{str::FromStr, sync::Arc, thread, time::Duration}; - use iroha_data_model::{prelude::*, transaction::TransactionLimits}; + use iroha_data_model::{parameter::TransactionParameters, prelude::*}; use nonzero_ext::nonzero; use rand::Rng as _; use test_samples::gen_account_in; @@ -419,17 +419,15 @@ pub mod tests { time_source: &TimeSource, ) -> AcceptedTransaction { let chain_id = ChainId::from("00000000-0000-0000-0000-000000000000"); - let message = std::iter::repeat_with(rand::random::) - .take(16) - .collect(); - let instructions = [Fail { message }]; + let fail_isi = Unregister::domain("dummy".parse().unwrap()); + let instructions = [fail_isi]; let tx = 
TransactionBuilder::new_with_time_source(chain_id.clone(), account_id, time_source) .with_instructions(instructions) .sign(key_pair.private_key()); - let limits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1024_u64), }; AcceptedTransaction::accept(tx, &chain_id, limits).expect("Failed to accept Transaction.") } @@ -504,7 +502,7 @@ pub mod tests { #[test] async fn get_available_txs() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -527,7 +525,7 @@ pub mod tests { } let available = queue.collect_transactions_for_block(&state_view, max_txs_in_block); - assert_eq!(available.len(), max_txs_in_block); + assert_eq!(available.len(), max_txs_in_block.get()); } #[test] @@ -538,7 +536,9 @@ pub mod tests { let (_time_handle, time_source) = TimeSource::new_mock(Duration::default()); let tx = accepted_tx_by_someone(&time_source); let mut state_block = state.block(); - state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); let state_view = state.view(); let queue = Queue::test(config_factory(), &time_source); @@ -554,7 +554,7 @@ pub mod tests { #[test] async fn get_tx_drop_if_in_blockchain() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = State::new(world_with_test_domains(), kura, query_handle); @@ -563,7 +563,9 @@ pub mod tests { let queue = Queue::test(config_factory(), &time_source); queue.push(tx.clone(), &state.view()).unwrap(); let mut state_block = state.block(); - 
state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); assert_eq!( queue @@ -576,7 +578,7 @@ pub mod tests { #[test] async fn get_available_txs_with_timeout() { - let max_txs_in_block = 6; + let max_txs_in_block = nonzero!(6_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -591,7 +593,7 @@ pub mod tests { }, &time_source, ); - for _ in 0..(max_txs_in_block - 1) { + for _ in 0..(max_txs_in_block.get() - 1) { queue .push(accepted_tx_by_someone(&time_source), &state_view) .expect("Failed to push tx into queue"); @@ -625,7 +627,7 @@ pub mod tests { // Others should stay in the queue until that moment. #[test] async fn transactions_available_after_pop() { - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -658,7 +660,7 @@ pub mod tests { let chain_id = ChainId::from("00000000-0000-0000-0000-000000000000"); - let max_txs_in_block = 2; + let max_txs_in_block = nonzero!(2_usize); let (alice_id, alice_keypair) = gen_account_in("wonderland"); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); @@ -669,17 +671,16 @@ pub mod tests { let mut queue = Queue::test(config_factory(), &time_source); let (event_sender, mut event_receiver) = tokio::sync::broadcast::channel(1); queue.events_sender = event_sender; - let instructions = [Fail { - message: "expired".to_owned(), - }]; + let fail_isi = Unregister::domain("dummy".parse().unwrap()); + let instructions = [fail_isi]; let mut tx = TransactionBuilder::new_with_time_source(chain_id.clone(), alice_id, &time_source) .with_instructions(instructions); 
tx.set_ttl(Duration::from_millis(TTL_MS)); let tx = tx.sign(alice_keypair.private_key()); - let limits = TransactionLimits { - max_instruction_number: 4096, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + max_instructions: nonzero!(4096_u64), + smart_contract_size: nonzero!(1024_u64), }; let tx_hash = tx.hash(); let tx = AcceptedTransaction::accept(tx, &chain_id, limits) @@ -718,7 +719,7 @@ pub mod tests { #[test] async fn concurrent_stress_test() { - let max_txs_in_block = 10; + let max_txs_in_block = nonzero!(10_usize); let kura = Kura::blank_kura_for_testing(); let query_handle = LiveQueryStore::test().start(); let state = Arc::new(State::new(world_with_test_domains(), kura, query_handle)); @@ -766,7 +767,9 @@ pub mod tests { for tx in queue.collect_transactions_for_block(&state.view(), max_txs_in_block) { let mut state_block = state.block(); - state_block.transactions.insert(tx.as_ref().hash(), 1); + state_block + .transactions + .insert(tx.as_ref().hash(), nonzero!(1_usize)); state_block.commit(); } // Simulate random small delays @@ -884,18 +887,18 @@ pub mod tests { ) .expect("Failed to push tx into queue"); - let transactions = queue.collect_transactions_for_block(&state.view(), 10); + let transactions = queue.collect_transactions_for_block(&state.view(), nonzero!(10_usize)); assert_eq!(transactions.len(), 2); let mut state_block = state.block(); for transaction in transactions { // Put transaction hashes into state as if they were in the blockchain state_block .transactions - .insert(transaction.as_ref().hash(), 1); + .insert(transaction.as_ref().hash(), nonzero!(1_usize)); } state_block.commit(); // Cleanup transactions - let transactions = queue.collect_transactions_for_block(&state.view(), 10); + let transactions = queue.collect_transactions_for_block(&state.view(), nonzero!(10_usize)); assert!(transactions.is_empty()); // After cleanup Alice and Bob pushes should work fine diff --git a/core/src/smartcontracts/isi/account.rs 
b/core/src/smartcontracts/isi/account.rs index 2932b0d9882..4278dd67229 100644 --- a/core/src/smartcontracts/isi/account.rs +++ b/core/src/smartcontracts/isi/account.rs @@ -25,7 +25,7 @@ impl Registrable for iroha_data_model::account::NewAccount { /// - Revoke permissions or roles pub mod isi { use iroha_data_model::{ - asset::{AssetValue, AssetValueType}, + asset::{AssetType, AssetValue}, isi::{ error::{MintabilityError, RepetitionError}, InstructionType, @@ -78,7 +78,7 @@ pub mod isi { _ => Err(err.into()), }, Ok(_) => Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::AssetId(asset_id.clone()), } .into()), @@ -174,21 +174,14 @@ pub mod isi { ) -> Result<(), Error> { let account_id = self.object; - let account_metadata_limits = state_transaction.config.account_metadata_limits; - state_transaction .world .account_mut(&account_id) .map_err(Error::from) - .and_then(|account| { + .map(|account| { account .metadata - .insert_with_limits( - self.key.clone(), - self.value.clone(), - account_metadata_limits, - ) - .map_err(Error::from) + .insert(self.key.clone(), self.value.clone()) })?; state_transaction @@ -243,7 +236,6 @@ pub mod isi { ) -> Result<(), Error> { let account_id = self.destination; let permission = self.object; - let permission_id = permission.id.clone(); // Check if account exists state_transaction.world.account_mut(&account_id)?; @@ -253,22 +245,22 @@ pub mod isi { .account_contains_inherent_permission(&account_id, &permission) { return Err(RepetitionError { - instruction_type: InstructionType::Grant, - id: permission.id.into(), + instruction: InstructionType::Grant, + id: permission.into(), } .into()); } state_transaction .world - .add_account_permission(&account_id, permission); + .add_account_permission(&account_id, permission.clone()); state_transaction .world .emit_events(Some(AccountEvent::PermissionAdded( AccountPermissionChanged { account: account_id, - permission: 
permission_id, + permission, }, ))); @@ -293,7 +285,7 @@ pub mod isi { .world .remove_account_permission(&account_id, &permission) { - return Err(FindError::Permission(permission.id).into()); + return Err(FindError::Permission(permission).into()); } state_transaction @@ -301,7 +293,7 @@ pub mod isi { .emit_events(Some(AccountEvent::PermissionRemoved( AccountPermissionChanged { account: account_id, - permission: permission.id, + permission, }, ))); @@ -324,10 +316,8 @@ pub mod isi { .roles .get(&role_id) .ok_or_else(|| FindError::Role(role_id.clone()))? - .clone() .permissions - .into_iter() - .map(|token| token.id); + .clone(); state_transaction.world.account(&account_id)?; @@ -341,7 +331,7 @@ pub mod isi { .is_some() { return Err(RepetitionError { - instruction_type: InstructionType::Grant, + instruction: InstructionType::Grant, id: IdBox::RoleId(role_id), } .into()); @@ -350,10 +340,11 @@ pub mod isi { state_transaction.world.emit_events({ let account_id_clone = account_id.clone(); permissions + .into_iter() .zip(core::iter::repeat_with(move || account_id.clone())) - .map(|(permission_id, account_id)| AccountPermissionChanged { + .map(|(permission, account_id)| AccountPermissionChanged { account: account_id, - permission: permission_id, + permission, }) .map(AccountEvent::PermissionAdded) .chain(std::iter::once(AccountEvent::RoleGranted( @@ -383,10 +374,8 @@ pub mod isi { .roles .get(&role_id) .ok_or_else(|| FindError::Role(role_id.clone()))? 
- .clone() .permissions - .into_iter() - .map(|token| token.id); + .clone(); if state_transaction .world @@ -403,10 +392,11 @@ pub mod isi { state_transaction.world.emit_events({ let account_id_clone = account_id.clone(); permissions + .into_iter() .zip(core::iter::repeat_with(move || account_id.clone())) - .map(|(permission_id, account_id)| AccountPermissionChanged { + .map(|(permission, account_id)| AccountPermissionChanged { account: account_id, - permission: permission_id, + permission, }) .map(AccountEvent::PermissionRemoved) .chain(std::iter::once(AccountEvent::RoleRevoked( @@ -427,15 +417,12 @@ pub mod isi { state_transaction: &mut StateTransaction<'_, '_>, value: &AssetValue, ) -> Result<(), Error> { - let expected_asset_value_type = match value.value_type() { - AssetValueType::Numeric(_) => asset::isi::expected_asset_value_type_numeric, - AssetValueType::Store => asset::isi::expected_asset_value_type_store, + let expected_asset_type = match value.type_() { + AssetType::Numeric(_) => asset::isi::expected_asset_type_numeric, + AssetType::Store => asset::isi::expected_asset_type_store, }; - let definition = asset::isi::assert_asset_type( - definition_id, - state_transaction, - expected_asset_value_type, - )?; + let definition = + asset::isi::assert_asset_type(definition_id, state_transaction, expected_asset_type)?; if let AssetValue::Numeric(numeric) = value { assert_numeric_spec(numeric, &definition)?; } @@ -494,9 +481,9 @@ pub mod query { use eyre::Result; use iroha_data_model::{ - account::Account, metadata::MetadataValueBox, permission::Permission, - query::error::QueryExecutionFail as Error, + account::Account, permission::Permission, query::error::QueryExecutionFail as Error, }; + use iroha_primitives::json::JsonString; use super::*; use crate::state::StateReadOnly; @@ -563,7 +550,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let id = &self.domain_id; + let id = &self.domain; 
iroha_logger::trace!(%id); Ok(Box::new( @@ -574,7 +561,7 @@ pub mod query { impl ValidQuery for FindAccountKeyValueByIdAndKey { #[metrics(+"find_account_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { let id = &self.id; let key = &self.key; iroha_logger::trace!(%id, %key); @@ -592,7 +579,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let asset_definition_id = self.asset_definition_id.clone(); + let asset_definition_id = self.asset_definition.clone(); iroha_logger::trace!(%asset_definition_id); Ok(Box::new( diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index 38621f4ec5d..d3221810474 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ b/core/src/smartcontracts/isi/asset.rs @@ -18,7 +18,7 @@ impl Registrable for NewAssetDefinition { fn build(self, authority: &AccountId) -> Self::Target { Self::Target { id: self.id, - value_type: self.value_type, + type_: self.type_, mintable: self.mintable, logo: self.logo, metadata: self.metadata, @@ -32,7 +32,7 @@ impl Registrable for NewAssetDefinition { /// - update metadata /// - transfer, etc. 
pub mod isi { - use iroha_data_model::{asset::AssetValueType, isi::error::MintabilityError}; + use iroha_data_model::{asset::AssetType, isi::error::MintabilityError}; use super::*; use crate::smartcontracts::account::isi::forbid_minting; @@ -49,7 +49,7 @@ pub mod isi { assert_asset_type( &asset_id.definition, state_transaction, - expected_asset_value_type_store, + expected_asset_type_store, )?; // Increase `Store` asset total quantity by 1 if asset was not present earlier @@ -62,21 +62,16 @@ pub mod isi { .increase_asset_total_amount(&asset_id.definition, Numeric::ONE)?; } - let asset_metadata_limits = state_transaction.config.asset_metadata_limits; let asset = state_transaction .world - .asset_or_insert(asset_id.clone(), Metadata::new())?; + .asset_or_insert(asset_id.clone(), Metadata::default())?; { let AssetValue::Store(store) = &mut asset.value else { return Err(Error::Conversion("Expected store asset type".to_owned())); }; - store.insert_with_limits( - self.key.clone(), - self.value.clone(), - asset_metadata_limits, - )?; + store.insert(self.key.clone(), self.value.clone()); } state_transaction @@ -103,7 +98,7 @@ pub mod isi { assert_asset_type( &asset_id.definition, state_transaction, - expected_asset_value_type_store, + expected_asset_type_store, )?; let value = { @@ -141,7 +136,7 @@ pub mod isi { assert_asset_type( &asset_id.definition, state_transaction, - expected_asset_value_type_store, + expected_asset_type_store, )?; let asset = state_transaction @@ -183,7 +178,7 @@ pub mod isi { let asset_definition = assert_asset_type( &asset_id.definition, state_transaction, - expected_asset_value_type_numeric, + expected_asset_type_numeric, )?; assert_numeric_spec(&self.object, &asset_definition)?; @@ -231,7 +226,7 @@ pub mod isi { let asset_definition = assert_asset_type( &asset_id.definition, state_transaction, - expected_asset_value_type_numeric, + expected_asset_type_numeric, )?; assert_numeric_spec(&self.object, &asset_definition)?; @@ -286,7 +281,7 @@ pub mod 
isi { let asset_definition = assert_asset_type( &source_id.definition, state_transaction, - expected_asset_value_type_numeric, + expected_asset_type_numeric, )?; assert_numeric_spec(&self.object, &asset_definition)?; @@ -348,34 +343,34 @@ pub mod isi { asset_definition: &AssetDefinition, ) -> Result { let object_spec = NumericSpec::fractional(object.scale()); - let object_asset_value_type = AssetValueType::Numeric(object_spec); - let asset_definition_spec = match asset_definition.value_type { - AssetValueType::Numeric(spec) => spec, + let object_asset_type = AssetType::Numeric(object_spec); + let asset_definition_spec = match asset_definition.type_ { + AssetType::Numeric(spec) => spec, other => { return Err(TypeError::from(Mismatch { expected: other, - actual: object_asset_value_type, + actual: object_asset_type, }) .into()) } }; asset_definition_spec.check(object).map_err(|_| { TypeError::from(Mismatch { - expected: AssetValueType::Numeric(asset_definition_spec), - actual: object_asset_value_type, + expected: AssetType::Numeric(asset_definition_spec), + actual: object_asset_type, }) })?; Ok(asset_definition_spec) } - /// Asserts that asset definition with [`definition_id`] has asset type [`expected_value_type`]. + /// Asserts that asset definition with [`definition_id`] has asset type [`expected_type`]. 
pub(crate) fn assert_asset_type( definition_id: &AssetDefinitionId, state_transaction: &StateTransaction<'_, '_>, - expected_value_type: impl Fn(&AssetValueType) -> Result<(), TypeError>, + expected_type: impl Fn(&AssetType) -> Result<(), TypeError>, ) -> Result { let asset_definition = state_transaction.world.asset_definition(definition_id)?; - expected_value_type(&asset_definition.value_type) + expected_type(&asset_definition.type_) .map(|()| asset_definition) .map_err(Into::into) } @@ -402,21 +397,17 @@ pub mod isi { } } - pub(crate) fn expected_asset_value_type_numeric( - asset_value_type: &AssetValueType, - ) -> Result<(), TypeError> { - match asset_value_type { - AssetValueType::Numeric(_) => Ok(()), - other => Err(TypeError::NumericAssetValueTypeExpected(*other)), + pub(crate) fn expected_asset_type_numeric(asset_type: &AssetType) -> Result<(), TypeError> { + match asset_type { + AssetType::Numeric(_) => Ok(()), + other => Err(TypeError::NumericAssetTypeExpected(*other)), } } - pub(crate) fn expected_asset_value_type_store( - asset_value_type: &AssetValueType, - ) -> Result<(), TypeError> { - match asset_value_type { - AssetValueType::Store => Ok(()), - other => Err(TypeError::NumericAssetValueTypeExpected(*other)), + pub(crate) fn expected_asset_type_store(asset_type: &AssetType) -> Result<(), TypeError> { + match asset_type { + AssetType::Store => Ok(()), + other => Err(TypeError::NumericAssetTypeExpected(*other)), } } } @@ -426,9 +417,9 @@ pub mod query { use eyre::Result; use iroha_data_model::{ asset::{Asset, AssetDefinition, AssetValue}, - metadata::MetadataValueBox, query::{asset::FindAssetDefinitionById, error::QueryExecutionFail as Error}, }; + use iroha_primitives::json::JsonString; use super::*; use crate::state::StateReadOnly; @@ -522,7 +513,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let id = &self.account_id; + let id = &self.account; iroha_logger::trace!(%id); 
Ok(Box::new(state_ro.world().account_assets(id)?.cloned())) } @@ -534,7 +525,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let id = self.asset_definition_id.clone(); + let id = self.asset_definition.clone(); iroha_logger::trace!(%id); Ok(Box::new( state_ro @@ -559,7 +550,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let id = &self.domain_id; + let id = &self.domain; iroha_logger::trace!(%id); Ok(Box::new( state_ro @@ -577,8 +568,8 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, Error> { - let domain_id = self.domain_id.clone(); - let asset_definition_id = self.asset_definition_id.clone(); + let domain_id = self.domain.clone(); + let asset_definition_id = self.asset_definition.clone(); let domain = state_ro.world().domain(&domain_id)?; let _definition = domain .asset_definitions @@ -641,7 +632,7 @@ pub mod query { impl ValidQuery for FindAssetKeyValueByIdAndKey { #[metrics(+"find_asset_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { let id = &self.id; let key = &self.key; let asset = state_ro.world().asset(id).map_err(|asset_err| { diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs index 5dcad8e7b58..81d80232226 100644 --- a/core/src/smartcontracts/isi/domain.rs +++ b/core/src/smartcontracts/isi/domain.rs @@ -57,7 +57,7 @@ pub mod isi { let _domain = state_transaction.world.domain_mut(&account_id.domain)?; if state_transaction.world.account(&account_id).is_ok() { return Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::AccountId(account_id), } .into()); @@ -102,6 +102,13 @@ pub mod isi { .expect("should succeed") }); + state_transaction + .world + .account_permissions + .remove(account_id.clone()); + + 
state_transaction.world.remove_account_roles(&account_id); + if state_transaction .world .accounts @@ -127,11 +134,6 @@ pub mod isi { state_transaction: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { let asset_definition = self.object.build(authority); - asset_definition - .id() - .name - .validate_len(state_transaction.config.ident_length_limits) - .map_err(Error::from)?; let asset_definition_id = asset_definition.id().clone(); let domain = state_transaction @@ -139,7 +141,7 @@ pub mod isi { .domain_mut(&asset_definition_id.domain)?; if domain.asset_definitions.contains_key(&asset_definition_id) { return Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::AssetDefinitionId(asset_definition_id), } .into()); @@ -230,16 +232,14 @@ pub mod isi { ) -> Result<(), Error> { let asset_definition_id = self.object; - let metadata_limits = state_transaction.config.asset_definition_metadata_limits; state_transaction .world .asset_definition_mut(&asset_definition_id) .map_err(Error::from) - .and_then(|asset_definition| { + .map(|asset_definition| { asset_definition .metadata - .insert_with_limits(self.key.clone(), self.value.clone(), metadata_limits) - .map_err(Error::from) + .insert(self.key.clone(), self.value.clone()) })?; state_transaction @@ -298,12 +298,8 @@ pub mod isi { ) -> Result<(), Error> { let domain_id = self.object; - let limits = state_transaction.config.domain_metadata_limits; - let domain = state_transaction.world.domain_mut(&domain_id)?; - domain - .metadata - .insert_with_limits(self.key.clone(), self.value.clone(), limits)?; + domain.metadata.insert(self.key.clone(), self.value.clone()); state_transaction .world @@ -381,9 +377,8 @@ pub mod isi { /// Query module provides [`Query`] Domain related implementations. 
pub mod query { use eyre::Result; - use iroha_data_model::{ - domain::Domain, metadata::MetadataValueBox, query::error::QueryExecutionFail as Error, - }; + use iroha_data_model::{domain::Domain, query::error::QueryExecutionFail as Error}; + use iroha_primitives::json::JsonString; use super::*; use crate::state::StateReadOnly; @@ -409,7 +404,7 @@ pub mod query { impl ValidQuery for FindDomainKeyValueByIdAndKey { #[metrics(+"find_domain_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { let id = &self.id; let key = &self.key; iroha_logger::trace!(%id, %key); @@ -423,7 +418,7 @@ pub mod query { impl ValidQuery for FindAssetDefinitionKeyValueByIdAndKey { #[metrics(+"find_asset_definition_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { let id = &self.id; let key = &self.key; iroha_logger::trace!(%id, %key); diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index 72282ac66b4..156473d0a7d 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -48,14 +48,12 @@ impl Execute for InstructionBox { Self::Mint(isi) => isi.execute(authority, state_transaction), Self::Burn(isi) => isi.execute(authority, state_transaction), Self::Transfer(isi) => isi.execute(authority, state_transaction), - Self::Fail(isi) => isi.execute(authority, state_transaction), Self::SetKeyValue(isi) => isi.execute(authority, state_transaction), Self::RemoveKeyValue(isi) => isi.execute(authority, state_transaction), Self::Grant(isi) => isi.execute(authority, state_transaction), Self::Revoke(isi) => isi.execute(authority, state_transaction), Self::ExecuteTrigger(isi) => isi.execute(authority, state_transaction), Self::SetParameter(isi) => isi.execute(authority, state_transaction), - Self::NewParameter(isi) => isi.execute(authority, 
state_transaction), Self::Upgrade(isi) => isi.execute(authority, state_transaction), Self::Log(isi) => isi.execute(authority, state_transaction), Self::Custom(_) => { @@ -191,18 +189,6 @@ impl Execute for RemoveKeyValueBox { } } -impl Execute for Fail { - fn execute( - self, - _authority: &AccountId, - _state_transaction: &mut StateTransaction<'_, '_>, - ) -> Result<(), Error> { - iroha_logger::trace!(?self); - - Err(Error::Fail(self.message)) - } -} - impl Execute for GrantBox { #[iroha_logger::log(name = "grant", skip_all, fields(object))] fn execute( @@ -243,7 +229,6 @@ mod tests { use core::str::FromStr as _; use std::sync::Arc; - use iroha_data_model::metadata::MetadataValueBox; use test_samples::{ gen_account_in, ALICE_ID, SAMPLE_GENESIS_ACCOUNT_ID, SAMPLE_GENESIS_ACCOUNT_KEYPAIR, }; @@ -285,25 +270,15 @@ mod tests { let account_id = ALICE_ID.clone(); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - SetKeyValue::asset( - asset_id.clone(), - Name::from_str("Bytes")?, - vec![1_u32, 2_u32, 3_u32], - ) - .execute(&account_id, &mut state_transaction)?; + let key = Name::from_str("Bytes")?; + SetKeyValue::asset(asset_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) + .execute(&account_id, &mut state_transaction)?; let asset = state_transaction.world.asset(&asset_id)?; let AssetValue::Store(store) = &asset.value else { panic!("expected store asset"); }; - let bytes = store.get(&"Bytes".parse::().expect("Valid")).cloned(); - assert_eq!( - bytes, - Some(MetadataValueBox::Vec(vec![ - 1_u32.into(), - 2_u32.into(), - 3_u32.into(), - ])) - ); + let value = store.get(&key).cloned(); + assert_eq!(value, Some(vec![1_u32, 2_u32, 3_u32,].into())); Ok(()) } @@ -314,28 +289,13 @@ mod tests { let mut state_block = state.block(); let mut state_transaction = state_block.transaction(); let account_id = ALICE_ID.clone(); - SetKeyValue::account( - account_id.clone(), - 
Name::from_str("Bytes")?, - vec![1_u32, 2_u32, 3_u32], - ) - .execute(&account_id, &mut state_transaction)?; + let key = Name::from_str("Bytes")?; + SetKeyValue::account(account_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) + .execute(&account_id, &mut state_transaction)?; let bytes = state_transaction .world - .map_account(&account_id, |account| { - account - .metadata() - .get(&Name::from_str("Bytes").expect("Valid")) - .cloned() - })?; - assert_eq!( - bytes, - Some(MetadataValueBox::Vec(vec![ - 1_u32.into(), - 2_u32.into(), - 3_u32.into(), - ])) - ); + .map_account(&account_id, |account| account.metadata().get(&key).cloned())?; + assert_eq!(bytes, Some(vec![1_u32, 2_u32, 3_u32,].into())); Ok(()) } @@ -347,26 +307,20 @@ mod tests { let mut state_transaction = state_block.transaction(); let definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let account_id = ALICE_ID.clone(); + let key = Name::from_str("Bytes")?; SetKeyValue::asset_definition( definition_id.clone(), - Name::from_str("Bytes")?, + key.clone(), vec![1_u32, 2_u32, 3_u32], ) .execute(&account_id, &mut state_transaction)?; - let bytes = state_transaction + let value = state_transaction .world .asset_definition(&definition_id)? .metadata() - .get(&Name::from_str("Bytes")?) 
+ .get(&key) .cloned(); - assert_eq!( - bytes, - Some(MetadataValueBox::Vec(vec![ - 1_u32.into(), - 2_u32.into(), - 3_u32.into(), - ])) - ); + assert_eq!(value, Some(vec![1_u32, 2_u32, 3_u32,].into())); Ok(()) } @@ -378,26 +332,16 @@ mod tests { let mut state_transaction = state_block.transaction(); let domain_id = DomainId::from_str("wonderland")?; let account_id = ALICE_ID.clone(); - SetKeyValue::domain( - domain_id.clone(), - Name::from_str("Bytes")?, - vec![1_u32, 2_u32, 3_u32], - ) - .execute(&account_id, &mut state_transaction)?; + let key = Name::from_str("Bytes")?; + SetKeyValue::domain(domain_id.clone(), key.clone(), vec![1_u32, 2_u32, 3_u32]) + .execute(&account_id, &mut state_transaction)?; let bytes = state_transaction .world .domain(&domain_id)? .metadata() - .get(&Name::from_str("Bytes")?) + .get(&key) .cloned(); - assert_eq!( - bytes, - Some(MetadataValueBox::Vec(vec![ - 1_u32.into(), - 2_u32.into(), - 3_u32.into(), - ])) - ); + assert_eq!(bytes, Some(vec![1_u32, 2_u32, 3_u32,].into())); Ok(()) } @@ -497,7 +441,7 @@ mod tests { let tx = TransactionBuilder::new(chain_id.clone(), SAMPLE_GENESIS_ACCOUNT_ID.clone()) .with_instructions(instructions) .sign(SAMPLE_GENESIS_ACCOUNT_KEYPAIR.private_key()); - let tx_limits = state_block.transaction_executor().transaction_limits; + let tx_limits = state_block.transaction_executor().limits; assert!(matches!( AcceptedTransaction::accept(tx, &chain_id, tx_limits), Err(AcceptTransactionFail::UnexpectedGenesisAccountSignature) diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index a7f295cba25..4702533ad8c 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -165,10 +165,11 @@ impl_lazy! 
{ iroha_data_model::account::Account, iroha_data_model::domain::Domain, iroha_data_model::block::BlockHeader, - iroha_data_model::metadata::MetadataValueBox, + iroha_primitives::json::JsonString, iroha_data_model::query::TransactionQueryOutput, iroha_data_model::executor::ExecutorDataModel, iroha_data_model::trigger::Trigger, + iroha_data_model::parameter::Parameters, } /// Query Request statefully validated on the Iroha node side. @@ -256,6 +257,7 @@ impl ValidQuery for QueryBox { FindAssetDefinitionKeyValueByIdAndKey, FindTriggerKeyValueByIdAndKey, FindExecutorDataModel, + FindAllParameters, } FindAllAccounts, @@ -281,7 +283,6 @@ impl ValidQuery for QueryBox { FindAllRoles, FindAllRoleIds, FindRolesByAccountId, - FindAllParameters, } } } @@ -291,9 +292,9 @@ mod tests { use std::str::FromStr as _; use iroha_crypto::{Hash, HashOf, KeyPair}; - use iroha_data_model::{ - metadata::MetadataValueBox, query::error::FindError, transaction::TransactionLimits, - }; + use iroha_data_model::{parameter::TransactionParameters, query::error::FindError}; + use iroha_primitives::json::JsonString; + use nonzero_ext::nonzero; use test_samples::{gen_account_in, ALICE_ID, ALICE_KEYPAIR}; use tokio::test; @@ -331,12 +332,11 @@ mod tests { ) .is_none()); - let mut store = Metadata::new(); + let mut store = Metadata::default(); store - .insert_with_limits( + .insert( Name::from_str("Bytes").expect("Valid"), - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - MetadataLimits::new(10, 100), + vec![1_u32, 2_u32, 3_u32], ) .unwrap(); let asset_id = AssetId::new(asset_definition_id, account.id().clone()); @@ -347,12 +347,8 @@ mod tests { } fn world_with_test_account_with_metadata() -> Result { - let mut metadata = Metadata::new(); - metadata.insert_with_limits( - Name::from_str("Bytes")?, - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - MetadataLimits::new(10, 100), - )?; + let mut metadata = Metadata::default(); + 
metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); let mut domain = Domain::new(DomainId::from_str("wonderland")?).build(&ALICE_ID); let account = Account::new(ALICE_ID.clone()) @@ -377,16 +373,16 @@ mod tests { let state = State::new(world_with_test_domains(), kura.clone(), query_handle); { let mut state_block = state.block(); - let limits = TransactionLimits { - max_instruction_number: 1, - max_wasm_size_bytes: 0, + let limits = TransactionParameters { + max_instructions: nonzero!(1000_u64), + smart_contract_size: nonzero!(1024_u64), }; - let huge_limits = TransactionLimits { - max_instruction_number: 1000, - max_wasm_size_bytes: 0, + let huge_limits = TransactionParameters { + max_instructions: nonzero!(1000_u64), + smart_contract_size: nonzero!(1024_u64), }; - state_block.config.transaction_limits = limits; + state_block.world.parameters.transaction = limits; let valid_tx = { let instructions: [InstructionBox; 0] = []; @@ -396,9 +392,9 @@ mod tests { AcceptedTransaction::accept(tx, &chain_id, limits)? }; let invalid_tx = { - let isi = Fail::new("fail".to_owned()); + let fail_isi = Unregister::domain("dummy".parse().unwrap()); let tx = TransactionBuilder::new(chain_id.clone(), ALICE_ID.clone()) - .with_instructions([isi.clone(), isi]) + .with_instructions([fail_isi.clone(), fail_isi]) .sign(ALICE_KEYPAIR.private_key()); AcceptedTransaction::accept(tx, &chain_id, huge_limits)? }; @@ -448,10 +444,7 @@ mod tests { let asset_id = AssetId::new(asset_definition_id, ALICE_ID.clone()); let bytes = FindAssetKeyValueByIdAndKey::new(asset_id, Name::from_str("Bytes")?) .execute(&state.view())?; - assert_eq!( - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - bytes, - ); + assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) } @@ -463,10 +456,7 @@ mod tests { let bytes = FindAccountKeyValueByIdAndKey::new(ALICE_ID.clone(), Name::from_str("Bytes")?) 
.execute(&state.view())?; - assert_eq!( - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - bytes, - ); + assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) } @@ -561,7 +551,7 @@ mod tests { .with_instructions(instructions) .sign(ALICE_KEYPAIR.private_key()); - let tx_limits = state_block.transaction_executor().transaction_limits; + let tx_limits = state_block.transaction_executor().limits; let va_tx = AcceptedTransaction::accept(tx, &chain_id, tx_limits)?; let (peer_public_key, _) = KeyPair::random().into_parts(); @@ -606,12 +596,8 @@ mod tests { async fn domain_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let state = { - let mut metadata = Metadata::new(); - metadata.insert_with_limits( - Name::from_str("Bytes")?, - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - MetadataLimits::new(10, 100), - )?; + let mut metadata = Metadata::default(); + metadata.insert(Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32]); let mut domain = Domain::new(DomainId::from_str("wonderland")?) 
.with_metadata(metadata) .build(&ALICE_ID); @@ -633,10 +619,7 @@ mod tests { let domain_id = DomainId::from_str("wonderland")?; let key = Name::from_str("Bytes")?; let bytes = FindDomainKeyValueByIdAndKey::new(domain_id, key).execute(&state.view())?; - assert_eq!( - MetadataValueBox::Vec(vec![1_u32.into(), 2_u32.into(), 3_u32.into()]), - bytes, - ); + assert_eq!(JsonString::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); Ok(()) } } diff --git a/core/src/smartcontracts/isi/triggers/mod.rs b/core/src/smartcontracts/isi/triggers/mod.rs index aeb80783ea6..03e15e11d27 100644 --- a/core/src/smartcontracts/isi/triggers/mod.rs +++ b/core/src/smartcontracts/isi/triggers/mod.rs @@ -42,7 +42,7 @@ pub mod isi { } let last_block_estimation = state_transaction.latest_block().map(|block| { - block.header().timestamp() + block.header().creation_time() + Duration::from_millis(block.header().consensus_estimation_ms) }); @@ -99,7 +99,7 @@ pub mod isi { if !success { return Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: trigger_id.into(), } .into()); @@ -130,7 +130,7 @@ pub mod isi { Ok(()) } else { Err(RepetitionError { - instruction_type: InstructionType::Unregister, + instruction: InstructionType::Unregister, id: trigger_id.into(), } .into()) @@ -213,18 +213,15 @@ pub mod isi { ) -> Result<(), Error> { let trigger_id = self.object; - let trigger_metadata_limits = state_transaction.config.account_metadata_limits; state_transaction .world .triggers .inspect_by_id_mut(&trigger_id, |action| { - action.metadata_mut().insert_with_limits( - self.key.clone(), - self.value.clone(), - trigger_metadata_limits, - ) + action + .metadata_mut() + .insert(self.key.clone(), self.value.clone()) }) - .ok_or(FindError::Trigger(trigger_id.clone()))??; + .ok_or(FindError::Trigger(trigger_id.clone()))?; state_transaction .world @@ -320,10 +317,10 @@ pub mod isi { pub mod query { //! Queries associated to triggers. 
use iroha_data_model::{ - metadata::MetadataValueBox, query::error::QueryExecutionFail as Error, trigger::{Trigger, TriggerId}, }; + use iroha_primitives::json::JsonString; use super::*; use crate::{prelude::*, smartcontracts::triggers::set::SetReadOnly, state::StateReadOnly}; @@ -364,7 +361,7 @@ pub mod query { impl ValidQuery for FindTriggerKeyValueByIdAndKey { #[metrics(+"find_trigger_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { let id = &self.id; let key = &self.key; iroha_logger::trace!(%id, %key); @@ -389,7 +386,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> eyre::Result + 'state>, Error> { - let account_id = self.account_id.clone(); + let account_id = self.account.clone(); Ok(Box::new( state_ro @@ -417,7 +414,7 @@ pub mod query { &self, state_ro: &'state impl StateReadOnly, ) -> eyre::Result + 'state>, Error> { - let domain_id = self.domain_id.clone(); + let domain_id = self.domain.clone(); Ok(Box::new( state_ro diff --git a/core/src/smartcontracts/isi/triggers/specialized.rs b/core/src/smartcontracts/isi/triggers/specialized.rs index 5409cf8a070..a3deafa5d83 100644 --- a/core/src/smartcontracts/isi/triggers/specialized.rs +++ b/core/src/smartcontracts/isi/triggers/specialized.rs @@ -45,7 +45,7 @@ impl SpecializedAction { // TODO: At this point the authority is meaningless. 
authority, filter, - metadata: Metadata::new(), + metadata: Metadata::default(), } } } diff --git a/core/src/smartcontracts/isi/tx.rs b/core/src/smartcontracts/isi/tx.rs index e10d8b0f594..886c5a507a6 100644 --- a/core/src/smartcontracts/isi/tx.rs +++ b/core/src/smartcontracts/isi/tx.rs @@ -87,7 +87,7 @@ impl ValidQuery for FindTransactionsByAccountId { &self, state_ro: &'state impl StateReadOnly, ) -> Result + 'state>, QueryExecutionFail> { - let account_id = self.account_id.clone(); + let account_id = self.account.clone(); Ok(Box::new( state_ro diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index ee421f1afe2..ff65fdee954 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -20,11 +20,12 @@ pub mod isi { use eyre::Result; use iroha_data_model::{ isi::error::{InstructionExecutionError, InvalidParameterError, RepetitionError}, + parameter::{CustomParameter, Parameter}, prelude::*, query::error::FindError, Level, }; - use iroha_primitives::unique_vec::PushResult; + use iroha_primitives::{json::JsonString, unique_vec::PushResult}; use super::*; @@ -41,7 +42,7 @@ pub mod isi { if let PushResult::Duplicate(duplicate) = world.trusted_peers_ids.push(peer_id.clone()) { return Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::PeerId(duplicate), } .into()); @@ -84,11 +85,6 @@ pub mod isi { let domain: Domain = self.object.build(authority); let domain_id = domain.id().clone(); - domain_id - .name - .validate_len(state_transaction.config.ident_length_limits) - .map_err(Error::from)?; - if domain_id == *iroha_genesis::GENESIS_DOMAIN_ID { return Err(InstructionExecutionError::InvariantViolation( "Not allowed to register genesis domain".to_owned(), @@ -98,14 +94,13 @@ pub mod isi { let world = &mut state_transaction.world; if world.domains.get(&domain_id).is_some() { return Err(RepetitionError { - instruction_type: 
InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::DomainId(domain_id), } .into()); } world.domains.insert(domain_id, domain.clone()); - world.emit_events(Some(DomainEvent::Created(domain))); Ok(()) @@ -145,6 +140,13 @@ pub mod isi { .map(|account| account.id().clone()) .collect(); for account in remove_accounts { + state_transaction + .world + .account_permissions + .remove(account.clone()); + + state_transaction.world.remove_account_roles(&account); + state_transaction.world.accounts.remove(account); } @@ -176,7 +178,7 @@ pub mod isi { if state_transaction.world.roles.get(role.id()).is_some() { return Err(RepetitionError { - instruction_type: InstructionType::Register, + instruction: InstructionType::Register, id: IdBox::RoleId(role.id), } .into()); @@ -246,8 +248,8 @@ pub mod isi { if !role.permissions.insert(permission.clone()) { return Err(RepetitionError { - instruction_type: InstructionType::Grant, - id: permission.id.into(), + instruction: InstructionType::Grant, + id: permission.into(), } .into()); } @@ -256,7 +258,7 @@ pub mod isi { .world .emit_events(Some(RoleEvent::PermissionAdded(RolePermissionChanged { role: role_id, - permission: permission.id, + permission, }))); Ok(()) @@ -272,21 +274,20 @@ pub mod isi { ) -> Result<(), Error> { let role_id = self.destination; let permission = self.object; - let permission_id = permission.id.clone(); let Some(role) = state_transaction.world.roles.get_mut(&role_id) else { return Err(FindError::Role(role_id).into()); }; if !role.permissions.remove(&permission) { - return Err(FindError::Permission(permission_id).into()); + return Err(FindError::Permission(permission).into()); } state_transaction .world .emit_events(Some(RoleEvent::PermissionRemoved(RolePermissionChanged { role: role_id, - permission: permission_id, + permission, }))); Ok(()) @@ -300,43 +301,70 @@ pub mod isi { _authority: &AccountId, state_transaction: &mut StateTransaction<'_, '_>, ) -> Result<(), Error> { - let 
parameter = self.parameter; - let parameter_id = parameter.id.clone(); - - if !state_transaction.world.parameters.swap_remove(¶meter) { - return Err(FindError::Parameter(parameter_id).into()); + macro_rules! set_parameter { + ($($container:ident($param:ident.$field:ident) => $single:ident::$variant:ident),* $(,)?) => { + match self.0 { $( + Parameter::$container(iroha_data_model::parameter::$single::$variant(next)) => { + let prev = core::mem::replace( + &mut state_transaction.world.parameters.$param.$field, + next, + ); + + state_transaction.world.emit_events( + Some(ConfigurationEvent::Changed(ParameterChanged { + old_value: Parameter::$container(iroha_data_model::parameter::$single::$variant( + prev, + )), + new_value: Parameter::$container(iroha_data_model::parameter::$single::$variant( + next, + )), + })) + ); + })* + Parameter::Custom(next) => { + let prev = state_transaction + .world + .parameters + .custom + .insert(next.id.clone(), next.clone()) + .unwrap_or_else(|| { + iroha_logger::error!( + "{}: Initial parameter value not set during executor migration", + next.id + ); + + CustomParameter { + id: next.id.clone(), + payload: JsonString::default(), + } + }); + + state_transaction + .world + .emit_events(Some(ConfigurationEvent::Changed(ParameterChanged { + old_value: Parameter::Custom(prev), + new_value: Parameter::Custom(next), + }))); + } + } + }; } - state_transaction.world.parameters.insert(parameter.clone()); - state_transaction - .world - .emit_events(Some(ConfigurationEvent::Changed(parameter_id))); - state_transaction.try_apply_core_parameter(parameter); - Ok(()) - } - } + set_parameter!( + Sumeragi(sumeragi.block_time_ms) => SumeragiParameter::BlockTimeMs, + Sumeragi(sumeragi.commit_time_ms) => SumeragiParameter::CommitTimeMs, - impl Execute for NewParameter { - #[metrics(+"new_parameter")] - fn execute( - self, - _authority: &AccountId, - state_transaction: &mut StateTransaction<'_, '_>, - ) -> Result<(), Error> { - let parameter = 
self.parameter; - let parameter_id = parameter.id.clone(); + Block(block.max_transactions) => BlockParameter::MaxTransactions, - if !state_transaction.world.parameters.insert(parameter.clone()) { - return Err(RepetitionError { - instruction_type: InstructionType::NewParameter, - id: IdBox::ParameterId(parameter_id), - } - .into()); - } - state_transaction - .world - .emit_events(Some(ConfigurationEvent::Created(parameter_id))); - state_transaction.try_apply_core_parameter(parameter); + Transaction(transaction.max_instructions) => TransactionParameter::MaxInstructions, + Transaction(transaction.smart_contract_size) => TransactionParameter::SmartContractSize, + + SmartContract(smart_contract.fuel) => SmartContractParameter::Fuel, + SmartContract(smart_contract.memory) => SmartContractParameter::Memory, + + Executor(executor.fuel) => SmartContractParameter::Fuel, + Executor(executor.memory) => SmartContractParameter::Memory, + ); Ok(()) } @@ -400,7 +428,7 @@ pub mod isi { pub mod query { use eyre::Result; use iroha_data_model::{ - parameter::Parameter, + parameter::Parameters, peer::Peer, prelude::*, query::error::{FindError, QueryExecutionFail as Error}, @@ -478,11 +506,8 @@ pub mod query { impl ValidQuery for FindAllParameters { #[metrics("find_all_parameters")] - fn execute<'state>( - &self, - state_ro: &'state impl StateReadOnly, - ) -> Result + 'state>, Error> { - Ok(Box::new(state_ro.world().parameters_iter().cloned())) + fn execute(&self, state_ro: &impl StateReadOnly) -> Result { + Ok(state_ro.world().parameters().clone()) } } } diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index 9ab00dcbcf3..733deb3ccd2 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -2,17 +2,17 @@ //! `WebAssembly` VM Smartcontracts can be written in Rust, compiled //! 
to wasm format and submitted in a transaction -use std::borrow::Borrow; +use std::{borrow::Borrow, num::NonZeroU64}; use error::*; use import::traits::{ExecuteOperations as _, GetExecutorPayloads as _, SetDataModel as _}; -use iroha_config::parameters::actual::WasmRuntime as Config; use iroha_data_model::{ account::AccountId, executor::{self, ExecutorDataModel, MigrationResult}, isi::InstructionBox, + parameter::SmartContractParameters as Config, prelude::*, - query::{QueryBox, QueryId, QueryOutputBox, QueryRequest, SmartContractQuery}, + query::{cursor::QueryId, QueryBox, QueryOutputBox, QueryRequest, SmartContractQuery}, smart_contract::payloads::{self, Validate}, BatchedResponse, Level as LogLevel, ValidationFail, }; @@ -299,12 +299,12 @@ struct LimitsExecutor { /// Number of instructions in the smartcontract instruction_count: u64, /// Max allowed number of instructions in the smartcontract - max_instruction_count: u64, + max_instruction_count: NonZeroU64, } impl LimitsExecutor { /// Create new [`LimitsExecutor`] - pub fn new(max_instruction_count: u64) -> Self { + pub fn new(max_instruction_count: NonZeroU64) -> Self { Self { instruction_count: 0, max_instruction_count, @@ -320,7 +320,7 @@ impl LimitsExecutor { pub fn check_instruction_limits(&mut self) -> Result<(), ValidationFail> { self.instruction_count += 1; - if self.instruction_count > self.max_instruction_count { + if self.instruction_count > self.max_instruction_count.get() { return Err(ValidationFail::TooComplex); } @@ -344,8 +344,14 @@ pub mod state { /// Panics if failed to convert `u32` into `usize` which should not happen /// on any supported platform pub fn store_limits_from_config(config: &Config) -> StoreLimits { + let memory_size = config + .memory + .get() + .try_into() + .expect("`SmarContractParameters::memory` exceeds usize::MAX"); + StoreLimitsBuilder::new() - .memory_size(config.max_memory.get() as usize) + .memory_size(memory_size) .instances(1) .memories(1) .tables(1) @@ -738,7 +744,7 
@@ impl Runtime> { store.limiter(|s| &mut s.store_limits); store - .set_fuel(self.config.fuel_limit) + .set_fuel(self.config.fuel.get()) .expect("Wasm Runtime config is malformed, this is a bug"); store @@ -812,7 +818,7 @@ where }?; match &batched { BatchedResponse::V1(batched) => { - if let Some(query_id) = &batched.cursor.query_id { + if let Some(query_id) = &batched.cursor.query { state.executed_queries.insert(query_id.clone()); } } @@ -822,7 +828,7 @@ where QueryRequest::Cursor(cursor) => { // In a normal situation we already have this `query_id` stored, // so that's a protection from malicious smart contract - if let Some(query_id) = &cursor.query_id { + if let Some(query_id) = &cursor.query { state.executed_queries.insert(query_id.clone()); } state @@ -899,7 +905,7 @@ impl<'wrld, 'block: 'wrld, 'state: 'block> Runtime, authority: AccountId, bytes: impl AsRef<[u8]>, - max_instruction_count: u64, + max_instruction_count: NonZeroU64, ) -> Result<()> { let span = wasm_log_span!("Smart contract validation", %authority); let state = state::SmartContract::new( @@ -1706,6 +1712,7 @@ impl GetExport for (&wasmtime::Instance, C) { #[cfg(test)] mod tests { use iroha_data_model::query::{predicate::PredicateBox, sorting::Sorting, Pagination}; + use nonzero_ext::nonzero; use parity_scale_codec::Encode; use test_samples::gen_account_in; use tokio::test; @@ -1893,7 +1900,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::Other(report)) = res.expect_err("Execution should fail") @@ -1942,7 +1954,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + 
authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") @@ -1986,7 +2003,12 @@ mod tests { ); let mut runtime = RuntimeBuilder::::new().build()?; - let res = runtime.validate(&mut state.block().transaction(), authority, wat, 1); + let res = runtime.validate( + &mut state.block().transaction(), + authority, + wat, + nonzero!(1_u64), + ); if let Error::ExportFnCall(ExportFnCallError::HostExecution(report)) = res.expect_err("Execution should fail") diff --git a/core/src/state.rs b/core/src/state.rs index 0c00cb5467b..2c711280929 100644 --- a/core/src/state.rs +++ b/core/src/state.rs @@ -1,15 +1,9 @@ //! This module provides the [`State`] — an in-memory representation of the current blockchain state. use std::{ - borrow::Borrow, - collections::BTreeSet, - marker::PhantomData, - num::{NonZeroU32, NonZeroUsize}, - sync::Arc, - time::Duration, + collections::BTreeSet, marker::PhantomData, num::NonZeroUsize, sync::Arc, time::Duration, }; use eyre::Result; -use iroha_config::{base::util::Bytes, parameters::actual::ChainWide as Config}; use iroha_crypto::HashOf; use iroha_data_model::{ account::AccountId, @@ -22,7 +16,7 @@ use iroha_data_model::{ }, executor::ExecutorDataModel, isi::error::{InstructionExecutionError as Error, MathError}, - parameter::{Parameter, ParameterValueBox}, + parameter::Parameters, permission::Permissions, prelude::*, query::error::{FindError, QueryExecutionFail}, @@ -62,14 +56,14 @@ use crate::{ wasm, Execute, }, tx::TransactionExecutor, - Parameters, PeersIds, + PeersIds, }; /// The global entity consisting of `domains`, `triggers` and etc. /// For example registration of domain, will have this as an ISI target. #[derive(Default, Serialize)] pub struct World { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: Cell, /// Identifications of discovered trusted peers. 
pub(crate) trusted_peers_ids: Cell, @@ -93,8 +87,8 @@ pub struct World { /// Struct for block's aggregated changes pub struct WorldBlock<'world> { - /// Iroha config parameters. - pub(crate) parameters: CellBlock<'world, Parameters>, + /// Iroha on-chain parameters. + pub parameters: CellBlock<'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellBlock<'world, PeersIds>, /// Registered domains. @@ -119,7 +113,7 @@ pub struct WorldBlock<'world> { /// Struct for single transaction's aggregated changes pub struct WorldTransaction<'block, 'world> { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: CellTransaction<'block, 'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellTransaction<'block, 'world, PeersIds>, @@ -153,7 +147,7 @@ struct TransactionEventBuffer<'block> { /// Consistent point in time view of the [`World`] pub struct WorldView<'world> { - /// Iroha config parameters. + /// Iroha on-chain parameters. pub(crate) parameters: CellView<'world, Parameters>, /// Identifications of discovered trusted peers. pub(crate) trusted_peers_ids: CellView<'world, PeersIds>, @@ -180,13 +174,11 @@ pub struct WorldView<'world> { pub struct State { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: World, - /// Configuration of World State View. - pub config: Cell, /// Blockchain. // TODO: Cell is redundant here since block_hashes is very easy to rollback by just popping the last element pub block_hashes: Cell>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: Storage, usize>, + pub transactions: Storage, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. #[serde(skip)] pub engine: wasmtime::Engine, @@ -207,12 +199,10 @@ pub struct State { pub struct StateBlock<'state> { /// The world. 
Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldBlock<'state>, - /// Configuration of World State View. - pub config: CellBlock<'state, Config>, /// Blockchain. pub block_hashes: CellBlock<'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageBlock<'state, HashOf, usize>, + pub transactions: StorageBlock<'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. pub engine: &'state wasmtime::Engine, @@ -229,12 +219,10 @@ pub struct StateBlock<'state> { pub struct StateTransaction<'block, 'state> { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldTransaction<'block, 'state>, - /// Configuration of World State View. - pub config: CellTransaction<'block, 'state, Config>, /// Blockchain. pub block_hashes: CellTransaction<'block, 'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageTransaction<'block, 'state, HashOf, usize>, + pub transactions: StorageTransaction<'block, 'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. pub engine: &'state wasmtime::Engine, @@ -251,12 +239,10 @@ pub struct StateTransaction<'block, 'state> { pub struct StateView<'state> { /// The world. Contains `domains`, `triggers`, `roles` and other data representing the current state of the blockchain. pub world: WorldView<'state>, - /// Configuration of World State View. - pub config: CellView<'state, Config>, /// Blockchain. pub block_hashes: CellView<'state, Vec>>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: StorageView<'state, HashOf, usize>, + pub transactions: StorageView<'state, HashOf, NonZeroUsize>, /// Engine for WASM [`Runtime`](wasm::Runtime) to execute triggers. 
pub engine: &'state wasmtime::Engine, @@ -289,11 +275,11 @@ impl World { .into_iter() .map(|account| (account.id().clone(), account)) .collect(); - World { + Self { trusted_peers_ids: Cell::new(trusted_peers_ids), domains, accounts, - ..World::new() + ..Self::new() } } @@ -331,7 +317,7 @@ impl World { } } - /// Create point in time view of the [`World`] + /// Create point in time view of the [`Self`] pub fn view(&self) -> WorldView { WorldView { parameters: self.parameters.view(), @@ -573,26 +559,6 @@ pub trait WorldReadOnly { self.trusted_peers_ids().iter() } - /// Get all `Parameter`s registered in the world. - fn parameters_iter(&self) -> impl Iterator { - self.parameters().iter() - } - - /// Query parameter and convert it to a proper type - fn query_param, P: core::hash::Hash + Eq + ?Sized>( - &self, - param: &P, - ) -> Option - where - Parameter: Borrow

, - { - Parameters::get(self.parameters(), param) - .as_ref() - .map(|param| ¶m.val) - .cloned() - .and_then(|param_val| param_val.try_into().ok()) - } - /// Returns reference for trusted peer ids #[inline] fn peers_ids(&self) -> &PeersIds { @@ -745,6 +711,19 @@ impl WorldTransaction<'_, '_> { .map_or(false, |permissions| permissions.remove(token)) } + /// Remove all [`Role`]s from the [`Account`] + pub fn remove_account_roles(&mut self, account: &AccountId) { + let roles_to_remove = self + .account_roles_iter(account) + .cloned() + .map(|role| RoleIdWithOwner::new(account.clone(), role.clone())) + .collect::>(); + + for role in roles_to_remove { + self.account_roles.remove(role); + } + } + /// Get mutable reference to [`Asset`] /// /// # Errors @@ -886,7 +865,34 @@ impl WorldTransaction<'_, '_> { /// Set executor data model. pub fn set_executor_data_model(&mut self, executor_data_model: ExecutorDataModel) { - *self.executor_data_model.get_mut() = executor_data_model; + let prev_executor_data_model = + core::mem::replace(self.executor_data_model.get_mut(), executor_data_model); + + self.update_parameters(&prev_executor_data_model); + } + + fn update_parameters(&mut self, prev_executor_data_model: &ExecutorDataModel) { + let removed_parameters = prev_executor_data_model + .parameters + .keys() + .filter(|param_id| !self.executor_data_model.parameters.contains_key(param_id)); + let new_parameters = self + .executor_data_model + .parameters + .iter() + .filter(|(param_id, _)| !prev_executor_data_model.parameters.contains_key(param_id)); + + for param in removed_parameters { + iroha_logger::info!("{}: parameter removed", param); + self.parameters.custom.remove(param); + } + + for (param_id, param) in new_parameters { + self.parameters + .custom + .insert(param_id.clone(), param.clone()); + iroha_logger::info!("{}: parameter created", param); + } } /// Execute trigger with `trigger_id` as id and `authority` as owner @@ -965,21 +971,8 @@ impl State { #[must_use] 
#[inline] pub fn new(world: World, kura: Arc, query_handle: LiveQueryStoreHandle) -> Self { - // Added to remain backward compatible with other code primary in tests - Self::from_config(Config::default(), world, kura, query_handle) - } - - /// Construct [`State`] with specific [`Configuration`]. - #[inline] - pub fn from_config( - config: Config, - world: World, - kura: Arc, - query_handle: LiveQueryStoreHandle, - ) -> Self { Self { world, - config: Cell::new(config), transactions: Storage::new(), block_hashes: Cell::new(Vec::new()), new_tx_amounts: Arc::new(Mutex::new(Vec::new())), @@ -993,7 +986,6 @@ impl State { pub fn block(&self) -> StateBlock<'_> { StateBlock { world: self.world.block(), - config: self.config.block(), block_hashes: self.block_hashes.block(), transactions: self.transactions.block(), engine: &self.engine, @@ -1007,7 +999,6 @@ impl State { pub fn block_and_revert(&self) -> StateBlock<'_> { StateBlock { world: self.world.block_and_revert(), - config: self.config.block_and_revert(), block_hashes: self.block_hashes.block_and_revert(), transactions: self.transactions.block_and_revert(), engine: &self.engine, @@ -1021,7 +1012,6 @@ impl State { pub fn view(&self) -> StateView<'_> { StateView { world: self.world.view(), - config: self.config.view(), block_hashes: self.block_hashes.view(), transactions: self.transactions.view(), engine: &self.engine, @@ -1036,9 +1026,8 @@ impl State { #[allow(missing_docs)] pub trait StateReadOnly { fn world(&self) -> &impl WorldReadOnly; - fn config(&self) -> &Config; fn block_hashes(&self) -> &[HashOf]; - fn transactions(&self) -> &impl StorageReadOnly, usize>; + fn transactions(&self) -> &impl StorageReadOnly, NonZeroUsize>; fn engine(&self) -> &wasmtime::Engine; fn kura(&self) -> &Kura; fn query_handle(&self) -> &LiveQueryStoreHandle; @@ -1108,8 +1097,7 @@ pub trait StateReadOnly { fn block_with_tx(&self, hash: &HashOf) -> Option> { self.transactions() .get(hash) - .and_then(|&height| NonZeroUsize::new(height)) - 
.and_then(|height| self.kura().get_block_by_height(height)) + .and_then(|&height| self.kura().get_block_by_height(height)) } /// Returns [`Some`] milliseconds since the genesis block was @@ -1122,7 +1110,7 @@ pub trait StateReadOnly { let opt = self .kura() .get_block_by_height(nonzero_ext::nonzero!(1_usize)) - .map(|genesis_block| genesis_block.header().timestamp()); + .map(|genesis_block| genesis_block.header().creation_time()); if opt.is_none() { error!("Failed to get genesis block from Kura."); @@ -1140,7 +1128,7 @@ pub trait StateReadOnly { /// Get transaction executor fn transaction_executor(&self) -> TransactionExecutor { - TransactionExecutor::new(self.config().transaction_limits) + TransactionExecutor::new(self.world().parameters().transaction) } } @@ -1150,13 +1138,10 @@ macro_rules! impl_state_ro { fn world(&self) -> &impl WorldReadOnly { &self.world } - fn config(&self) -> &Config { - &self.config - } fn block_hashes(&self) -> &[HashOf] { &self.block_hashes } - fn transactions(&self) -> &impl StorageReadOnly, usize> { + fn transactions(&self) -> &impl StorageReadOnly, NonZeroUsize> { &self.transactions } fn engine(&self) -> &wasmtime::Engine { @@ -1184,7 +1169,6 @@ impl<'state> StateBlock<'state> { pub fn transaction(&mut self) -> StateTransaction<'_, 'state> { StateTransaction { world: self.world.trasaction(), - config: self.config.transaction(), block_hashes: self.block_hashes.transaction(), transactions: self.transactions.transaction(), engine: self.engine, @@ -1198,7 +1182,6 @@ impl<'state> StateBlock<'state> { pub fn commit(self) { self.transactions.commit(); self.block_hashes.commit(); - self.config.commit(); self.world.commit(); } @@ -1303,13 +1286,13 @@ impl<'state> StateBlock<'state> { let header = &latest_block.as_ref().header(); TimeInterval { - since: header.timestamp(), + since: header.creation_time(), length: header.consensus_estimation(), } }); let interval = TimeInterval { - since: block.as_ref().header().timestamp(), + since: 
block.as_ref().header().creation_time(), length: block.as_ref().header().consensus_estimation(), }; @@ -1374,101 +1357,9 @@ impl StateTransaction<'_, '_> { pub fn apply(self) { self.transactions.apply(); self.block_hashes.apply(); - self.config.apply(); self.world.apply(); } - /// If given [`Parameter`] represents some of the core chain-wide - /// parameters ([`Config`]), apply it - pub fn try_apply_core_parameter(&mut self, parameter: Parameter) { - use iroha_data_model::parameter::default::*; - - struct Reader(Option); - - impl Reader { - fn try_and_then>( - self, - id: &str, - fun: impl FnOnce(T), - ) -> Self { - if let Some(param) = self.0 { - if param.id().name().as_ref() == id { - if let Ok(value) = param.val.try_into() { - fun(value); - } - Self(None) - } else { - Self(Some(param)) - } - } else { - Self(None) - } - } - - fn try_and_write>( - self, - id: &str, - destination: &mut T, - ) -> Self { - self.try_and_then(id, |value| { - *destination = value; - }) - } - - fn try_and_write_duration(self, id: &str, destination: &mut Duration) -> Self { - self.try_and_then(id, |value| *destination = Duration::from_millis(value)) - } - - fn try_and_write_bytes(self, id: &str, destination: &mut Bytes) -> Self { - self.try_and_then(id, |value| *destination = Bytes(value)) - } - } - - Reader(Some(parameter)) - .try_and_then(MAX_TRANSACTIONS_IN_BLOCK, |value| { - if let Some(checked) = NonZeroU32::new(value) { - self.config.max_transactions_in_block = checked; - } - }) - .try_and_write_duration(BLOCK_TIME, &mut self.config.block_time) - .try_and_write_duration(COMMIT_TIME_LIMIT, &mut self.config.commit_time) - .try_and_write( - WSV_DOMAIN_METADATA_LIMITS, - &mut self.config.domain_metadata_limits, - ) - .try_and_write( - WSV_ASSET_DEFINITION_METADATA_LIMITS, - &mut self.config.asset_definition_metadata_limits, - ) - .try_and_write( - WSV_ACCOUNT_METADATA_LIMITS, - &mut self.config.account_metadata_limits, - ) - .try_and_write( - WSV_ASSET_METADATA_LIMITS, - &mut 
self.config.asset_metadata_limits, - ) - .try_and_write( - WSV_TRIGGER_METADATA_LIMITS, - &mut self.config.trigger_metadata_limits, - ) - .try_and_write( - WSV_IDENT_LENGTH_LIMITS, - &mut self.config.ident_length_limits, - ) - .try_and_write( - EXECUTOR_FUEL_LIMIT, - &mut self.config.executor_runtime.fuel_limit, - ) - .try_and_write_bytes( - EXECUTOR_MAX_MEMORY, - &mut self.config.executor_runtime.max_memory, - ) - .try_and_write(WASM_FUEL_LIMIT, &mut self.config.wasm_runtime.fuel_limit) - .try_and_write_bytes(WASM_MAX_MEMORY, &mut self.config.wasm_runtime.max_memory) - .try_and_write(TRANSACTION_LIMITS, &mut self.config.transaction_limits); - } - fn process_executable(&mut self, executable: &Executable, authority: AccountId) -> Result<()> { match executable { Executable::Instructions(instructions) => { @@ -1476,7 +1367,7 @@ impl StateTransaction<'_, '_> { } Executable::Wasm(bytes) => { let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_config(self.config.wasm_runtime) + .with_config(self.world().parameters().smart_contract) .with_engine(self.engine.clone()) // Cloning engine is cheap .build()?; wasm_runtime @@ -1518,7 +1409,7 @@ impl StateTransaction<'_, '_> { .expect("INTERNAL BUG: contract is not present") .clone(); let mut wasm_runtime = wasm::RuntimeBuilder::::new() - .with_config(self.config.wasm_runtime) + .with_config(self.world().parameters().smart_contract) .with_engine(self.engine.clone()) // Cloning engine is cheap .build()?; wasm_runtime @@ -1593,23 +1484,12 @@ mod range_bounds { } /// `DomainId` wrapper for fetching accounts beloning to a domain from the global store - #[derive(PartialEq, Eq, PartialOrd, Copy, Clone)] + #[derive(PartialEq, Eq, Ord, PartialOrd, Copy, Clone)] pub struct AccountIdDomainCompare<'a> { domain_id: &'a DomainId, signatory: MinMaxExt<&'a PublicKey>, } - // Sorting needed to be flipped for the storage lookup to work. 
- impl Ord for AccountIdDomainCompare<'_> { - fn cmp(&self, other: &AccountIdDomainCompare<'_>) -> std::cmp::Ordering { - if self.domain_id == other.domain_id { - other.signatory.cmp(&self.signatory) - } else { - other.domain_id.cmp(self.domain_id) - } - } - } - /// Bounds for range quired over accounts by domain pub struct AccountByDomainBounds<'a> { start: AccountIdDomainCompare<'a>, @@ -1875,7 +1755,6 @@ pub(crate) mod deserialize { M: MapAccess<'de>, { let mut world = None; - let mut config = None; let mut block_hashes = None; let mut transactions = None; @@ -1891,9 +1770,6 @@ pub(crate) mod deserialize { "world" => { world = Some(map.next_value_seed(wasm_seed.cast::())?); } - "config" => { - config = Some(map.next_value()?); - } "block_hashes" => { block_hashes = Some(map.next_value()?); } @@ -1906,7 +1782,6 @@ pub(crate) mod deserialize { Ok(State { world: world.ok_or_else(|| serde::de::Error::missing_field("world"))?, - config: config.ok_or_else(|| serde::de::Error::missing_field("config"))?, block_hashes: block_hashes .ok_or_else(|| serde::de::Error::missing_field("block_hashes"))?, transactions: transactions @@ -1921,7 +1796,7 @@ pub(crate) mod deserialize { deserializer.deserialize_struct( "WorldState", - &["world", "config", "block_hashes", "transactions"], + &["world", "block_hashes", "transactions"], StateVisitor { loader: self }, ) } @@ -1930,6 +1805,8 @@ pub(crate) mod deserialize { #[cfg(test)] mod tests { + use core::num::NonZeroU64; + use iroha_data_model::block::BlockPayload; use test_samples::gen_account_in; @@ -1963,7 +1840,7 @@ mod tests { let mut block_hashes = vec![]; for i in 1..=BLOCK_CNT { let block = new_dummy_block_with_payload(|payload| { - payload.header.height = i as u64; + payload.header.height = NonZeroU64::new(i as u64).unwrap(); payload.header.prev_block_hash = block_hashes.last().copied(); }); @@ -1988,7 +1865,7 @@ mod tests { for i in 1..=BLOCK_CNT { let block = new_dummy_block_with_payload(|payload| { - payload.header.height = 
i as u64; + payload.header.height = NonZeroU64::new(i as u64).unwrap(); }); let _events = state_block.apply(&block).unwrap(); @@ -1999,7 +1876,7 @@ mod tests { &state_block .all_blocks() .skip(7) - .map(|block| block.header().height()) + .map(|block| block.header().height().get()) .collect::>(), &[8, 9, 10] ); @@ -2015,15 +1892,46 @@ mod tests { RoleIdWithOwner::new(gen_account_in("wonderland").0, "4".parse().unwrap()), RoleIdWithOwner::new(gen_account_in("0").0, "5".parse().unwrap()), RoleIdWithOwner::new(gen_account_in("1").0, "6".parse().unwrap()), - ]; - let map = BTreeSet::from(roles); + ] + .map(|role| (role, ())); + let map = Storage::from_iter(roles); - let range = map + let view = map.view(); + let range = view .range(RoleIdByAccountBounds::new(&account_id)) .collect::>(); assert_eq!(range.len(), 2); - for role in range { + for (role, ()) in range { assert_eq!(&role.account, &account_id); } } + + #[test] + fn account_domain_range() { + let accounts = [ + gen_account_in("wonderland").0, + gen_account_in("wonderland").0, + gen_account_in("a").0, + gen_account_in("b").0, + gen_account_in("z").0, + gen_account_in("z").0, + ] + .map(|account| (account, ())); + let map = Storage::from_iter(accounts); + + let domain_id = "kingdom".parse().unwrap(); + let view = map.view(); + let range = view.range(AccountByDomainBounds::new(&domain_id)); + assert_eq!(range.count(), 0); + + let domain_id = "wonderland".parse().unwrap(); + let view = map.view(); + let range = view + .range(AccountByDomainBounds::new(&domain_id)) + .collect::>(); + assert_eq!(range.len(), 2); + for (account, ()) in range { + assert_eq!(&account.domain, &domain_id); + } + } } diff --git a/core/src/sumeragi/main_loop.rs b/core/src/sumeragi/main_loop.rs index 861bb556f66..f8c2d8dd17b 100644 --- a/core/src/sumeragi/main_loop.rs +++ b/core/src/sumeragi/main_loop.rs @@ -21,14 +21,6 @@ pub struct Sumeragi { pub peer_id: PeerId, /// An actor that sends events pub events_sender: EventsSender, - /// Time by 
which a newly created block should be committed. Prevents malicious nodes - /// from stalling the network by not participating in consensus - pub commit_time: Duration, - /// Time by which a new block should be created regardless if there were enough transactions or not. - /// Used to force block commits when there is a small influx of new transactions. - pub block_time: Duration, - /// The maximum number of transactions in the block - pub max_txs_in_block: usize, /// Kura instance used for IO pub kura: Arc, /// [`iroha_p2p::Network`] actor address @@ -122,12 +114,6 @@ impl Sumeragi { self.network.update_topology(UpdateTopology(peers)); } - /// The maximum time a sumeragi round can take to produce a block when - /// there are no faulty peers in the a set. - fn pipeline_time(&self) -> Duration { - self.block_time + self.commit_time - } - fn send_event(&self, event: impl Into) { let _ = self.events_sender.send(event.into()); } @@ -318,6 +304,8 @@ impl Sumeragi { "Genesis contains invalid transactions" ); + self.topology = Topology::new(genesis.as_ref().commit_topology().cloned()); + let msg = BlockCreated::from(&genesis); let genesis = genesis .commit(&self.topology) @@ -345,8 +333,6 @@ impl Sumeragi { let state_events = state_block.apply_without_execution(&block); - // Parameters are updated before updating public copy of sumeragi - self.update_params(&state_block); self.cache_transaction(&state_block); self.topology @@ -383,12 +369,6 @@ impl Sumeragi { self.was_commit = true; } - fn update_params(&mut self, state_block: &StateBlock<'_>) { - self.block_time = state_block.config.block_time; - self.commit_time = state_block.config.commit_time; - self.max_txs_in_block = state_block.config.max_transactions_in_block.get() as usize; - } - fn cache_transaction(&mut self, state_block: &StateBlock<'_>) { self.transaction_cache.retain(|tx| { !state_block.has_transaction(tx.as_ref().hash()) && !self.queue.is_expired(tx) @@ -404,6 +384,15 @@ impl Sumeragi { ) -> Option> { let 
mut state_block = state.block(); + if state_block.height() == 1 && block.header().height.get() == 1 { + // Consider our peer has genesis, + // and some other peer has genesis and broadcast it to our peer, + // then we can ignore such genesis block because we already has genesis. + // Note: `ValidBlock::validate` also checks it, + // but we don't want warning to be printed since this is correct behaviour. + return None; + } + ValidBlock::validate( block, topology, @@ -807,7 +796,14 @@ impl Sumeragi { #[cfg(debug_assertions)] if is_genesis_peer && self.debug_force_soft_fork { - std::thread::sleep(self.pipeline_time() * 2); + let pipeline_time = voting_block + .state_block + .world + .parameters() + .sumeragi + .pipeline_time(); + + std::thread::sleep(pipeline_time * 2); } else { let msg = BlockCommitted::from(&committed_block); self.broadcast_packet(msg); @@ -835,8 +831,17 @@ impl Sumeragi { ) { assert_eq!(self.role(), Role::Leader); - let tx_cache_full = self.transaction_cache.len() >= self.max_txs_in_block; - let deadline_reached = self.round_start_time.elapsed() > self.block_time; + let max_transactions: NonZeroUsize = state + .world + .view() + .parameters + .block + .max_transactions + .try_into() + .expect("INTERNAL BUG: transactions in block exceed usize::MAX"); + let block_time = state.world.view().parameters.sumeragi.block_time(); + let tx_cache_full = self.transaction_cache.len() >= max_transactions.get(); + let deadline_reached = self.round_start_time.elapsed() > block_time; let tx_cache_non_empty = !self.transaction_cache.is_empty(); if tx_cache_full || (deadline_reached && tx_cache_non_empty) { @@ -853,7 +858,8 @@ impl Sumeragi { .unpack(|e| self.send_event(e)); let created_in = create_block_start_time.elapsed(); - if created_in > self.pipeline_time() / 2 { + let pipeline_time = state.world.view().parameters().sumeragi.pipeline_time(); + if created_in > pipeline_time / 2 { warn!( role=%self.role(), peer_id=%self.peer_id, @@ -999,7 +1005,7 @@ pub(crate) 
fn run( let mut should_sleep = false; let mut view_change_proof_chain = ProofChain::default(); // Duration after which a view change is suggested - let mut view_change_time = sumeragi.pipeline_time(); + let mut view_change_time = state.world.view().parameters().sumeragi.pipeline_time(); // Instant when the previous view change or round happened. let mut last_view_change_time = Instant::now(); @@ -1029,7 +1035,14 @@ pub(crate) fn run( sumeragi.queue.get_transactions_for_block( &state_view, - sumeragi.max_txs_in_block, + state + .world + .view() + .parameters + .block + .max_transactions + .try_into() + .expect("INTERNAL BUG: transactions in block exceed usize::MAX"), &mut sumeragi.transaction_cache, ); @@ -1042,7 +1055,7 @@ pub(crate) fn run( reset_state( &sumeragi.peer_id, - sumeragi.pipeline_time(), + state.world.view().parameters().sumeragi.pipeline_time(), view_change_index, &mut sumeragi.was_commit, &mut sumeragi.topology, @@ -1127,12 +1140,12 @@ pub(crate) fn run( // NOTE: View change must be periodically suggested until it is accepted. 
// Must be initialized to pipeline time but can increase by chosen amount - view_change_time += sumeragi.pipeline_time(); + view_change_time += state.world.view().parameters().sumeragi.pipeline_time(); } reset_state( &sumeragi.peer_id, - sumeragi.pipeline_time(), + state.world.view().parameters().sumeragi.pipeline_time(), view_change_index, &mut sumeragi.was_commit, &mut sumeragi.topology, @@ -1224,7 +1237,7 @@ enum BlockSyncError { }, BlockNotProperHeight { peer_height: usize, - block_height: usize, + block_height: NonZeroUsize, }, } @@ -1235,18 +1248,18 @@ fn handle_block_sync<'state, F: Fn(PipelineEventBox)>( genesis_account: &AccountId, handle_events: &F, ) -> Result, (SignedBlock, BlockSyncError)> { - let block_height = block + let block_height: NonZeroUsize = block .header() .height .try_into() .expect("INTERNAL BUG: Block height exceeds usize::MAX"); let state_height = state.view().height(); - let (mut state_block, soft_fork) = if state_height + 1 == block_height { + let (mut state_block, soft_fork) = if state_height + 1 == block_height.get() { // NOTE: Normal branch for adding new block on top of current (state.block(), false) - } else if state_height == block_height && block_height > 1 { + } else if state_height == block_height.get() && block_height.get() > 1 { // NOTE: Soft fork branch for replacing current block with valid one let latest_block = state @@ -1319,6 +1332,7 @@ fn handle_block_sync<'state, F: Fn(PipelineEventBox)>( #[cfg(test)] mod tests { use iroha_genesis::GENESIS_DOMAIN_ID; + use nonzero_ext::nonzero; use test_samples::gen_account_in; use tokio::test; @@ -1358,19 +1372,16 @@ mod tests { // Create "genesis" block // Creating an instruction - let fail_box = Fail::new("Dummy isi".to_owned()); + let fail_isi = Unregister::domain("dummy".parse().unwrap()); let mut state_block = state.block(); // Making two transactions that have the same instruction let tx = TransactionBuilder::new(chain_id.clone(), alice_id.clone()) - 
.with_instructions([fail_box]) + .with_instructions([fail_isi]) .sign(alice_keypair.private_key()); - let tx = AcceptedTransaction::accept( - tx, - chain_id, - state_block.transaction_executor().transaction_limits, - ) - .expect("Valid"); + let tx = + AcceptedTransaction::accept(tx, chain_id, state_block.transaction_executor().limits) + .expect("Valid"); // Creating a block of two identical transactions and validating it let block = BlockBuilder::new(vec![tx.clone(), tx], topology.clone(), Vec::new()) @@ -1402,7 +1413,7 @@ mod tests { let tx1 = AcceptedTransaction::accept( tx1, chain_id, - state_block.transaction_executor().transaction_limits, + state_block.transaction_executor().limits, ) .map(Into::into) .expect("Valid"); @@ -1412,7 +1423,7 @@ mod tests { let tx2 = AcceptedTransaction::accept( tx2, chain_id, - state_block.transaction_executor().transaction_limits, + state_block.transaction_executor().limits, ) .map(Into::into) .expect("Valid"); @@ -1500,20 +1511,26 @@ mod tests { // Change block height let block = clone_and_modify_payload(&block, &leader_private_key, |payload| { - payload.header.height = 42; + payload.header.height = nonzero!(42_u64); }); let result = handle_block_sync(&chain_id, block, &state, &genesis_public_key, &|_| {}); + assert!(matches!( result, - Err(( - _, - BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 42 - } - )) - )) + Err((_, BlockSyncError::BlockNotProperHeight { .. 
})) + )); + if let Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height, + block_height, + }, + )) = result + { + assert_eq!(peer_height, 1); + assert_eq!(block_height, nonzero!(42_usize)); + } } #[test] @@ -1644,19 +1661,25 @@ mod tests { // Soft-fork on genesis block is not possible let block = clone_and_modify_payload(&block, &leader_private_key, |payload| { payload.header.view_change_index = 42; - payload.header.height = 1; + payload.header.height = nonzero!(1_u64); }); let result = handle_block_sync(&chain_id, block, &state, &genesis_public_key, &|_| {}); + assert!(matches!( result, - Err(( - _, - BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 1, - } - )) - )) + Err((_, BlockSyncError::BlockNotProperHeight { .. })) + )); + if let Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height, + block_height, + }, + )) = result + { + assert_eq!(peer_height, 1); + assert_eq!(block_height, nonzero!(1_usize)); + } } } diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index 9763fe7324c..42a7921a617 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -211,9 +211,6 @@ impl SumeragiHandle { peer_id: peer_id.clone(), queue: Arc::clone(&queue), events_sender, - commit_time: state.view().config.commit_time, - block_time: state.view().config.block_time, - max_txs_in_block: state.view().config.max_transactions_in_block.get() as usize, kura: Arc::clone(&kura), network: network.clone(), control_message_receiver, diff --git a/core/src/tx.rs b/core/src/tx.rs index 6b9f04413ed..2b841f7bba2 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -14,7 +14,7 @@ pub use iroha_data_model::prelude::*; use iroha_data_model::{ isi::error::Mismatch, query::error::FindError, - transaction::{error::TransactionLimitError, TransactionLimits, TransactionPayload}, + transaction::{error::TransactionLimitError, TransactionPayload}, }; use iroha_logger::{debug, error}; use iroha_macro::FromVariant; @@ -95,7 +95,7 @@ impl 
AcceptedTransaction { pub fn accept( tx: SignedTransaction, expected_chain_id: &ChainId, - limits: TransactionLimits, + limits: TransactionParameters, ) -> Result { let actual_chain_id = tx.chain(); @@ -112,13 +112,19 @@ impl AcceptedTransaction { match &tx.instructions() { Executable::Instructions(instructions) => { - let instruction_count = instructions.len(); - if Self::len_u64(instruction_count) > limits.max_instruction_number { + let instruction_limit = limits + .max_instructions + .get() + .try_into() + .expect("INTERNAL BUG: max instructions exceeds usize::MAX"); + + if instructions.len() > instruction_limit { return Err(AcceptTransactionFail::TransactionLimit( TransactionLimitError { reason: format!( "Too many instructions in payload, max number is {}, but got {}", - limits.max_instruction_number, instruction_count + limits.max_instructions, + instructions.len() ), }, )); @@ -129,13 +135,21 @@ impl AcceptedTransaction { // // Should we allow infinite instructions in wasm? And deny only based on fuel and size Executable::Wasm(smart_contract) => { - let size_bytes = Self::len_u64(smart_contract.size_bytes()); - let max_wasm_size_bytes = limits.max_wasm_size_bytes; + let smart_contract_size_limit = limits + .smart_contract_size + .get() + .try_into() + .expect("INTERNAL BUG: smart contract size exceeds usize::MAX"); - if size_bytes > max_wasm_size_bytes { + if smart_contract.size_bytes() > smart_contract_size_limit { return Err(AcceptTransactionFail::TransactionLimit( TransactionLimitError { - reason: format!("Wasm binary too large, max size is {max_wasm_size_bytes}, but got {size_bytes}"), + reason: format!( + "WASM binary size is too large: max {}, got {} \ + (configured by \"Parameter::SmartContractLimits\")", + limits.smart_contract_size, + smart_contract.size_bytes() + ), }, )); } @@ -144,11 +158,6 @@ impl AcceptedTransaction { Ok(Self(tx)) } - - #[inline] - fn len_u64(instruction_count: usize) -> u64 { - u64::try_from(instruction_count).expect("`usize` 
should always fit into `u64`") - } } impl From for SignedTransaction { @@ -174,14 +183,16 @@ impl AsRef for AcceptedTransaction { /// Validation is skipped for genesis. #[derive(Clone, Copy)] pub struct TransactionExecutor { - /// [`TransactionLimits`] field - pub transaction_limits: TransactionLimits, + /// [`TransactionParameters`] field + pub limits: TransactionParameters, } impl TransactionExecutor { /// Construct [`TransactionExecutor`] - pub fn new(transaction_limits: TransactionLimits) -> Self { - Self { transaction_limits } + pub fn new(transaction_limits: TransactionParameters) -> Self { + Self { + limits: transaction_limits, + } } /// Move transaction lifecycle forward by checking if the @@ -244,7 +255,7 @@ impl TransactionExecutor { state_transaction, authority, wasm, - self.transaction_limits.max_instruction_number, + self.limits.max_instructions, ) }) .map_err(|error| WasmExecutionFail { diff --git a/core/test_network/src/lib.rs b/core/test_network/src/lib.rs index f35df530fe2..54e4fc01dc3 100644 --- a/core/test_network/src/lib.rs +++ b/core/test_network/src/lib.rs @@ -7,7 +7,7 @@ use futures::{prelude::*, stream::FuturesUnordered}; use iroha::{ client::{Client, QueryOutput}, config::Config as ClientConfig, - data_model::{isi::Instruction, peer::Peer as DataModelPeer, prelude::*, query::Query, Level}, + data_model::{isi::Instruction, peer::Peer as DataModelPeer, prelude::*, query::Query}, }; use iroha_config::parameters::actual::{Root as Config, Sumeragi, TrustedPeers}; pub use iroha_core::state::StateReadOnly; @@ -20,7 +20,7 @@ use iroha_primitives::{ unique_vec::UniqueVec, }; use irohad::Iroha; -use rand::{seq::IteratorRandom, thread_rng}; +use rand::{prelude::SliceRandom, seq::IteratorRandom, thread_rng}; use serde_json::json; use tempfile::TempDir; use test_samples::{ALICE_ID, ALICE_KEYPAIR, PEER_KEYPAIR, SAMPLE_GENESIS_ACCOUNT_KEYPAIR}; @@ -32,9 +32,9 @@ pub use unique_port; /// Network of peers pub struct Network { - /// Genesis peer which sends 
genesis block to everyone - pub genesis: Peer, - /// Peers excluding the `genesis` peer. Use [`Network::peers`] function to get all instead. + /// First peer, guaranteed to be online and submit genesis block. + pub first_peer: Peer, + /// Peers excluding the `first_peer`. Use [`Network::peers`] function to get all instead. /// /// [`BTreeMap`] is used in order to have deterministic order of peers. pub peers: BTreeMap, @@ -138,50 +138,139 @@ impl TestGenesis for GenesisBlock { } } -impl Network { - /// Collect the freeze handles from all the peers in the network. - #[cfg(debug_assertions)] - pub fn get_freeze_status_handles(&self) -> Vec { - self.peers() - .filter_map(|peer| peer.irohad.as_ref()) - .map(|iroha| iroha.freeze_status()) - .cloned() - .collect() +pub struct NetworkBuilder { + n_peers: u32, + port: Option, + config: Option, + /// Number of offline peers. + /// By default all peers are online. + offline_peers: Option, + /// Number of peers which will submit genesis. + /// By default only first peer submits genesis. + genesis_peers: Option, +} + +impl NetworkBuilder { + pub fn new(n_peers: u32, port: Option) -> Self { + assert_ne!(n_peers, 0); + Self { + n_peers, + port, + config: None, + offline_peers: None, + genesis_peers: None, + } } - /// Starts network with peers with default configuration and - /// specified options in a new async runtime. Returns its info - /// and client for connecting to it. - pub fn start_test_with_runtime( - n_peers: u32, - start_port: Option, - ) -> (Runtime, Self, Client) { - let rt = Runtime::test(); - let (network, client) = rt.block_on(Self::start_test(n_peers, start_port)); - (rt, network, client) + #[must_use] + pub fn with_config(mut self, config: Config) -> Self { + self.config = Some(config); + self } - /// Starts network with peers with default configuration and - /// specified options. Returns its info and client for connecting - /// to it. 
- pub async fn start_test(n_peers: u32, start_port: Option) -> (Self, Client) { - Self::start_test_with_offline(n_peers, 0, start_port).await + #[must_use] + pub fn with_offline_peers(mut self, offline_peers: u32) -> Self { + assert!(offline_peers < self.n_peers); + self.offline_peers = Some(offline_peers); + self } - /// Starts network with peers with default configuration and - /// specified options. Returns its info and client for connecting - /// to it. - pub async fn start_test_with_offline_and_set_n_shifts( - n_peers: u32, - offline_peers: u32, - start_port: Option, - ) -> (Self, Client) { - let mut config = Config::test(); - config.logger.level = Level::INFO; - let network = - Network::new_with_offline_peers(Some(config), n_peers, offline_peers, start_port) - .await - .expect("Failed to init peers"); + #[must_use] + pub fn with_genesis_peers(mut self, genesis_peers: u32) -> Self { + assert!(0 < genesis_peers && genesis_peers <= self.n_peers); + self.genesis_peers = Some(genesis_peers); + self + } + + /// Creates new network with options provided. 
+ pub async fn create(self) -> Network { + let (builders, mut peers) = self.prepare_peers(); + + let peer_infos = self.generate_peer_infos(); + let mut config = self.config.unwrap_or_else(Config::test); + let topology = peers.iter().map(|peer| peer.id.clone()).collect::>(); + config.sumeragi.trusted_peers.value_mut().others = UniqueVec::from_iter(topology.clone()); + let genesis_block = GenesisBlock::test(topology); + + let futures = FuturesUnordered::new(); + for ((builder, peer), peer_info) in builders + .into_iter() + .zip(peers.iter_mut()) + .zip(peer_infos.iter()) + { + match peer_info { + PeerInfo::Offline => { /* peer offline, do nothing */ } + PeerInfo::Online { is_genesis } => { + let future = builder + .with_config(config.clone()) + .with_into_genesis(is_genesis.then(|| genesis_block.clone())) + .start_with_peer(peer); + futures.push(future); + } + } + } + futures.collect::<()>().await; + time::sleep(Duration::from_millis(500) * (self.n_peers + 1)).await; + + assert_eq!(peer_infos[0], PeerInfo::Online { is_genesis: true }); + let first_peer = peers.remove(0); + let other_peers = peers + .into_iter() + .map(|peer| (peer.id.clone(), peer)) + .collect::>(); + Network { + first_peer, + peers: other_peers, + } + } + + fn prepare_peers(&self) -> (Vec, Vec) { + let mut builders = (0..self.n_peers) + .map(|n| { + let mut builder = PeerBuilder::new(); + if let Some(port) = self.port { + let offset: u16 = (n * 5) + .try_into() + .expect("The `n_peers` is too large to fit into `u16`"); + builder = builder.with_port(port + offset) + } + builder + }) + .collect::>(); + let peers = builders + .iter_mut() + .map(PeerBuilder::build) + .collect::>>() + .expect("Failed to init peers"); + (builders, peers) + } + + fn generate_peer_infos(&self) -> Vec { + let n_peers = self.n_peers as usize; + let n_offline_peers = self.offline_peers.unwrap_or(0) as usize; + let n_genesis_peers = self.genesis_peers.unwrap_or(1) as usize; + assert!(n_genesis_peers + n_offline_peers <= 
n_peers); + + let mut peers = (0..n_peers).collect::>(); + let mut result = vec![PeerInfo::Online { is_genesis: false }; n_peers]; + + // First n_genesis_peers will be genesis peers. + // Last n_offline_peers will be offline peers. + // First peer must be online and submit genesis so don't shuffle it. + peers[1..].shuffle(&mut thread_rng()); + for &peer in &peers[0..n_genesis_peers] { + result[peer] = PeerInfo::Online { is_genesis: true }; + } + for &peer in peers.iter().rev().take(n_offline_peers) { + result[peer] = PeerInfo::Offline; + } + result + } + + /// Creates new network with options provided. + /// Returns network and client for connecting to it. + pub async fn create_with_client(self) -> (Network, Client) { + let network = self.create().await; let client = Client::test( &Network::peers(&network) .choose(&mut thread_rng()) @@ -191,15 +280,40 @@ impl Network { (network, client) } + /// Creates new network with options provided in a new async runtime. + pub fn create_with_runtime(self) -> (Runtime, Network, Client) { + let rt = Runtime::test(); + let (network, client) = rt.block_on(self.create_with_client()); + (rt, network, client) + } +} + +// Auxiliary enum for `NetworkBuilder::create` implementation +#[derive(Debug, Clone, Eq, PartialEq)] +enum PeerInfo { + Online { is_genesis: bool }, + Offline, +} + +impl Network { + /// Collect the freeze handles from all the peers in the network. + #[cfg(debug_assertions)] + pub fn get_freeze_status_handles(&self) -> Vec { + self.peers() + .filter_map(|peer| peer.irohad.as_ref()) + .map(|iroha| iroha.freeze_status()) + .cloned() + .collect() + } + /// Starts network with peers with default configuration and - /// specified options. Returns its info and client for connecting - /// to it. - pub async fn start_test_with_offline( + /// specified options in a new async runtime. Returns its info + /// and client for connecting to it. 
+ pub fn start_test_with_runtime( n_peers: u32, - offline_peers: u32, start_port: Option, - ) -> (Self, Client) { - Self::start_test_with_offline_and_set_n_shifts(n_peers, offline_peers, start_port).await + ) -> (Runtime, Self, Client) { + NetworkBuilder::new(n_peers, start_port).create_with_runtime() } /// Adds peer to network and waits for it to start block @@ -227,86 +341,9 @@ impl Network { (peer, peer_client) } - /// Creates new network with some offline peers - /// - /// # Panics - /// - If loading an environment configuration fails when - /// no default configuration was provided. - /// - If keypair generation fails. - /// - /// # Errors - /// - (RARE) Creating new peers and collecting into a [`HashMap`] fails. - /// - Creating new [`Peer`] instance fails. - pub async fn new_with_offline_peers( - default_config: Option, - n_peers: u32, - offline_peers: u32, - start_port: Option, - ) -> Result { - let mut builders = core::iter::repeat_with(PeerBuilder::new) - .enumerate() - .map(|(n, builder)| { - if let Some(port) = start_port { - let offset: u16 = (n * 5) - .try_into() - .expect("The `n_peers` is too large to fit into `u16`"); - builder.with_port(port + offset) - } else { - builder - } - }) - .take(n_peers as usize) - .collect::>(); - let mut peers = builders - .iter_mut() - .map(PeerBuilder::build) - .collect::>>()?; - - let mut config = default_config.unwrap_or_else(Config::test); - let topology = peers.iter().map(|peer| peer.id.clone()).collect::>(); - config.sumeragi.trusted_peers.value_mut().others = - UniqueVec::from_iter(peers.iter().map(|peer| peer.id.clone())); - - let mut genesis_peer = peers.remove(0); - let genesis_builder = builders - .remove(0) - .with_config(config.clone()) - .with_genesis(GenesisBlock::test(topology)); - - // Offset by one to account for genesis - let online_peers = n_peers - offline_peers - 1; - let rng = &mut rand::thread_rng(); - let futures = FuturesUnordered::new(); - - futures.push(genesis_builder.start_with_peer(&mut 
genesis_peer)); - - for (builder, peer) in builders - .into_iter() - .zip(peers.iter_mut()) - .choose_multiple(rng, online_peers as usize) - { - let peer = builder - .with_config(config.clone()) - .with_into_genesis(None) - .start_with_peer(peer); - futures.push(peer); - } - futures.collect::<()>().await; - - time::sleep(Duration::from_millis(500) * (n_peers + 1)).await; - - Ok(Self { - genesis: genesis_peer, - peers: peers - .into_iter() - .map(|peer| (peer.id.clone(), peer)) - .collect::>(), - }) - } - /// Returns all peers. pub fn peers(&self) -> impl Iterator + '_ { - std::iter::once(&self.genesis).chain(self.peers.values()) + std::iter::once(&self.first_peer).chain(self.peers.values()) } /// Get active clients @@ -318,8 +355,8 @@ impl Network { /// Get peer by its Id. pub fn peer_by_id(&self, id: &PeerId) -> Option<&Peer> { - self.peers.get(id).or(if self.genesis.id == *id { - Some(&self.genesis) + self.peers.get(id).or(if self.first_peer.id == *id { + Some(&self.first_peer) } else { None }) @@ -628,13 +665,9 @@ impl PeerBuilder { /// Create and start a peer, create a client and connect it to the peer and return both. 
pub async fn start_with_client(self) -> (Peer, Client) { - let config = self.config.clone().unwrap_or_else(Config::test); - let peer = self.start().await; - let client = Client::test(&peer.api_address); - - time::sleep(config.chain_wide.pipeline_time()).await; + time::sleep(::pipeline_time()).await; (peer, client) } @@ -781,7 +814,8 @@ impl TestConfig for Config { } fn pipeline_time() -> Duration { - Self::test().chain_wide.pipeline_time() + let defaults = iroha_data_model::parameter::SumeragiParameters::default(); + defaults.block_time() + defaults.commit_time() } fn block_sync_gossip_time() -> Duration { diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index c9a64c64537..baaa45daa31 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -85,29 +85,29 @@ pub fn impl_id_eq_ord_hash(emitter: &mut Emitter, input: &syn::DeriveInput) -> T quote! { #identifiable_derive - impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::PartialOrd for #name #ty_generics #where_clause where Self: crate::Identifiable { #[inline] fn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> { Some(self.cmp(other)) } } - impl #impl_generics ::core::cmp::Ord for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::Ord for #name #ty_generics #where_clause where Self: crate::Identifiable { fn cmp(&self, other: &Self) -> ::core::cmp::Ordering { - self.id().cmp(other.id()) + ::id(self).cmp(::id(other)) } } - impl #impl_generics ::core::cmp::Eq for #name #ty_generics #where_clause where Self: Identifiable {} - impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::cmp::Eq for #name #ty_generics #where_clause where Self: crate::Identifiable {} + impl #impl_generics ::core::cmp::PartialEq for #name #ty_generics #where_clause where 
Self: crate::Identifiable { fn eq(&self, other: &Self) -> bool { - self.id() == other.id() + ::id(self) == ::id(other) } } - impl #impl_generics ::core::hash::Hash for #name #ty_generics #where_clause where Self: Identifiable { + impl #impl_generics ::core::hash::Hash for #name #ty_generics #where_clause where Self: crate::Identifiable { fn hash(&self, state: &mut H) { - self.id().hash(state); + ::id(self).hash(state) } } } @@ -119,7 +119,7 @@ fn derive_identifiable(emitter: &mut Emitter, input: &IdDeriveInput) -> TokenStr let (id_type, id_expr) = get_id_type(emitter, input); quote! { - impl #impl_generics Identifiable for #name #ty_generics #where_clause { + impl #impl_generics crate::Identifiable for #name #ty_generics #where_clause { type Id = #id_type; #[inline] @@ -142,8 +142,8 @@ fn get_id_type(emitter: &mut Emitter, input: &IdDeriveInput) -> (syn::Type, syn: } IdAttr::Transparent => { return ( - parse_quote! {<#ty as Identifiable>::Id}, - parse_quote! {Identifiable::id(&self.#field_name)}, + parse_quote! {<#ty as crate::Identifiable>::Id}, + parse_quote! 
{crate::Identifiable::id(&self.#field_name)}, ); } IdAttr::Missing => { diff --git a/data_model/derive/tests/has_origin_generics.rs b/data_model/derive/tests/has_origin_generics.rs index a1090a312cc..69724714bb1 100644 --- a/data_model/derive/tests/has_origin_generics.rs +++ b/data_model/derive/tests/has_origin_generics.rs @@ -16,12 +16,6 @@ struct Object { id: ObjectId, } -impl Object { - fn id(&self) -> &ObjectId { - &self.id - } -} - #[allow(clippy::enum_variant_names)] // it's a test, duh #[derive(Debug, HasOrigin)] #[has_origin(origin = Object)] diff --git a/data_model/src/account.rs b/data_model/src/account.rs index 0e99bf6d495..c8daf5c8cb3 100644 --- a/data_model/src/account.rs +++ b/data_model/src/account.rs @@ -4,7 +4,6 @@ use alloc::{format, string::String, vec::Vec}; use core::str::FromStr; use derive_more::{Constructor, DebugCustom, Display}; -use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; @@ -16,11 +15,13 @@ use crate::{ asset::{Asset, AssetDefinitionId, AssetsMap}, domain::prelude::*, metadata::Metadata, - HasMetadata, Identifiable, ParseError, PublicKey, Registered, + HasMetadata, ParseError, PublicKey, Registered, }; #[model] mod model { + use getset::Getters; + use super::*; /// Identification of [`Account`] by the combination of the [`PublicKey`] as its sole signatory and the [`Domain`](crate::domain::Domain) it belongs to. @@ -66,16 +67,7 @@ mod model { /// Account entity is an authority which is used to execute `Iroha Special Instructions`. #[derive( - Debug, - Display, - Clone, - IdEqOrdHash, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, + Debug, Display, Clone, IdEqOrdHash, Decode, Encode, Deserialize, Serialize, IntoSchema, )] #[allow(clippy::multiple_inherent_impl)] #[display(fmt = "({id})")] // TODO: Add more? 
@@ -234,10 +226,10 @@ pub mod prelude { } #[cfg(test)] +#[cfg(feature = "transparent_api")] mod tests { use super::*; - #[cfg(feature = "transparent_api")] #[test] fn parse_account_id() { const SIGNATORY: &str = diff --git a/data_model/src/asset.rs b/data_model/src/asset.rs index 6b3972c1e49..7e4e1c43f9b 100644 --- a/data_model/src/asset.rs +++ b/data_model/src/asset.rs @@ -7,7 +7,6 @@ use core::{fmt, str::FromStr}; use std::collections::btree_map; use derive_more::{Constructor, DebugCustom, Display}; -use getset::{CopyGetters, Getters}; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_primitives::numeric::{Numeric, NumericSpec, NumericSpecParseError}; use iroha_schema::IntoSchema; @@ -17,8 +16,8 @@ use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; use crate::{ - account::prelude::*, domain::prelude::*, ipfs::IpfsPath, metadata::Metadata, HasMetadata, - Identifiable, Name, ParseError, Registered, + account::prelude::*, domain::prelude::*, ipfs::IpfsPath, metadata::Metadata, HasMetadata, Name, + ParseError, Registered, }; /// API to work with collections of [`Id`] : [`Asset`] mappings. 
@@ -34,6 +33,7 @@ pub type AssetTotalQuantityMap = btree_map::BTreeMap #[model] mod model { + use getset::{CopyGetters, Getters}; use iroha_macro::FromVariant; use super::*; @@ -114,7 +114,7 @@ mod model { Serialize, IntoSchema, )] - #[display(fmt = "{id} {value_type}{mintable}")] + #[display(fmt = "{id} {type_}{mintable}")] #[allow(clippy::multiple_inherent_impl)] #[ffi_type] pub struct AssetDefinition { @@ -122,7 +122,7 @@ mod model { pub id: AssetDefinitionId, /// Type of [`AssetValue`] #[getset(get_copy = "pub")] - pub value_type: AssetValueType, + pub type_: AssetType, /// Is the asset mintable #[getset(get_copy = "pub")] pub mintable: Mintable, @@ -164,13 +164,13 @@ mod model { #[derive( Debug, Display, Clone, IdEqOrdHash, Decode, Encode, Deserialize, Serialize, IntoSchema, )] - #[display(fmt = "{id} {mintable}{value_type}")] + #[display(fmt = "{id} {mintable}{type_}")] #[ffi_type] pub struct NewAssetDefinition { /// The identification associated with the asset definition builder. pub id: AssetDefinitionId, /// The type value associated with the asset definition builder. - pub value_type: AssetValueType, + pub type_: AssetType, /// The mintablility associated with the asset definition builder. pub mintable: Mintable, /// IPFS link to the [`AssetDefinition`] logo @@ -196,7 +196,7 @@ mod model { )] #[ffi_type] #[repr(u8)] - pub enum AssetValueType { + pub enum AssetType { /// Asset's qualitative value. 
#[display(fmt = "{_0}")] Numeric(NumericSpec), @@ -268,12 +268,12 @@ mod model { } } -/// Error occurred while parsing `AssetValueType` +/// Error occurred while parsing `AssetType` #[derive(Debug, displaydoc::Display, Clone)] #[cfg_attr(feature = "std", derive(thiserror::Error))] #[repr(u8)] -pub enum AssetValueTypeParseError { - /// `AssetValueType` should be either `Store` or `Numeric` +pub enum AssetTypeParseError { + /// `AssetType` should be either `Store` or `Numeric` WrongVariant, /// Error occurred while parsing `Numeric` variant: {_0} Numeric(#[cfg_attr(feature = "std", source)] NumericSpecParseError), @@ -283,22 +283,22 @@ impl AssetDefinition { /// Construct builder for [`AssetDefinition`] identifiable by [`Id`]. #[must_use] #[inline] - pub fn new(id: AssetDefinitionId, value_type: AssetValueType) -> ::With { - ::With::new(id, value_type) + pub fn new(id: AssetDefinitionId, type_: AssetType) -> ::With { + ::With::new(id, type_) } /// Construct builder for [`AssetDefinition`] identifiable by [`Id`]. #[must_use] #[inline] pub fn numeric(id: AssetDefinitionId) -> ::With { - ::With::new(id, AssetValueType::Numeric(NumericSpec::default())) + ::With::new(id, AssetType::Numeric(NumericSpec::default())) } /// Construct builder for [`AssetDefinition`] identifiable by [`Id`]. #[must_use] #[inline] pub fn store(id: AssetDefinitionId) -> ::With { - ::With::new(id, AssetValueType::Store) + ::With::new(id, AssetType::Store) } } @@ -314,10 +314,10 @@ impl Asset { impl NewAssetDefinition { /// Create a [`NewAssetDefinition`], reserved for internal use. - fn new(id: AssetDefinitionId, value_type: AssetValueType) -> Self { + fn new(id: AssetDefinitionId, type_: AssetType) -> Self { Self { id, - value_type, + type_, mintable: Mintable::Infinitely, logo: None, metadata: Metadata::default(), @@ -356,12 +356,10 @@ impl HasMetadata for AssetDefinition { impl AssetValue { /// Returns the asset type as a string. 
- pub const fn value_type(&self) -> AssetValueType { + pub const fn type_(&self) -> AssetType { match *self { - Self::Numeric(numeric) => { - AssetValueType::Numeric(NumericSpec::fractional(numeric.scale())) - } - Self::Store(_) => AssetValueType::Store, + Self::Numeric(numeric) => AssetType::Numeric(NumericSpec::fractional(numeric.scale())), + Self::Store(_) => AssetType::Store, } } /// Returns true if this value is zero, false if it contains [`Metadata`] or positive value @@ -446,8 +444,8 @@ impl FromStr for AssetId { } } -impl FromStr for AssetValueType { - type Err = AssetValueTypeParseError; +impl FromStr for AssetType { + type Err = AssetTypeParseError; fn from_str(s: &str) -> Result { match s { @@ -455,8 +453,8 @@ impl FromStr for AssetValueType { s if s.starts_with("Numeric") => s .parse::() .map(Self::Numeric) - .map_err(AssetValueTypeParseError::Numeric), - _ => Err(AssetValueTypeParseError::WrongVariant), + .map_err(AssetTypeParseError::Numeric), + _ => Err(AssetTypeParseError::WrongVariant), } } } @@ -478,7 +476,7 @@ impl Registered for AssetDefinition { /// The prelude re-exports most commonly used traits, structs and macros from this crate. 
pub mod prelude { pub use super::{ - Asset, AssetDefinition, AssetDefinitionId, AssetId, AssetValue, AssetValueType, Mintable, + Asset, AssetDefinition, AssetDefinitionId, AssetId, AssetType, AssetValue, Mintable, NewAssetDefinition, }; } diff --git a/data_model/src/block.rs b/data_model/src/block.rs index 6e7c5814519..f355b462a4b 100644 --- a/data_model/src/block.rs +++ b/data_model/src/block.rs @@ -14,6 +14,7 @@ use iroha_data_model_derive::model; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::{declare_versioned, version_with_scale}; +use nonzero_ext::nonzero; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -22,6 +23,8 @@ use crate::{events::prelude::*, peer, peer::PeerId, transaction::prelude::*}; #[model] mod model { + use core::num::NonZeroU64; + use getset::{CopyGetters, Getters}; use super::*; @@ -52,7 +55,7 @@ mod model { pub struct BlockHeader { /// Number of blocks in the chain including this block. #[getset(get_copy = "pub")] - pub height: u64, + pub height: NonZeroU64, /// Hash of the previous block in the chain. #[getset(get_copy = "pub")] pub prev_block_hash: Option>, @@ -61,7 +64,7 @@ mod model { pub transactions_hash: HashOf>, /// Creation timestamp (unix time in milliseconds). #[getset(skip)] - pub timestamp_ms: u64, + pub creation_time_ms: u64, /// Value of view change index. Used to resolve soft forks. 
#[getset(skip)] pub view_change_index: u32, @@ -143,12 +146,12 @@ impl BlockHeader { #[inline] #[cfg(feature = "transparent_api")] pub const fn is_genesis(&self) -> bool { - self.height == 1 + self.height.get() == 1 } /// Creation timestamp - pub const fn timestamp(&self) -> Duration { - Duration::from_millis(self.timestamp_ms) + pub const fn creation_time(&self) -> Duration { + Duration::from_millis(self.creation_time_ms) } /// Consensus estimation @@ -291,13 +294,13 @@ impl SignedBlock { .hash() .expect("Tree is not empty"); let first_transaction = &genesis_transactions[0]; - let timestamp_ms = u64::try_from(first_transaction.creation_time().as_millis()) + let creation_time_ms = u64::try_from(first_transaction.creation_time().as_millis()) .expect("Must fit since Duration was created from u64 in creation_time()"); let header = BlockHeader { - height: 1, + height: nonzero!(1_u64), prev_block_hash: None, transactions_hash, - timestamp_ms, + creation_time_ms, view_change_index: 0, consensus_estimation_ms: 0, }; @@ -345,7 +348,7 @@ mod candidate { fn validate(self) -> Result { self.validate_signatures()?; self.validate_header()?; - if self.payload.header.height == 1 { + if self.payload.header.height.get() == 1 { self.validate_genesis()?; } @@ -390,7 +393,7 @@ mod candidate { } fn validate_signatures(&self) -> Result<(), &'static str> { - if self.signatures.is_empty() && self.payload.header.height != 1 { + if self.signatures.is_empty() && self.payload.header.height.get() != 1 { return Err("Block missing signatures"); } diff --git a/data_model/src/domain.rs b/data_model/src/domain.rs index db01cd686c1..fba99212f45 100644 --- a/data_model/src/domain.rs +++ b/data_model/src/domain.rs @@ -4,7 +4,6 @@ use alloc::{format, string::String, vec::Vec}; use derive_more::{Constructor, Display, FromStr}; -use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; use iroha_primitives::numeric::Numeric; use iroha_schema::IntoSchema; @@ -23,6 +22,8 @@ use crate::{ 
#[model] mod model { + use getset::Getters; + use super::*; /// Identification of a [`Domain`]. diff --git a/data_model/src/events/data/events.rs b/data_model/src/events/data/events.rs index 766872cc0d2..92a841a0e61 100644 --- a/data_model/src/events/data/events.rs +++ b/data_model/src/events/data/events.rs @@ -3,11 +3,10 @@ use getset::Getters; use iroha_data_model_derive::{model, EventSet, HasOrigin}; -use iroha_primitives::numeric::Numeric; +use iroha_primitives::{json::JsonString, numeric::Numeric}; pub use self::model::*; use super::*; -use crate::metadata::MetadataValueBox; macro_rules! data_event { ($item:item) => { @@ -61,7 +60,7 @@ mod model { pub struct MetadataChanged { pub target: Id, pub key: Name, - pub value: MetadataValueBox, + pub value: JsonString, } /// Event @@ -129,16 +128,16 @@ mod asset { pub enum AssetDefinitionEvent { #[has_origin(asset_definition => asset_definition.id())] Created(AssetDefinition), - MintabilityChanged(AssetDefinitionId), - #[has_origin(ownership_changed => &ownership_changed.asset_definition)] - OwnerChanged(AssetDefinitionOwnerChanged), Deleted(AssetDefinitionId), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(AssetDefinitionMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataRemoved(AssetDefinitionMetadataChanged), + MintabilityChanged(AssetDefinitionId), #[has_origin(total_quantity_changed => &total_quantity_changed.asset_definition)] TotalQuantityChanged(AssetDefinitionTotalQuantityChanged), + #[has_origin(ownership_changed => &ownership_changed.asset_definition)] + OwnerChanged(AssetDefinitionOwnerChanged), } } @@ -244,14 +243,12 @@ mod role { #[has_origin(role => role.id())] Created(Role), Deleted(RoleId), - /// [`Permission`]s with particular [`PermissionId`] - /// were removed from the role. 
- #[has_origin(permission_removed => &permission_removed.role)] - PermissionRemoved(RolePermissionChanged), - /// [`Permission`]s with particular [`PermissionId`] - /// were removed added to the role. + /// [`Permission`] were added to the role. #[has_origin(permission_added => &permission_added.role)] PermissionAdded(RolePermissionChanged), + /// [`Permission`] were removed from the role. + #[has_origin(permission_removed => &permission_removed.role)] + PermissionRemoved(RolePermissionChanged), } } @@ -280,7 +277,7 @@ mod role { pub role: RoleId, // TODO: Skipped temporarily because of FFI #[getset(skip)] - pub permission: PermissionId, + pub permission: Permission, } } } @@ -298,21 +295,19 @@ mod account { data_event! { #[has_origin(origin = Account)] pub enum AccountEvent { - #[has_origin(asset_event => &asset_event.origin().account)] - Asset(AssetEvent), #[has_origin(account => account.id())] Created(Account), Deleted(AccountId), - AuthenticationAdded(AccountId), - AuthenticationRemoved(AccountId), + #[has_origin(asset_event => &asset_event.origin().account)] + Asset(AssetEvent), #[has_origin(permission_changed => &permission_changed.account)] PermissionAdded(AccountPermissionChanged), #[has_origin(permission_changed => &permission_changed.account)] PermissionRemoved(AccountPermissionChanged), #[has_origin(role_changed => &role_changed.account)] - RoleRevoked(AccountRoleChanged), - #[has_origin(role_changed => &role_changed.account)] RoleGranted(AccountRoleChanged), + #[has_origin(role_changed => &role_changed.account)] + RoleRevoked(AccountRoleChanged), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(AccountMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] @@ -345,7 +340,7 @@ mod account { pub account: AccountId, // TODO: Skipped temporarily because of FFI #[getset(skip)] - pub permission: PermissionId, + pub permission: Permission, } /// Depending on the wrapping event, [`AccountRoleChanged`] represents 
the granted or revoked role @@ -373,7 +368,7 @@ mod account { impl AccountPermissionChanged { /// Get permission id - pub fn permission(&self) -> &PermissionId { + pub fn permission(&self) -> &Permission { &self.permission } } @@ -390,13 +385,13 @@ mod domain { data_event! { #[has_origin(origin = Domain)] pub enum DomainEvent { - #[has_origin(account_event => &account_event.origin().domain)] - Account(AccountEvent), - #[has_origin(asset_definition_event => &asset_definition_event.origin().domain)] - AssetDefinition(AssetDefinitionEvent), #[has_origin(domain => domain.id())] Created(Domain), Deleted(DomainId), + #[has_origin(asset_definition_event => &asset_definition_event.origin().domain)] + AssetDefinition(AssetDefinitionEvent), + #[has_origin(account_event => &account_event.origin().domain)] + Account(AccountEvent), #[has_origin(metadata_changed => &metadata_changed.target)] MetadataInserted(DomainMetadataChanged), #[has_origin(metadata_changed => &metadata_changed.target)] @@ -489,14 +484,54 @@ mod trigger { } mod config { + pub use self::model::*; use super::*; + use crate::parameter::Parameter; - data_event! 
{ - #[has_origin(origin = Parameter)] + #[model] + mod model { + use super::*; + + /// Changed parameter event + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] + pub struct ParameterChanged { + /// Previous value for the parameter + pub old_value: Parameter, + /// Next value for the parameter + pub new_value: Parameter, + } + + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + EventSet, + FromVariant, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] pub enum ConfigurationEvent { - Changed(ParameterId), - Created(ParameterId), - Deleted(ParameterId), + Changed(ParameterChanged), } } } @@ -618,7 +653,7 @@ impl MetadataChanged { } /// Getter for `value` - pub fn value(&self) -> &MetadataValueBox { + pub fn value(&self) -> &JsonString { &self.value } } @@ -631,7 +666,7 @@ pub mod prelude { AssetDefinitionOwnerChanged, AssetDefinitionTotalQuantityChanged, AssetEvent, AssetEventSet, }, - config::{ConfigurationEvent, ConfigurationEventSet}, + config::{ConfigurationEvent, ConfigurationEventSet, ParameterChanged}, domain::{DomainEvent, DomainEventSet, DomainOwnerChanged}, executor::{ExecutorEvent, ExecutorEventSet, ExecutorUpgrade}, peer::{PeerEvent, PeerEventSet}, diff --git a/data_model/src/events/data/filters.rs b/data_model/src/events/data/filters.rs index 86bb1a886f3..0bda7058181 100644 --- a/data_model/src/events/data/filters.rs +++ b/data_model/src/events/data/filters.rs @@ -223,8 +223,6 @@ mod model { IntoSchema, )] pub struct ConfigurationEventFilter { - /// If specified matches only events originating from this configuration - pub(super) id_matcher: Option, /// Matches only event from this set pub(super) event_set: ConfigurationEventSet, } @@ -598,18 +596,10 @@ impl ConfigurationEventFilter { /// Creates a new [`ConfigurationEventFilter`] accepting all [`ConfigurationEvent`]s. 
pub const fn new() -> Self { Self { - id_matcher: None, event_set: ConfigurationEventSet::all(), } } - /// Modifies a [`ConfigurationEventFilter`] to accept only [`ConfigurationEvent`]s originating from ids matching `id_matcher`. - #[must_use] - pub fn for_parameter(mut self, id_matcher: ParameterId) -> Self { - self.id_matcher = Some(id_matcher); - self - } - /// Modifies a [`ConfigurationEventFilter`] to accept only [`ConfigurationEvent`]s of types matching `event_set`. #[must_use] pub const fn for_events(mut self, event_set: ConfigurationEventSet) -> Self { @@ -629,12 +619,6 @@ impl super::EventFilter for ConfigurationEventFilter { type Event = super::ConfigurationEvent; fn matches(&self, event: &Self::Event) -> bool { - if let Some(id_matcher) = &self.id_matcher { - if id_matcher != event.origin() { - return false; - } - } - if !self.event_set.matches(event) { return false; } diff --git a/data_model/src/events/pipeline.rs b/data_model/src/events/pipeline.rs index 731e194c505..bbecb5ac47f 100644 --- a/data_model/src/events/pipeline.rs +++ b/data_model/src/events/pipeline.rs @@ -2,6 +2,7 @@ #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; +use core::num::NonZeroU64; use iroha_crypto::HashOf; use iroha_data_model_derive::model; @@ -84,7 +85,7 @@ mod model { #[getset(get = "pub")] pub hash: HashOf, #[getset(get_copy = "pub")] - pub block_height: Option, + pub block_height: Option, #[getset(get = "pub")] pub status: TransactionStatus, } @@ -181,7 +182,7 @@ mod model { #[ffi_type] pub struct BlockEventFilter { #[getset(get_copy = "pub")] - pub height: Option, + pub height: Option, #[getset(get = "pub")] pub status: Option, } @@ -205,7 +206,7 @@ mod model { pub struct TransactionEventFilter { #[getset(get = "pub")] pub hash: Option>, - pub block_height: Option>, + pub block_height: Option>, #[getset(get = "pub")] pub status: Option, } @@ -223,7 +224,7 @@ impl BlockEventFilter { /// Match only block with the given height 
#[must_use] - pub fn for_height(mut self, height: u64) -> Self { + pub fn for_height(mut self, height: NonZeroU64) -> Self { self.height = Some(height); self } @@ -249,7 +250,7 @@ impl TransactionEventFilter { /// Match only transactions with the given block height #[must_use] - pub fn for_block_height(mut self, block_height: Option) -> Self { + pub fn for_block_height(mut self, block_height: Option) -> Self { self.block_height = Some(block_height); self } @@ -270,7 +271,7 @@ impl TransactionEventFilter { /// Block height // TODO: Derive with getset - pub fn block_height(&self) -> Option> { + pub fn block_height(&self) -> Option> { self.block_height } } @@ -345,19 +346,20 @@ mod tests { use alloc::{string::ToString as _, vec, vec::Vec}; use iroha_crypto::Hash; + use nonzero_ext::nonzero; use super::{super::EventFilter, *}; use crate::{transaction::error::TransactionRejectionReason::*, ValidationFail}; impl BlockHeader { - fn dummy(height: u64) -> Self { + fn dummy(height: NonZeroU64) -> Self { Self { height, prev_block_hash: None, transactions_hash: HashOf::from_untyped_unchecked(Hash::prehashed( [1_u8; Hash::LENGTH], )), - timestamp_ms: 0, + creation_time_ms: 0, view_change_index: 0, consensus_estimation_ms: 0, } @@ -375,7 +377,7 @@ mod tests { .into(), TransactionEvent { hash: HashOf::from_untyped_unchecked(Hash::prehashed([0_u8; Hash::LENGTH])), - block_height: Some(3), + block_height: Some(nonzero!(3_u64)), status: TransactionStatus::Rejected(Box::new(Validation( ValidationFail::TooComplex, ))), @@ -388,7 +390,7 @@ mod tests { } .into(), BlockEvent { - header: BlockHeader::dummy(7), + header: BlockHeader::dummy(nonzero!(7_u64)), hash: HashOf::from_untyped_unchecked(Hash::prehashed([7_u8; Hash::LENGTH])), status: BlockStatus::Committed, } @@ -418,7 +420,7 @@ mod tests { .into(), TransactionEvent { hash: HashOf::from_untyped_unchecked(Hash::prehashed([0_u8; Hash::LENGTH])), - block_height: Some(3), + block_height: Some(nonzero!(3_u64)), status: 
TransactionStatus::Rejected(Box::new(Validation( ValidationFail::TooComplex, ))), @@ -439,7 +441,7 @@ mod tests { vec![BlockEvent { status: BlockStatus::Committed, hash: HashOf::from_untyped_unchecked(Hash::prehashed([7_u8; Hash::LENGTH])), - header: BlockHeader::dummy(7), + header: BlockHeader::dummy(nonzero!(7_u64)), } .into()], ); diff --git a/data_model/src/executor.rs b/data_model/src/executor.rs index 4e030b1c3b6..5309c6ed477 100644 --- a/data_model/src/executor.rs +++ b/data_model/src/executor.rs @@ -5,21 +5,22 @@ use alloc::{collections::BTreeSet, format, string::String, vec::Vec}; #[cfg(feature = "std")] use std::collections::BTreeSet; -use derive_more::{Constructor, Display}; -use getset::Getters; use iroha_data_model_derive::model; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; +use iroha_primitives::json::JsonString; +use iroha_schema::{Ident, IntoSchema}; pub use self::model::*; -use crate::{permission::PermissionId, transaction::WasmSmartContract, JsonString}; +use crate::transaction::WasmSmartContract; #[model] mod model { - use iroha_schema::Ident; + use derive_more::{Constructor, Display}; + use getset::Getters; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; use super::*; + use crate::parameter::CustomParameters; /// executor that checks if an operation satisfies some conditions. /// @@ -79,19 +80,15 @@ mod model { #[ffi_type] #[display(fmt = "{self:?}")] pub struct ExecutorDataModel { - /// Permission tokens supported by the executor. - /// - /// These IDs refer to the types in the schema. - pub permissions: BTreeSet, - /// Type id in the schema. - /// Corresponds to payload of `InstructionBox::Custom`. - /// - /// Note that technically it is not needed - /// (custom instructions can be used without specifying it), - /// however it is recommended to set it, - /// so clients could retrieve it through Iroha API. 
- pub custom_instruction: Option, - /// Data model JSON schema, typically produced by [`IntoSchema`]. + /// Corresponds to the [`Parameter::Custom`]. + /// Holds the initial value of the parameter + pub parameters: CustomParameters, + /// Corresponds to the [`InstructionBox::Custom`]. + /// Any type that implements [`Instruction`] should be listed here. + pub instructions: BTreeSet, + /// Ids of permission tokens supported by the executor. + pub permissions: BTreeSet, + /// Schema of executor defined data types (instructions, parameters, permissions) pub schema: JsonString, } @@ -103,7 +100,7 @@ mod model { // currently it fails for all fields impl ExecutorDataModel { /// Getter - pub fn permissions(&self) -> &BTreeSet { + pub fn permissions(&self) -> &BTreeSet { &self.permissions } diff --git a/data_model/src/ipfs.rs b/data_model/src/ipfs.rs index 635900ba5c2..e6dbaca5c76 100644 --- a/data_model/src/ipfs.rs +++ b/data_model/src/ipfs.rs @@ -4,18 +4,19 @@ use alloc::{format, string::String, vec::Vec}; use core::str::FromStr; -use derive_more::Display; use iroha_data_model_derive::model; use iroha_primitives::conststr::ConstString; -use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode, Input}; -use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; use crate::ParseError; #[model] mod model { + use derive_more::Display; + use iroha_schema::IntoSchema; + use serde_with::{DeserializeFromStr, SerializeDisplay}; + use super::*; /// Represents path in IPFS. Performs checks to ensure path validity. diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs index c7024cb8d2f..67ac4876d24 100644 --- a/data_model/src/isi.rs +++ b/data_model/src/isi.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; use strum::EnumDiscriminants; pub use self::{model::*, transparent::*}; -use super::{metadata::MetadataValueBox, prelude::*}; +use super::prelude::*; use crate::{seal, Level, Registered}; /// Marker trait designating instruction. 
@@ -21,7 +21,10 @@ use crate::{seal, Level, Registered}; /// Instructions allows to change the state of `Iroha`. /// All possible instructions are implementors of this trait, excluding /// [`InstructionBox`] which is just a wrapper. -pub trait Instruction: Into + seal::Sealed { +pub trait Instruction: Into {} + +/// Marker trait for built-in queries +pub trait BuiltInInstruction: Instruction + seal::Sealed { /// [`Encode`] [`Self`] as [`InstructionBox`]. /// /// Used to avoid an unnecessary clone @@ -111,22 +114,20 @@ mod model { #[debug(fmt = "{_0:?}")] SetParameter(SetParameter), #[debug(fmt = "{_0:?}")] - NewParameter(NewParameter), - #[debug(fmt = "{_0:?}")] Upgrade(Upgrade), #[debug(fmt = "{_0:?}")] Log(Log), - #[debug(fmt = "{_0:?}")] - Custom(Custom), #[debug(fmt = "{_0:?}")] - Fail(Fail), + Custom(CustomInstruction), } } macro_rules! impl_instruction { ($($ty:ty),+ $(,)?) => { $( - impl Instruction for $ty { + impl Instruction for $ty {} + + impl BuiltInInstruction for $ty { fn encode_as_instruction_box(&self) -> Vec { InstructionBoxRef::from(self).encode() } @@ -174,23 +175,24 @@ impl_instruction! { Revoke, Revoke, SetParameter, - NewParameter, Upgrade, ExecuteTrigger, Log, - Custom, - Fail, } -impl Instruction for InstructionBox { +impl Instruction for InstructionBox {} +impl Instruction for CustomInstruction {} +impl BuiltInInstruction for InstructionBox { fn encode_as_instruction_box(&self) -> Vec { self.encode() } } mod transparent { + use iroha_primitives::json::JsonString; + use super::*; - use crate::{account::NewAccount, domain::NewDomain, metadata::Metadata, JsonString}; + use crate::{account::NewAccount, domain::NewDomain, metadata::Metadata}; macro_rules! isi { ($($meta:meta)* $item:item) => { @@ -248,31 +250,16 @@ mod transparent { }; } - isi! { + iroha_data_model_derive::model_single! { /// Generic instruction for setting a chain-wide config parameter. 
- #[derive(Constructor, Display)] - #[display(fmt = "SET `{parameter}`")] + #[derive(Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, Constructor)] + #[derive(parity_scale_codec::Decode, parity_scale_codec::Encode)] + #[derive(serde::Deserialize, serde::Serialize)] + #[derive(iroha_schema::IntoSchema)] + #[display(fmt = "SET `{_0}`")] #[serde(transparent)] #[repr(transparent)] - pub struct SetParameter { - /// The configuration parameter being changed. - #[serde(flatten)] - pub parameter: Parameter, - } - } - - isi! { - /// Sized structure for all possible on-chain configuration parameters when they are first created. - /// Generic instruction for setting a chain-wide config parameter. - #[derive(Constructor, Display)] - #[display(fmt = "SET `{parameter}`")] - #[serde(transparent)] - #[repr(transparent)] - pub struct NewParameter { - /// Parameter to be changed. - #[serde(flatten)] - pub parameter: Parameter, - } + pub struct SetParameter(pub Parameter); } isi! { @@ -284,13 +271,13 @@ mod transparent { /// Key. pub key: Name, /// Value. - pub value: MetadataValueBox, + pub value: JsonString, } } impl SetKeyValue { /// Constructs a new [`SetKeyValue`] for a [`Domain`] with the given `key` and `value`. - pub fn domain(domain_id: DomainId, key: Name, value: impl Into) -> Self { + pub fn domain(domain_id: DomainId, key: Name, value: impl Into) -> Self { Self { object: domain_id, key, @@ -301,11 +288,7 @@ mod transparent { impl SetKeyValue { /// Constructs a new [`SetKeyValue`] for an [`Account`] with the given `key` and `value`. 
- pub fn account( - account_id: AccountId, - key: Name, - value: impl Into, - ) -> Self { + pub fn account(account_id: AccountId, key: Name, value: impl Into) -> Self { Self { object: account_id, key, @@ -319,7 +302,7 @@ mod transparent { pub fn asset_definition( asset_definition_id: AssetDefinitionId, key: Name, - value: impl Into, + value: impl Into, ) -> Self { Self { object: asset_definition_id, @@ -331,7 +314,7 @@ mod transparent { impl SetKeyValue { /// Constructs a new [`SetKeyValue`] for an [`Asset`] with the given `key` and `value`. - pub fn asset(asset_id: AssetId, key: Name, value: impl Into) -> Self { + pub fn asset(asset_id: AssetId, key: Name, value: impl Into) -> Self { Self { object: asset_id, key, @@ -342,11 +325,7 @@ mod transparent { impl SetKeyValue { /// Constructs a new [`SetKeyValue`] for a [`Trigger`] with the given `key` and `value`. - pub fn trigger( - trigger_id: TriggerId, - key: Name, - value: impl Into, - ) -> Self { + pub fn trigger(trigger_id: TriggerId, key: Name, value: impl Into) -> Self { Self { object: trigger_id, key, @@ -786,7 +765,7 @@ mod transparent { pub fn asset_store(asset_id: AssetId, to: AccountId) -> Self { Self { source: asset_id, - object: Metadata::new(), + object: Metadata::default(), destination: to, } } @@ -821,18 +800,6 @@ mod transparent { => TransferBoxRef<'a> => InstructionBoxRef<'a>[Transfer] } - isi! { - /// Utilitary instruction to fail execution and submit an error `message`. - #[derive(Constructor, Display)] - #[display(fmt = "FAIL `{message}`")] - #[serde(transparent)] - #[repr(transparent)] - pub struct Fail { - /// Message to submit. - pub message: String, - } - } - isi! { /// Generic instruction for granting permission to an entity. #[schema(bounds = "O: IntoSchema, D: Identifiable, D::Id: IntoSchema")] @@ -994,23 +961,25 @@ mod transparent { } isi! { - /// Custom instruction with arbitrary payload. - /// Should be handled in custom executor, where it will be translated to usual ISIs. 
+ /// Blockchain specific instruction (defined in the executor). /// Can be used to extend instruction set or add expression system. - /// See `executor_custom_instructions_simple` and `executor_custom_instructions_complex` - /// examples in `client/tests/integration/smartcontracts`. /// - /// Note: If using custom instructions, it is recommended - /// to set `ExecutorDataModel::custom_instruction` in custom executor `migrate` entrypoint. + /// Note: If using custom instructions remember to set (during the executor migration) + /// [`ExecutorDataModel::instructions`] + /// + /// # Examples + /// + /// Check `executor_custom_instructions_simple` and `executor_custom_instructions_complex` + /// integration tests #[derive(Display)] #[display(fmt = "CUSTOM({payload})")] - pub struct Custom { + pub struct CustomInstruction { /// Custom payload pub payload: JsonString, } } - impl Custom { + impl CustomInstruction { /// Constructor pub fn new(payload: impl Into) -> Self { Self { @@ -1250,8 +1219,7 @@ pub mod error { pub use self::model::*; use super::InstructionType; use crate::{ - asset::AssetValueType, - metadata, + asset::AssetType, query::error::{FindError, QueryExecutionFail}, IdBox, }; @@ -1301,14 +1269,6 @@ pub mod error { Mintability(#[cfg_attr(feature = "std", source)] MintabilityError), /// Illegal math operation Math(#[cfg_attr(feature = "std", source)] MathError), - /// Metadata error - Metadata(#[cfg_attr(feature = "std", source)] metadata::MetadataError), - /// Execution failed: {0} - Fail( - #[skip_from] - #[skip_try_from] - String, - ), /// Invalid instruction parameter InvalidParameter(#[cfg_attr(feature = "std", source)] InvalidParameterError), /// Iroha invariant violation: {0} @@ -1394,18 +1354,12 @@ pub mod error { #[ffi_type] pub enum TypeError { /// Asset Ids correspond to assets with different underlying types, {0} - AssetValueType(#[cfg_attr(feature = "std", source)] Mismatch), + AssetType(#[cfg_attr(feature = "std", source)] Mismatch), /// 
Numeric asset value type was expected, received: {0} - NumericAssetValueTypeExpected( - #[skip_from] - #[skip_try_from] - AssetValueType, - ), - /// Store asset value type was expected, received: {0} - StoreAssetValueTypeExpected( + NumericAssetTypeExpected( #[skip_from] #[skip_try_from] - AssetValueType, + AssetType, ), } @@ -1504,7 +1458,7 @@ pub mod error { TimeTriggerInThePast, } - /// Repetition of of `{instruction_type}` for id `{id}` + /// Repetition of of `{instruction}` for id `{id}` #[derive( Debug, displaydoc::Display, @@ -1523,7 +1477,7 @@ pub mod error { #[ffi_type] pub struct RepetitionError { /// Instruction type - pub instruction_type: InstructionType, + pub instruction: InstructionType, /// Id of the object being repeated pub id: IdBox, } @@ -1539,8 +1493,8 @@ pub mod error { /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{ - AssetTransferBox, Burn, BurnBox, Custom, ExecuteTrigger, Fail, Grant, GrantBox, - InstructionBox, Log, Mint, MintBox, NewParameter, Register, RegisterBox, RemoveKeyValue, + AssetTransferBox, Burn, BurnBox, CustomInstruction, ExecuteTrigger, Grant, GrantBox, + InstructionBox, Log, Mint, MintBox, Register, RegisterBox, RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SetKeyValue, SetKeyValueBox, SetParameter, Transfer, TransferBox, Unregister, UnregisterBox, Upgrade, }; diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs index aecffef2df4..5b7417b53fa 100644 --- a/data_model/src/lib.rs +++ b/data_model/src/lib.rs @@ -9,25 +9,17 @@ extern crate alloc; #[cfg(not(feature = "std"))] -use alloc::{ - boxed::Box, - format, - string::{String, ToString}, - vec::Vec, -}; -use core::{fmt, fmt::Debug, ops::RangeInclusive, str::FromStr}; +use alloc::{boxed::Box, format, string::String, vec::Vec}; -use derive_more::{Constructor, Display, From, FromStr}; -use getset::Getters; +use derive_more::{Constructor, Display}; use iroha_crypto::PublicKey; -use 
iroha_data_model_derive::{model, EnumRef, IdEqOrdHash}; +use iroha_data_model_derive::{model, EnumRef}; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::{declare_versioned, version_with_scale}; use parity_scale_codec::{Decode, Encode}; use prelude::Executable; use serde::{Deserialize, Serialize}; -use serde_with::{DeserializeFromStr, SerializeDisplay}; use strum::FromRepr; pub use self::model::*; @@ -43,6 +35,7 @@ pub mod ipfs; pub mod isi; pub mod metadata; pub mod name; +pub mod parameter; pub mod peer; pub mod permission; pub mod query; @@ -117,12 +110,9 @@ mod seal { Revoke, SetParameter, - NewParameter, Upgrade, ExecuteTrigger, Log, - Custom, - Fail, // Boxed queries QueryBox, @@ -189,8 +179,8 @@ pub struct EnumTryAsError { } // Manual implementation because this allow annotation does not affect `Display` derive -impl fmt::Display for EnumTryAsError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> fmt::Result { +impl core::fmt::Display for EnumTryAsError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!( f, "Expected: {}\nGot: {:?}", @@ -211,414 +201,16 @@ impl EnumTryAsError { } #[cfg(feature = "std")] -impl std::error::Error for EnumTryAsError {} - -pub mod parameter { - //! Structures, traits and impls related to `Paramater`s. 
- - use core::borrow::Borrow; - - use iroha_primitives::numeric::Numeric; - - pub use self::model::*; - use super::*; - use crate::isi::InstructionBox; - - /// Set of parameter names currently used by Iroha - #[allow(missing_docs)] - pub mod default { - pub const MAX_TRANSACTIONS_IN_BLOCK: &str = "MaxTransactionsInBlock"; - pub const BLOCK_TIME: &str = "BlockTime"; - pub const COMMIT_TIME_LIMIT: &str = "CommitTimeLimit"; - pub const TRANSACTION_LIMITS: &str = "TransactionLimits"; - pub const WSV_DOMAIN_METADATA_LIMITS: &str = "WSVDomainMetadataLimits"; - pub const WSV_ASSET_DEFINITION_METADATA_LIMITS: &str = "WSVAssetDefinitionMetadataLimits"; - pub const WSV_ACCOUNT_METADATA_LIMITS: &str = "WSVAccountMetadataLimits"; - pub const WSV_ASSET_METADATA_LIMITS: &str = "WSVAssetMetadataLimits"; - pub const WSV_TRIGGER_METADATA_LIMITS: &str = "WSVTriggerMetadataLimits"; - pub const WSV_IDENT_LENGTH_LIMITS: &str = "WSVIdentLengthLimits"; - pub const EXECUTOR_FUEL_LIMIT: &str = "ExecutorFuelLimit"; - pub const EXECUTOR_MAX_MEMORY: &str = "ExecutorMaxMemory"; - pub const WASM_FUEL_LIMIT: &str = "WASMFuelLimit"; - pub const WASM_MAX_MEMORY: &str = "WASMMaxMemory"; - } - - #[model] - mod model { - use super::*; - - #[derive( - Debug, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type(local)] - pub enum ParameterValueBox { - TransactionLimits(transaction::TransactionLimits), - MetadataLimits(metadata::Limits), - LengthLimits(LengthLimits), - Numeric( - #[skip_from] - #[skip_try_from] - Numeric, - ), - } - - /// Identification of a [`Parameter`]. 
- #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - Getters, - FromStr, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{name}")] - #[getset(get = "pub")] - #[serde(transparent)] - #[repr(transparent)] - #[ffi_type(opaque)] - pub struct ParameterId { - /// [`Name`] unique to a [`Parameter`]. - pub name: Name, - } - - #[derive( - Debug, - Display, - Clone, - Constructor, - IdEqOrdHash, - Decode, - Encode, - DeserializeFromStr, - SerializeDisplay, - IntoSchema, - )] - #[display(fmt = "?{id}={val}")] - /// A chain-wide configuration parameter and its value. - #[ffi_type] - pub struct Parameter { - /// Unique [`Id`] of the [`Parameter`]. - pub id: ParameterId, - /// Current value of the [`Parameter`]. - pub val: ParameterValueBox, - } - } - - // TODO: Maybe derive - impl core::fmt::Display for ParameterValueBox { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::MetadataLimits(v) => core::fmt::Display::fmt(&v, f), - Self::TransactionLimits(v) => core::fmt::Display::fmt(&v, f), - Self::LengthLimits(v) => core::fmt::Display::fmt(&v, f), - Self::Numeric(v) => core::fmt::Display::fmt(&v, f), - } - } - } - - impl> From for ParameterValueBox { - fn from(value: T) -> Self { - Self::Numeric(value.into()) - } - } - - impl TryFrom for u32 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: ParameterValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let ParameterValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } - } - - impl TryFrom for u64 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: ParameterValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let ParameterValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - 
numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } - } - - impl Parameter { - /// Current value of the [`Parameter`]. - pub fn val(&self) -> &ParameterValueBox { - &self.val - } - } - - impl Borrow for ParameterId { - fn borrow(&self) -> &str { - self.name.borrow() - } - } - - impl Borrow for Parameter { - fn borrow(&self) -> &str { - self.id.borrow() - } - } - - impl FromStr for Parameter { - type Err = ParseError; - - fn from_str(string: &str) -> Result { - if let Some((parameter_id_candidate, val_candidate)) = string.rsplit_once('=') { - if let Some(parameter_id_candidate) = parameter_id_candidate.strip_prefix('?') { - let param_id: ParameterId = - parameter_id_candidate.parse().map_err(|_| ParseError { - reason: "Failed to parse the `param_id` part of the `Parameter`.", - })?; - if let Some((val, ty)) = val_candidate.rsplit_once('_') { - let val = match ty { - // Shorthand for `LengthLimits` - "LL" => { - let (lower, upper) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Two comma-separated values are expected.", - })?; - let lower = lower.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Invalid lower `u32` bound.", - })?; - let upper = upper.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `LengthLimits`. Invalid upper `u32` bound.", - })?; - LengthLimits::new(lower, upper).into() - } - // Shorthand for `TransactionLimits` - "TL" => { - let (max_instr, max_wasm_size) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. Two comma-separated values are expected.", - })?; - let max_instr = max_instr.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. 
`max_instruction_number` field should be a valid `u64`.", - })?; - let max_wasm_size = max_wasm_size.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `TransactionLimits`. `max_wasm_size_bytes` field should be a valid `u64`.", - })?; - transaction::TransactionLimits::new( - max_instr, - max_wasm_size, - ).into() - } - // Shorthand for `MetadataLimits` - "ML" => { - let (lower, upper) = val.rsplit_once(',').ok_or( ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Two comma-separated values are expected.", - })?; - let lower = lower.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Invalid `u32` in `capacity` field.", - })?; - let upper = upper.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `MetadataLimits`. Invalid `u32` in `max_entry_len` field.", - })?; - metadata::Limits::new(lower, upper).into() - } - _ => return Err(ParseError { - reason: - "Unsupported type provided for the `val` part of the `Parameter`.", - }), - }; - Ok(Self::new(param_id, val)) - } else { - let val = val_candidate.parse::().map_err(|_| ParseError { - reason: - "Failed to parse the `val` part of the `Parameter` as `Numeric`.", - })?; - - Ok(Self::new(param_id, val.into())) - } - } else { - Err(ParseError { - reason: "`param_id` part of `Parameter` must start with `?`", - }) - } - } else { - Err(ParseError { - reason: "The `Parameter` string did not contain the `=` character.", - }) - } - } - } - - /// Convenience tool for setting parameters - #[derive(Default)] - #[must_use] - pub struct ParametersBuilder { - parameters: Vec, - } - - /// Error associated with parameters builder - #[derive(From, Debug, Display, Copy, Clone)] - pub enum ParametersBuilderError { - /// Error emerged during parsing of parameter id - Parse(ParseError), - } - - #[cfg(feature = "std")] - impl 
std::error::Error for ParametersBuilderError {} - - impl ParametersBuilder { - /// Construct [`Self`] - pub fn new() -> Self { - Self::default() - } - - /// Add [`Parameter`] to self - /// - /// # Errors - /// - [`ParameterId`] parsing failed - pub fn add_parameter( - mut self, - parameter_id: &str, - val: impl Into, - ) -> Result { - let parameter = Parameter { - id: parameter_id.parse()?, - val: val.into(), - }; - self.parameters.push(parameter); - Ok(self) - } - - /// Create sequence isi for setting parameters - pub fn into_set_parameters(self) -> Vec { - self.parameters - .into_iter() - .map(isi::SetParameter::new) - .map(Into::into) - .collect() - } - - /// Create sequence isi for creating parameters - pub fn into_create_parameters(self) -> Vec { - self.parameters - .into_iter() - .map(isi::NewParameter::new) - .map(Into::into) - .collect() - } - } - - pub mod prelude { - //! Prelude: re-export of most commonly used traits, structs and macros in this crate. - - pub use super::{Parameter, ParameterId}; - } - - #[cfg(test)] - mod tests { - use super::*; - use crate::{ - prelude::{numeric, MetadataLimits}, - transaction::TransactionLimits, - }; - - const INVALID_PARAM: [&str; 4] = [ - "", - "Block?SyncGossipPeriod=20000", - "?BlockSyncGossipPeriod20000", - "?BlockSyncGossipPeriod=20000_u32", - ]; - - #[test] - fn test_invalid_parameter_str() { - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[0]), - Err(err) if err.reason == "The `Parameter` string did not contain the `=` character." - )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[1]), - Err(err) if err.reason == "`param_id` part of `Parameter` must start with `?`" - )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[2]), - Err(err) if err.to_string() == "The `Parameter` string did not contain the `=` character." 
- )); - assert!(matches!( - parameter::Parameter::from_str(INVALID_PARAM[3]), - Err(err) if err.to_string() == "Unsupported type provided for the `val` part of the `Parameter`." - )); - } - - #[test] - fn test_parameter_serialize_deserialize_consistent() { - let parameters = [ - Parameter::new( - ParameterId::from_str("TransactionLimits") - .expect("Failed to parse `ParameterId`"), - TransactionLimits::new(42, 24).into(), - ), - Parameter::new( - ParameterId::from_str("MetadataLimits").expect("Failed to parse `ParameterId`"), - MetadataLimits::new(42, 24).into(), - ), - Parameter::new( - ParameterId::from_str("LengthLimits").expect("Failed to parse `ParameterId`"), - LengthLimits::new(24, 42).into(), - ), - Parameter::new( - ParameterId::from_str("Int").expect("Failed to parse `ParameterId`"), - numeric!(42).into(), - ), - ]; - - for parameter in parameters { - assert_eq!( - parameter, - serde_json::to_string(¶meter) - .and_then(|parameter| serde_json::from_str(¶meter)) - .unwrap_or_else(|_| panic!( - "Failed to de/serialize parameter {:?}", - ¶meter - )) - ); - } - } - } +impl std::error::Error + for EnumTryAsError +{ } #[model] #[allow(clippy::redundant_pub_crate)] mod model { + use getset::Getters; + use super::*; /// Unique id of blockchain @@ -688,9 +280,9 @@ mod model { /// [`RoleId`](`role::RoleId`) variant. RoleId(role::RoleId), /// [`Permission`](`permission::Permission`) variant. - PermissionId(permission::PermissionId), - /// [`ParameterId`](`parameter::ParameterId`) variant. - ParameterId(parameter::ParameterId), + Permission(permission::Permission), + /// [`CustomParameter`](`parameter::CustomParameter`) variant. + CustomParameterId(parameter::CustomParameterId), } /// Sized container for all possible entities. @@ -735,35 +327,8 @@ mod model { Trigger(trigger::Trigger), /// [`Role`](`role::Role`) variant. Role(role::Role), - /// [`Parameter`](`parameter::Parameter`) variant. 
- Parameter(parameter::Parameter), - } - - /// Limits of length of the identifiers (e.g. in [`domain::Domain`], [`account::Account`], [`asset::AssetDefinition`]) in number of chars - #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{min},{max}_LL")] - #[getset(get = "pub")] - #[ffi_type] - pub struct LengthLimits { - /// Minimal length in number of chars (inclusive). - pub(super) min: u32, - /// Maximal length in number of chars (inclusive). - pub(super) max: u32, + /// [`CustomParameter`](`parameter::CustomParameter`) variant. + CustomParameter(parameter::CustomParameter), } /// Operation validation failed. @@ -876,69 +441,6 @@ mod model { /// in the next request to continue fetching results of the original query pub cursor: crate::query::cursor::ForwardCursor, } - - /// String containing serialized valid JSON. - /// - /// This string is guaranteed to be parsed as JSON. - #[derive(Display, Debug, Clone, Encode, Decode, Ord, PartialOrd, Eq, PartialEq, IntoSchema)] - #[ffi_type(unsafe {robust})] - #[repr(transparent)] - #[display(fmt = "{}", "0")] - pub struct JsonString(pub(super) String); -} - -impl JsonString { - /// Create without checking whether the input is a valid JSON string. - /// - /// The caller must guarantee that the value is valid. 
- pub fn from_string_unchecked(value: String) -> Self { - Self(value) - } -} - -impl Default for JsonString { - fn default() -> Self { - // NOTE: empty string isn't valid JSON - Self("null".to_string()) - } -} - -impl From<&serde_json::Value> for JsonString { - fn from(value: &serde_json::Value) -> Self { - Self(value.to_string()) - } -} - -impl From for JsonString { - fn from(value: serde_json::Value) -> Self { - Self::from(&value) - } -} - -impl AsRef for JsonString { - fn as_ref(&self) -> &str { - &self.0 - } -} - -impl<'de> serde::de::Deserialize<'de> for JsonString { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let json = serde_json::Value::deserialize(deserializer)?; - Ok(Self::from(&json)) - } -} - -impl serde::ser::Serialize for JsonString { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let json = serde_json::Value::from_str(&self.0).map_err(serde::ser::Error::custom)?; - json.serialize(serializer) - } } macro_rules! impl_encode_as_id_box { @@ -974,9 +476,8 @@ impl_encode_as_id_box! { asset::AssetDefinitionId, asset::AssetId, trigger::TriggerId, - permission::PermissionId, + permission::Permission, role::RoleId, - parameter::ParameterId, } impl_encode_as_identifiable_box! { @@ -991,7 +492,6 @@ impl_encode_as_identifiable_box! { asset::Asset, trigger::Trigger, role::Role, - parameter::Parameter, } impl Decode for ChainId { @@ -1030,7 +530,7 @@ impl IdentifiableBox { IdentifiableBox::Asset(a) => a.id().clone().into(), IdentifiableBox::Trigger(a) => a.id().clone().into(), IdentifiableBox::Role(a) => a.id().clone().into(), - IdentifiableBox::Parameter(a) => a.id().clone().into(), + IdentifiableBox::CustomParameter(a) => a.id().clone().into(), } } } @@ -1081,20 +581,6 @@ pub trait Registered: Identifiable { type With; } -impl LengthLimits { - /// Constructor. 
- pub const fn new(min: u32, max: u32) -> Self { - Self { min, max } - } -} - -impl From for RangeInclusive { - #[inline] - fn from(limits: LengthLimits) -> Self { - RangeInclusive::new(limits.min, limits.max) - } -} - declare_versioned!( BatchedResponse serde::Deserialize<'de>> 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema @@ -1150,13 +636,16 @@ mod ffi { pub mod prelude { //! Prelude: re-export of most commonly used traits, structs and macros in this crate. pub use iroha_crypto::PublicKey; - pub use iroha_primitives::numeric::{numeric, Numeric, NumericSpec}; + pub use iroha_primitives::{ + json::*, + numeric::{numeric, Numeric, NumericSpec}, + }; pub use super::{ account::prelude::*, asset::prelude::*, domain::prelude::*, events::prelude::*, executor::prelude::*, isi::prelude::*, metadata::prelude::*, name::prelude::*, parameter::prelude::*, peer::prelude::*, permission::prelude::*, query::prelude::*, role::prelude::*, transaction::prelude::*, trigger::prelude::*, ChainId, EnumTryAsError, - HasMetadata, IdBox, Identifiable, IdentifiableBox, LengthLimits, ValidationFail, + HasMetadata, IdBox, Identifiable, IdentifiableBox, ValidationFail, }; } diff --git a/data_model/src/metadata.rs b/data_model/src/metadata.rs index 110a13fd2cb..8a5efe050aa 100644 --- a/data_model/src/metadata.rs +++ b/data_model/src/metadata.rs @@ -1,35 +1,28 @@ //! Metadata: key-value pairs that can be attached to accounts, transactions and assets. 
#[cfg(not(feature = "std"))] -use alloc::{ - collections::btree_map, - format, - string::{String, ToString}, - vec::Vec, -}; +use alloc::{collections::BTreeMap, format, string::String, vec::Vec}; use core::borrow::Borrow; #[cfg(feature = "std")] -use std::collections::btree_map; +use std::{collections::BTreeMap, vec::Vec}; -use derive_more::Display; use iroha_data_model_derive::model; -use iroha_macro::FromVariant; -use iroha_primitives::numeric::Numeric; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; +use iroha_primitives::json::JsonString; pub use self::model::*; -use crate::Name; +use crate::prelude::Name; /// A path slice, composed of [`Name`]s. -pub type Path = [Name]; -/// Collection of parameters by their names. -pub type UnlimitedMetadata = btree_map::BTreeMap; +pub type Path = [Name]; #[model] mod model { + use derive_more::Display; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use super::*; /// Collection of parameters by their names with checked insertion. @@ -53,344 +46,33 @@ mod model { #[serde(transparent)] #[display(fmt = "Metadata")] #[allow(clippy::multiple_inherent_impl)] - pub struct Metadata(pub(super) btree_map::BTreeMap); - - /// Metadata value - #[derive( - Debug, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type(opaque)] - #[allow(missing_docs)] - pub enum MetadataValueBox { - Bool(bool), - String(String), - Name(Name), - Bytes(Vec), - Numeric(Numeric), - LimitedMetadata(Metadata), - - Vec( - #[skip_from] - #[skip_try_from] - Vec, - ), - } - - /// Limits for [`Metadata`]. 
- #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - #[display(fmt = "{capacity},{max_entry_len}_ML")] - pub struct Limits { - /// Maximum number of entries - pub capacity: u32, - /// Maximum length of entry - pub max_entry_len: u32, - } - - /// Metadata related errors. - #[derive( - Debug, - displaydoc::Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type(local)] - #[cfg_attr(feature = "std", derive(thiserror::Error))] - pub enum MetadataError { - /// Path specification empty - EmptyPath, - /// Metadata entry is too big - EntryTooBig(#[cfg_attr(feature = "std", source)] SizeError), - /// Metadata exceeds overall length limit - MaxCapacity(#[cfg_attr(feature = "std", source)] SizeError), - /// `{0}`: path segment not found, i.e. nothing was found at that key - MissingSegment(Name), - /// `{0}`: path segment not an instance of metadata - InvalidSegment(Name), - } - - /// Size limits exhaustion error - #[derive( - Debug, - Display, - Copy, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - #[cfg_attr(feature = "std", derive(thiserror::Error))] - #[display(fmt = "Limits are {limits}, while the actual value is {actual}")] - pub struct SizeError { - /// The limits that were set for this entry - pub limits: Limits, - /// The actual *entry* size in bytes - pub actual: u64, - } -} - -impl Limits { - /// Constructor. 
- pub const fn new(capacity: u32, max_entry_len: u32) -> Limits { - Limits { - capacity, - max_entry_len, - } - } -} - -impl From for MetadataValueBox { - fn from(value: u32) -> Self { - Self::Numeric(value.into()) - } -} - -impl From for MetadataValueBox { - fn from(value: u64) -> Self { - Self::Numeric(value.into()) - } -} - -impl TryFrom for u32 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: MetadataValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let MetadataValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } -} - -impl TryFrom for u64 { - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: MetadataValueBox) -> Result { - use iroha_macro::error::ErrorTryFromEnum; - - let MetadataValueBox::Numeric(numeric) = value else { - return Err(ErrorTryFromEnum::default()); - }; - - numeric.try_into().map_err(|_| ErrorTryFromEnum::default()) - } -} - -impl> From> for MetadataValueBox { - fn from(values: Vec) -> MetadataValueBox { - MetadataValueBox::Vec(values.into_iter().map(Into::into).collect()) - } -} - -impl TryFrom for Vec -where - MetadataValueBox: TryInto, -{ - type Error = iroha_macro::error::ErrorTryFromEnum; - - fn try_from(value: MetadataValueBox) -> Result { - if let MetadataValueBox::Vec(vec) = value { - return vec - .into_iter() - .map(TryInto::try_into) - .collect::, _>>() - .map_err(|_e| Self::Error::default()); - } - - Err(Self::Error::default()) - } -} - -impl core::fmt::Display for MetadataValueBox { - // TODO: Maybe derive - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - MetadataValueBox::Bool(v) => core::fmt::Display::fmt(&v, f), - MetadataValueBox::String(v) => core::fmt::Display::fmt(&v, f), - MetadataValueBox::Name(v) => core::fmt::Display::fmt(&v, f), - MetadataValueBox::Numeric(v) => core::fmt::Display::fmt(&v, f), - 
MetadataValueBox::Bytes(v) => write!(f, "{v:?}"), - MetadataValueBox::Vec(v) => { - // TODO: Remove so we can derive. - let list_of_display: Vec<_> = v.iter().map(ToString::to_string).collect(); - // this prints with quotation marks, which is fine 90% - // of the time, and helps delineate where a display of - // one value stops and another one begins. - write!(f, "{list_of_display:?}") - } - MetadataValueBox::LimitedMetadata(v) => core::fmt::Display::fmt(&v, f), - } - } -} - -#[allow(clippy::len_without_is_empty)] -impl MetadataValueBox { - /// Number of underneath expressions. - fn len(&self) -> usize { - use MetadataValueBox::*; - - match self { - Bool(_) | String(_) | Name(_) | Bytes(_) | Numeric(_) => 1, - Vec(v) => v.iter().map(Self::len).sum::() + 1, - LimitedMetadata(data) => data.nested_len() + 1, - } - } + pub struct Metadata(pub(super) BTreeMap); } impl Metadata { - /// Constructor. - #[inline] - pub fn new() -> Self { - Self(UnlimitedMetadata::new()) - } - - /// Get the (expensive) cumulative length of all [`Value`]s housed - /// in this map. - pub fn nested_len(&self) -> usize { - self.0.values().map(|v| 1 + v.len()).sum() - } - - /// Get metadata given path. If the path is malformed, or - /// incorrect (if e.g. any of interior path segments are not - /// [`Metadata`] instances return `None`. Else borrow the value - /// corresponding to that path. - pub fn nested_get(&self, path: &Path) -> Option<&MetadataValueBox> { - let key = path.last()?; - let mut map = &self.0; - for k in path.iter().take(path.len() - 1) { - map = match map.get(k)? { - MetadataValueBox::LimitedMetadata(data) => &data.0, - _ => return None, - }; - } - map.get(key) - } - /// Check if the internal map contains the given key. pub fn contains(&self, key: &Name) -> bool { self.0.contains_key(key) } /// Iterate over key/value pairs stored in the internal map. 
- pub fn iter(&self) -> impl ExactSizeIterator { + pub fn iter(&self) -> impl ExactSizeIterator { self.0.iter() } /// Get the `Some(&Value)` associated to `key`. Return `None` if not found. #[inline] - pub fn get(&self, key: &K) -> Option<&MetadataValueBox> + pub fn get(&self, key: &K) -> Option<&JsonString> where Name: Borrow, { self.0.get(key) } - fn len_u64(&self) -> u64 { - self.0 - .len() - .try_into() - .expect("`usize` should always fit into `u64`") - } - - /// Insert the given [`Value`] into the given path. If the path is - /// complete, check the limits and only then insert. The creation - /// of the path is the responsibility of the user. - /// - /// # Errors - /// - If the path is empty. - /// - If one of the intermediate keys is absent. - /// - If some intermediate key is a leaf node. - pub fn nested_insert_with_limits( - &mut self, - path: &Path, - value: impl Into, - limits: Limits, - ) -> Result, MetadataError> { - if self.0.len() >= limits.capacity as usize { - return Err(MetadataError::MaxCapacity(SizeError { - limits, - actual: self.len_u64(), - })); - } - let key = path.last().ok_or(MetadataError::EmptyPath)?; - let mut layer = self; - for k in path.iter().take(path.len() - 1) { - layer = match layer - .0 - .get_mut(k) - .ok_or_else(|| MetadataError::MissingSegment(k.clone()))? - { - MetadataValueBox::LimitedMetadata(data) => data, - _ => return Err(MetadataError::InvalidSegment(k.clone())), - }; - } - layer.insert_with_limits(key.clone(), value, limits) - } - /// Insert [`Value`] under the given key. Returns `Some(value)` /// if the value was already present, `None` otherwise. - /// - /// # Errors - /// Fails if `max_entry_len` or `capacity` from `limits` are exceeded. 
- pub fn insert_with_limits( - &mut self, - key: Name, - value: impl Into, - limits: Limits, - ) -> Result, MetadataError> { - let value = value.into(); - - if self.0.len() >= limits.capacity as usize && !self.0.contains_key(&key) { - return Err(MetadataError::MaxCapacity(SizeError { - limits, - actual: self.len_u64(), - })); - } - check_size_limits(&key, value.clone(), limits)?; - Ok(self.0.insert(key, value)) + pub fn insert(&mut self, key: Name, value: impl Into) -> Option { + self.0.insert(key, value.into()) } } @@ -400,200 +82,15 @@ impl Metadata { /// `Some(value)` at the key if the key was previously in the /// map, else `None`. #[inline] - pub fn remove(&mut self, key: &K) -> Option + pub fn remove(&mut self, key: &K) -> Option where Name: Borrow, { self.0.remove(key) } - - /// Remove leaf node in metadata, given path. If the path is - /// malformed, or incorrect (if e.g. any of interior path segments - /// are not [`Metadata`] instances) return `None`. Else return the - /// owned value corresponding to that path. - pub fn nested_remove(&mut self, path: &Path) -> Option { - let key = path.last()?; - let mut map = &mut self.0; - for k in path.iter().take(path.len() - 1) { - map = match map.get_mut(k)? { - MetadataValueBox::LimitedMetadata(data) => &mut data.0, - _ => return None, - }; - } - map.remove(key) - } -} - -fn check_size_limits( - key: &Name, - value: MetadataValueBox, - limits: Limits, -) -> Result<(), MetadataError> { - let entry_bytes: Vec = (key, value).encode(); - let byte_size = entry_bytes.len(); - if byte_size > limits.max_entry_len as usize { - return Err(MetadataError::EntryTooBig(SizeError { - limits, - actual: byte_size - .try_into() - .expect("`usize` should always fit into `u64`"), - })); - } - Ok(()) } pub mod prelude { //! Prelude: re-export most commonly used traits, structs and macros from this module. 
- pub use super::{Limits as MetadataLimits, Metadata, UnlimitedMetadata}; -} - -#[cfg(test)] -mod tests { - #[cfg(not(feature = "std"))] - use alloc::{borrow::ToOwned as _, vec}; - use core::str::FromStr as _; - - use super::*; - use crate::ParseError; - - /// Error used in testing to make text more readable using the `?` operator. - #[derive(Debug, Display, Clone, FromVariant)] - pub enum TestError { - Parse(ParseError), - Metadata(MetadataError), - } - - #[test] - fn nested_fns_ignore_empty_path() { - let mut metadata = Metadata::new(); - let empty_path = vec![]; - assert!(metadata.nested_get(&empty_path).is_none()); - assert!(metadata - .nested_insert_with_limits(&empty_path, "0".to_owned(), Limits::new(12, 12)) - .is_err()); - #[cfg(feature = "transparent_api")] - assert!(metadata.nested_remove(&empty_path).is_none()); - } - - #[test] - #[cfg(feature = "transparent_api")] - fn nesting_inserts_removes() -> Result<(), TestError> { - let mut metadata = Metadata::new(); - let limits = Limits::new(1024, 1024); - // TODO: If we allow a `unsafe`, we could create the path. - metadata - .insert_with_limits(Name::from_str("0")?, Metadata::new(), limits) - .expect("Valid"); - metadata - .nested_insert_with_limits( - &[Name::from_str("0")?, Name::from_str("1")?], - Metadata::new(), - limits, - ) - .expect("Valid"); - let path = [ - Name::from_str("0")?, - Name::from_str("1")?, - Name::from_str("2")?, - ]; - metadata - .nested_insert_with_limits(&path, "Hello World".to_owned(), limits) - .expect("Valid"); - assert_eq!( - *metadata.nested_get(&path).expect("Valid"), - MetadataValueBox::from("Hello World".to_owned()) - ); - assert_eq!(metadata.nested_len(), 6); // Three nested path segments. 
- metadata.nested_remove(&path); - assert!(metadata.nested_get(&path).is_none()); - Ok(()) - } - - #[test] - fn non_existent_path_segment_fails() -> Result<(), TestError> { - let mut metadata = Metadata::new(); - let limits = Limits::new(10, 15); - metadata.insert_with_limits(Name::from_str("0")?, Metadata::new(), limits)?; - metadata.nested_insert_with_limits( - &[Name::from_str("0")?, Name::from_str("1")?], - Metadata::new(), - limits, - )?; - let path = vec![ - Name::from_str("0")?, - Name::from_str("1")?, - Name::from_str("2")?, - ]; - metadata.nested_insert_with_limits(&path, "Hello World".to_owned(), limits)?; - let bad_path = vec![ - Name::from_str("0")?, - Name::from_str("3")?, - Name::from_str("2")?, - ]; - assert!(metadata - .nested_insert_with_limits(&bad_path, "Hello World".to_owned(), limits) - .is_err()); - assert!(metadata.nested_get(&bad_path).is_none()); - #[cfg(feature = "transparent_api")] - assert!(metadata.nested_remove(&bad_path).is_none()); - Ok(()) - } - - #[test] - fn nesting_respects_limits() -> Result<(), TestError> { - let mut metadata = Metadata::new(); - let limits = Limits::new(10, 14); - // TODO: If we allow a `unsafe`, we could create the path. 
- metadata.insert_with_limits(Name::from_str("0")?, Metadata::new(), limits)?; - metadata - .nested_insert_with_limits( - &[Name::from_str("0")?, Name::from_str("1")?], - Metadata::new(), - limits, - ) - .expect("Valid"); - let path = vec![ - Name::from_str("0")?, - Name::from_str("1")?, - Name::from_str("2")?, - ]; - let failing_insert = - metadata.nested_insert_with_limits(&path, "Hello World".to_owned(), limits); - - assert!(failing_insert.is_err()); - Ok(()) - } - - #[test] - fn insert_exceeds_entry_size() -> Result<(), TestError> { - let mut metadata = Metadata::new(); - let limits = Limits::new(10, 5); - assert!(metadata - .insert_with_limits(Name::from_str("1")?, "2".to_owned(), limits) - .is_ok()); - assert!(metadata - .insert_with_limits(Name::from_str("1")?, "23456".to_owned(), limits) - .is_err()); - Ok(()) - } - - #[test] - // This test is a good candidate for both property-based and parameterised testing - fn insert_exceeds_len() -> Result<(), TestError> { - let mut metadata = Metadata::new(); - let limits = Limits::new(2, 5); - assert!(metadata - .insert_with_limits(Name::from_str("1")?, "0".to_owned(), limits) - .is_ok()); - assert!(metadata - .insert_with_limits(Name::from_str("2")?, "0".to_owned(), limits) - .is_ok()); - assert!(metadata - .insert_with_limits(Name::from_str("2")?, "1".to_owned(), limits) - .is_ok()); - assert!(metadata - .insert_with_limits(Name::from_str("3")?, "0".to_owned(), limits) - .is_err()); - Ok(()) - } + pub use super::Metadata; } diff --git a/data_model/src/name.rs b/data_model/src/name.rs index 6094cb7acf4..bf87b77a275 100644 --- a/data_model/src/name.rs +++ b/data_model/src/name.rs @@ -2,20 +2,21 @@ //! and related implementations and trait implementations. 
#[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; -use core::{borrow::Borrow, ops::RangeInclusive, str::FromStr}; +use core::{borrow::Borrow, str::FromStr}; -use derive_more::{DebugCustom, Display}; use iroha_data_model_derive::model; use iroha_primitives::conststr::ConstString; -use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode, Input}; use serde::{Deserialize, Serialize}; pub use self::model::*; -use crate::{isi::error::InvalidParameterError, ParseError}; +use crate::ParseError; #[model] mod model { + use derive_more::{DebugCustom, Display}; + use iroha_schema::IntoSchema; + use super::*; /// `Name` struct represents the type of Iroha Entities names, such as @@ -41,27 +42,6 @@ mod model { } impl Name { - /// Check if `range` contains the number of chars in the inner `ConstString` of this [`Name`]. - /// - /// # Errors - /// Fails if `range` does not - pub fn validate_len( - &self, - range: impl Into>, - ) -> Result<(), InvalidParameterError> { - let range = range.into(); - let Ok(true) = &self - .0 - .chars() - .count() - .try_into() - .map(|len| range.contains(&len)) - else { - return Err(InvalidParameterError::NameLength); - }; - Ok(()) - } - /// Check if `candidate` string would be valid [`Name`]. /// /// # Errors diff --git a/data_model/src/parameter.rs b/data_model/src/parameter.rs new file mode 100644 index 00000000000..4ad59afa219 --- /dev/null +++ b/data_model/src/parameter.rs @@ -0,0 +1,680 @@ +//! Structures, traits and impls related to `Paramater`s. 
+#[cfg(not(feature = "std"))] +use alloc::{collections::btree_map, format, string::String, vec::Vec}; +use core::{num::NonZeroU64, time::Duration}; +#[cfg(feature = "std")] +use std::collections::btree_map; + +use iroha_data_model_derive::model; +use iroha_primitives::json::JsonString; +use nonzero_ext::nonzero; + +pub use self::model::*; +use crate::name::Name; + +/// Collection of [`CustomParameter`]s +pub(crate) type CustomParameters = btree_map::BTreeMap; + +#[model] +mod model { + use derive_more::{Constructor, Display, FromStr}; + use getset::{CopyGetters, Getters}; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use strum::EnumDiscriminants; + + use super::*; + + /// Id of a custom parameter + #[derive( + Debug, + Display, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + FromStr, + Constructor, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type] + pub struct CustomParameterId(pub Name); + + /// Limits that govern consensus operation + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[display(fmt = "{block_time_ms},{commit_time_ms}_SL")] + pub struct SumeragiParameters { + /// Maximal amount of time (in milliseconds) a peer will wait before forcing creation of a new block. + /// + /// A block is created if this limit or [`BlockParameters::max_transactions`] limit is reached, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub block_time_ms: u64, + /// Time (in milliseconds) a peer will wait for a block to be committed. 
+ /// + /// If this period expires the block will request a view change + pub commit_time_ms: u64, + } + + /// Single Sumeragi parameter + /// + /// Check [`SumeragiParameters`] for more details + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Serialize, + Deserialize, + IntoSchema, + )] + pub enum SumeragiParameter { + BlockTimeMs(u64), + CommitTimeMs(u64), + } + + /// Limits that a block must obey to be accepted. + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{max_transactions}_BL")] + #[getset(get_copy = "pub")] + pub struct BlockParameters { + /// Maximal number of transactions in a block. + /// + /// A block is created if this limit is reached or [`SumeragiParameters::block_time_ms`] has expired, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub max_transactions: NonZeroU64, + } + + /// Single block parameter + /// + /// Check [`BlockParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum BlockParameter { + MaxTransactions(NonZeroU64), + } + + /// Limits that a transaction must obey to be accepted. 
+ #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{max_instructions},{smart_contract_size}_TL")] + #[getset(get_copy = "pub")] + pub struct TransactionParameters { + /// Maximum number of instructions per transaction + pub max_instructions: NonZeroU64, + /// Maximum size of wasm binary in bytes + pub smart_contract_size: NonZeroU64, + } + + /// Single transaction parameter + /// + /// Check [`TransactionParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum TransactionParameter { + MaxInstructions(NonZeroU64), + SmartContractSize(NonZeroU64), + } + + /// Limits that a smart contract must obey at runtime to considered valid. + #[derive( + Debug, + Display, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + CopyGetters, + Encode, + Serialize, + IntoSchema, + )] + #[display(fmt = "{fuel},{memory}_SCL")] + #[getset(get_copy = "pub")] + pub struct SmartContractParameters { + /// Maximum amount of fuel that a smart contract can consume + pub fuel: NonZeroU64, + /// Maximum amount of memory that a smart contract can use + pub memory: NonZeroU64, + } + + /// Single smart contract parameter + /// + /// Check [`SmartContractParameters`] for more details + #[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, + )] + pub enum SmartContractParameter { + Fuel(NonZeroU64), + Memory(NonZeroU64), + } + + /// Blockchain specific parameter defined in the executor + #[derive( + Debug, Display, Clone, IdEqOrdHash, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] + #[ffi_type] + #[display(fmt = "{id}({payload})")] + pub struct CustomParameter { + /// Unique id of the parameter. + pub id: CustomParameterId, + /// Payload containing actual value. 
+ /// + /// It is JSON-encoded, and its structure must correspond to the structure of + /// the type defined in [`crate::executor::ExecutorDataModel`]. + pub payload: JsonString, + } + + /// Set of all current blockchain parameter values + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Getters, + CopyGetters, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + pub struct Parameters { + /// Sumeragi parameters + #[getset(get_copy = "pub")] + pub sumeragi: SumeragiParameters, + /// Block parameters + #[getset(get_copy = "pub")] + pub block: BlockParameters, + /// Transaction parameters + #[getset(get_copy = "pub")] + pub transaction: TransactionParameters, + /// Executor parameters + #[getset(get_copy = "pub")] + pub executor: SmartContractParameters, + /// Smart contract parameters + #[getset(get_copy = "pub")] + pub smart_contract: SmartContractParameters, + /// Collection of blockchain specific parameters + #[getset(get = "pub")] + pub custom: CustomParameters, + } + + /// Single blockchain parameter. 
+ /// + /// Check [`Parameters`] for more details + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + EnumDiscriminants, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] + #[ffi_type(opaque)] + pub enum Parameter { + Sumeragi(SumeragiParameter), + Block(BlockParameter), + Transaction(TransactionParameter), + SmartContract(SmartContractParameter), + Executor(SmartContractParameter), + Custom(CustomParameter), + } +} + +impl core::fmt::Display for Parameter { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Sumeragi(v) => core::fmt::Display::fmt(&v, f), + Self::Block(v) => core::fmt::Display::fmt(&v, f), + Self::Transaction(v) => core::fmt::Display::fmt(&v, f), + Self::SmartContract(v) | Self::Executor(v) => core::fmt::Display::fmt(&v, f), + Self::Custom(v) => write!(f, "{}({})", v.id, v.payload), + } + } +} + +impl SumeragiParameters { + /// Maximal amount of time (in milliseconds) a peer will wait before forcing creation of a new block. + /// + /// A block is created if this limit or [`BlockParameters::max_transactions`] limit is reached, + /// whichever comes first. Regardless of the limits, an empty block is never created. + pub fn block_time(&self) -> Duration { + Duration::from_millis(self.block_time_ms) + } + + /// Time (in milliseconds) a peer will wait for a block to be committed. 
+ /// + /// If this period expires the block will request a view change + pub fn commit_time(&self) -> Duration { + Duration::from_millis(self.commit_time_ms) + } + + /// Maximal amount of time it takes to commit a block + #[cfg(feature = "transparent_api")] + pub fn pipeline_time(&self) -> Duration { + self.block_time() + self.commit_time() + } + + /// Estimation of consensus duration + #[cfg(feature = "transparent_api")] + pub fn consensus_estimation(&self) -> Duration { + self.block_time() + (self.commit_time() / 2) + } +} + +impl Default for SumeragiParameters { + fn default() -> Self { + pub const DEFAULT_BLOCK_TIME: u64 = 2_000; + pub const DEFAULT_COMMIT_TIME: u64 = 4_000; + + Self { + block_time_ms: DEFAULT_BLOCK_TIME, + commit_time_ms: DEFAULT_COMMIT_TIME, + } + } +} +impl Default for BlockParameters { + fn default() -> Self { + /// Default value for [`Parameters::MaxTransactionsInBlock`] + pub const DEFAULT_TRANSACTIONS_IN_BLOCK: NonZeroU64 = nonzero!(2_u64.pow(9)); + + Self::new(DEFAULT_TRANSACTIONS_IN_BLOCK) + } +} + +impl Default for TransactionParameters { + fn default() -> Self { + const DEFAULT_INSTRUCTION_NUMBER: NonZeroU64 = nonzero!(2_u64.pow(12)); + const DEFAULT_SMART_CONTRACT_SIZE: NonZeroU64 = nonzero!(4 * 2_u64.pow(20)); + + Self::new(DEFAULT_INSTRUCTION_NUMBER, DEFAULT_SMART_CONTRACT_SIZE) + } +} + +impl Default for SmartContractParameters { + fn default() -> Self { + const DEFAULT_FUEL: NonZeroU64 = nonzero!(55_000_000_u64); + const DEFAULT_MEMORY: NonZeroU64 = nonzero!(55_000_000_u64); + + Self { + fuel: DEFAULT_FUEL, + memory: DEFAULT_MEMORY, + } + } +} + +impl SumeragiParameters { + /// Construct [`Self`] + pub fn new(block_time: Duration, commit_time: Duration) -> Self { + Self { + block_time_ms: block_time + .as_millis() + .try_into() + .expect("INTERNAL BUG: Time should fit into u64"), + commit_time_ms: commit_time + .as_millis() + .try_into() + .expect("INTERNAL BUG: Time should fit into u64"), + } + } +} + +impl BlockParameters { + 
/// Construct [`Self`] + pub const fn new(max_transactions: NonZeroU64) -> Self { + Self { max_transactions } + } +} + +impl TransactionParameters { + /// Construct [`Self`] + pub const fn new(max_instructions: NonZeroU64, smart_contract_size: NonZeroU64) -> Self { + Self { + max_instructions, + smart_contract_size, + } + } +} + +impl CustomParameterId { + /// Getter for name + pub fn name(&self) -> &Name { + &self.0 + } +} + +impl CustomParameter { + /// Constructor + pub fn new(id: CustomParameterId, payload: impl Into) -> Self { + Self { + id, + payload: payload.into(), + } + } + + /// Getter + // TODO: derive with getset once FFI impl is fixed + pub fn payload(&self) -> &JsonString { + &self.payload + } +} + +mod candidate { + use core::num::NonZeroUsize; + + use parity_scale_codec::{Decode, Input}; + use serde::Deserialize; + + use super::*; + + #[derive(Decode, Deserialize)] + enum TransactionParameterCandidate { + MaxInstructions(NonZeroU64), + SmartContractSize(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct TransactionParametersCandidate { + max_instructions: NonZeroU64, + smart_contract_size: NonZeroU64, + } + + #[derive(Decode, Deserialize)] + enum BlockParameterCandidate { + MaxTransactions(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct BlockParametersCandidate { + max_transactions: NonZeroU64, + } + + #[derive(Decode, Deserialize)] + enum SmartContractParameterCandidate { + Fuel(NonZeroU64), + Memory(NonZeroU64), + } + + #[derive(Decode, Deserialize)] + struct SmartContractParametersCandidate { + fuel: NonZeroU64, + memory: NonZeroU64, + } + + impl BlockParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::MaxTransactions(max_transactions) => { + let _ = NonZeroUsize::try_from(max_transactions) + .map_err(|_| "BlockParameter::MaxTransactions exceeds usize::MAX")?; + + BlockParameter::MaxTransactions(max_transactions) + } + }) + } + } + + impl BlockParametersCandidate { + fn validate(self) -> Result { 
+ let _ = NonZeroUsize::try_from(self.max_transactions) + .map_err(|_| "BlockParameters::max_transactions exceeds usize::MAX")?; + + Ok(BlockParameters { + max_transactions: self.max_transactions, + }) + } + } + + impl TransactionParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::MaxInstructions(max_instructions) => { + let _ = NonZeroUsize::try_from(max_instructions) + .map_err(|_| "TransactionParameter::MaxInstructions exceeds usize::MAX")?; + TransactionParameter::MaxInstructions(max_instructions) + } + Self::SmartContractSize(smart_contract_size) => { + let _ = NonZeroUsize::try_from(smart_contract_size).map_err(|_| { + "TransactionParameter::SmartContractSize exceeds usize::MAX" + })?; + TransactionParameter::SmartContractSize(smart_contract_size) + } + }) + } + } + + impl TransactionParametersCandidate { + fn validate(self) -> Result { + let _ = NonZeroUsize::try_from(self.max_instructions) + .map_err(|_| "TransactionParameters::max_instructions exceeds usize::MAX")?; + + let _ = NonZeroUsize::try_from(self.smart_contract_size) + .map_err(|_| "TransactionParameters::smart_contract_size exceeds usize::MAX")?; + + Ok(TransactionParameters { + max_instructions: self.max_instructions, + smart_contract_size: self.smart_contract_size, + }) + } + } + + impl SmartContractParameterCandidate { + fn validate(self) -> Result { + Ok(match self { + Self::Fuel(fuel) => SmartContractParameter::Fuel(fuel), + Self::Memory(memory) => { + NonZeroUsize::try_from(memory) + .map_err(|_| "SmartContractParameter::Memory exceeds usize::MAX")?; + SmartContractParameter::Memory(memory) + } + }) + } + } + + impl SmartContractParametersCandidate { + fn validate(self) -> Result { + let _ = NonZeroUsize::try_from(self.memory) + .map_err(|_| "SmartContractParameters::memory exceeds usize::MAX")?; + + Ok(SmartContractParameters { + fuel: self.fuel, + memory: self.memory, + }) + } + } + + impl Decode for BlockParameter { + fn decode(input: &mut I) -> Result { + 
BlockParameterCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for BlockParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + BlockParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for BlockParameters { + fn decode(input: &mut I) -> Result { + BlockParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for BlockParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + BlockParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for TransactionParameter { + fn decode(input: &mut I) -> Result { + TransactionParameterCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for TransactionParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + TransactionParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for TransactionParameters { + fn decode(input: &mut I) -> Result { + TransactionParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + + impl<'de> Deserialize<'de> for TransactionParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + TransactionParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for SmartContractParameter { + fn decode(input: &mut I) -> Result { + SmartContractParameterCandidate::decode(input)? 
+ .validate() + .map_err(Into::into) + } + } + impl<'de> Deserialize<'de> for SmartContractParameter { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + SmartContractParameterCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } + + impl Decode for SmartContractParameters { + fn decode(input: &mut I) -> Result { + SmartContractParametersCandidate::decode(input)? + .validate() + .map_err(Into::into) + } + } + impl<'de> Deserialize<'de> for SmartContractParameters { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; + + SmartContractParametersCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) + } + } +} +pub mod prelude { + //! Prelude: re-export of most commonly used traits, structs and macros in this crate. + + pub use super::{Parameter, Parameters, SmartContractParameters, TransactionParameters}; +} diff --git a/data_model/src/peer.rs b/data_model/src/peer.rs index ffacb39dd98..72ab6b9a8e8 100644 --- a/data_model/src/peer.rs +++ b/data_model/src/peer.rs @@ -9,18 +9,19 @@ use core::{ }; use derive_more::Display; -use iroha_data_model_derive::{model, IdEqOrdHash}; +use iroha_data_model_derive::model; use iroha_primitives::addr::SocketAddr; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; pub use self::model::*; -use crate::{Identifiable, PublicKey, Registered}; +use crate::{PublicKey, Registered}; #[model] mod model { use getset::Getters; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; use super::*; diff --git a/data_model/src/permission.rs b/data_model/src/permission.rs index 253c1784d37..04bab19588e 100644 --- a/data_model/src/permission.rs +++ b/data_model/src/permission.rs @@ -5,50 +5,21 @@ use 
alloc::{collections::BTreeSet, format, string::String, vec::Vec}; use std::collections::BTreeSet; use iroha_data_model_derive::model; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; +use iroha_primitives::json::JsonString; +use iroha_schema::{Ident, IntoSchema}; pub use self::model::*; -use crate::name::Name; -/// Collection of [`Token`]s +/// Collection of [`Permission`]s pub type Permissions = BTreeSet; -use super::*; - #[model] mod model { - use super::*; + use derive_more::Display; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; - /// Identifies a [`Permission`]. - /// The executor defines available permission names. - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - Constructor, - FromStr, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[getset(get = "pub")] - #[serde(transparent)] - #[repr(transparent)] - #[ffi_type(opaque)] - pub struct PermissionId { - /// Should be unique. - pub name: Name, - } + use super::*; /// Stored proof of the account having a permission for a certain action. #[derive( @@ -64,32 +35,34 @@ mod model { Serialize, IntoSchema, Display, - Getters, )] #[ffi_type] - #[display(fmt = "PERMISSION `{id}` = `{payload}`")] - #[getset(get = "pub")] + #[display(fmt = "{name}({payload})")] pub struct Permission { /// Refers to a type defined in [`crate::executor::ExecutorDataModel`]. - pub id: PermissionId, + pub name: Ident, /// Payload containing actual value. /// /// It is JSON-encoded, and its structure must correspond to the structure of /// the type defined in [`crate::executor::ExecutorDataModel`]. 
- #[getset(skip)] pub payload: JsonString, } } impl Permission { /// Constructor - pub fn new(id: PermissionId, payload: impl Into) -> Self { + pub fn new(name: Ident, payload: impl Into) -> Self { Self { - id, + name, payload: payload.into(), } } + /// Refers to a type defined in [`crate::executor::ExecutorDataModel`]. + pub fn name(&self) -> &str { + &self.name + } + /// Getter // TODO: derive with getset once FFI impl is fixed pub fn payload(&self) -> &JsonString { @@ -99,5 +72,5 @@ impl Permission { pub mod prelude { //! The prelude re-exports most commonly used traits, structs and macros from this crate. - pub use super::{Permission, PermissionId}; + pub use super::Permission; } diff --git a/data_model/src/query/cursor.rs b/data_model/src/query/cursor.rs index 3deaba04750..9f4b9ac6aad 100644 --- a/data_model/src/query/cursor.rs +++ b/data_model/src/query/cursor.rs @@ -16,11 +16,13 @@ use parity_scale_codec::{Decode, Encode, Input}; use serde::Serialize; pub use self::model::*; -use super::QueryId; -const QUERY_ID: &str = "query_id"; +const QUERY_ID: &str = "query"; const CURSOR: &str = "cursor"; +/// Unique id of a query +pub type QueryId = String; + #[model] mod model { use super::*; @@ -31,15 +33,15 @@ mod model { pub struct ForwardCursor { /// Unique ID of query. When provided in a query the query will look up if there /// is was already a query with a matching ID and resume returning result batches - pub query_id: Option, + pub query: Option, /// Pointer to the next element in the result set pub cursor: Option, } impl ForwardCursor { /// Create a new cursor. 
- pub const fn new(query_id: Option, cursor: Option) -> Self { - Self { query_id, cursor } + pub const fn new(query: Option, cursor: Option) -> Self { + Self { query, cursor } } } } @@ -51,7 +53,7 @@ mod candidate { #[derive(Decode, Deserialize)] struct ForwardCursorCandidate { - query_id: Option, + query: Option, cursor: Option, } @@ -62,9 +64,9 @@ mod candidate { { let candidate = ForwardCursorCandidate::deserialize(deserializer)?; - if let Some(query_id) = candidate.query_id { + if let Some(query_id) = candidate.query { Ok(ForwardCursor { - query_id: Some(query_id), + query: Some(query_id), cursor: candidate.cursor, }) } else if candidate.cursor.is_some() { @@ -79,9 +81,9 @@ mod candidate { fn decode(input: &mut I) -> Result { let candidate = ForwardCursorCandidate::decode(input)?; - if let Some(query_id) = candidate.query_id { + if let Some(query_id) = candidate.query { Ok(ForwardCursor { - query_id: Some(query_id), + query: Some(query_id), cursor: candidate.cursor, }) } else if candidate.cursor.is_some() { @@ -95,7 +97,7 @@ mod candidate { impl From for Vec<(&'static str, QueryId)> { fn from(cursor: ForwardCursor) -> Self { - match (cursor.query_id, cursor.cursor) { + match (cursor.query, cursor.cursor) { (Some(query_id), Some(cursor)) => { vec![(QUERY_ID, query_id), (CURSOR, cursor.to_string())] } diff --git a/data_model/src/query/mod.rs b/data_model/src/query/mod.rs index 92ec08b6750..4520069e0b3 100644 --- a/data_model/src/query/mod.rs +++ b/data_model/src/query/mod.rs @@ -15,7 +15,7 @@ pub use cursor::ForwardCursor; use derive_more::{Constructor, Display}; use iroha_crypto::{PublicKey, SignatureOf}; use iroha_data_model_derive::{model, EnumRef}; -use iroha_primitives::{numeric::Numeric, small::SmallVec}; +use iroha_primitives::{json::JsonString, numeric::Numeric, small::SmallVec}; use iroha_schema::IntoSchema; use iroha_version::prelude::*; use nonzero_ext::nonzero; @@ -33,7 +33,6 @@ use crate::{ account::{Account, AccountId}, block::{BlockHeader, 
SignedBlock}, events::TriggeringEventFilterBox, - metadata::MetadataValueBox, seal, transaction::{CommittedTransaction, SignedTransaction, TransactionPayload}, IdBox, Identifiable, IdentifiableBox, @@ -44,8 +43,6 @@ pub mod pagination; pub mod predicate; pub mod sorting; -const FETCH_SIZE: &str = "fetch_size"; - /// Default value for `fetch_size` parameter in queries. pub const DEFAULT_FETCH_SIZE: NonZeroU32 = nonzero!(10_u32); @@ -74,19 +71,6 @@ pub struct FetchSize { pub fetch_size: Option, } -impl FetchSize { - /// Converts self to iterator of tuples to be used in queries. - /// - /// The length of the output iterator is not constant and has either 0 or 1 value. - pub fn into_query_parameters( - self, - ) -> impl IntoIterator + Clone { - self.fetch_size - .map(|fetch_size| (FETCH_SIZE, fetch_size)) - .into_iter() - } -} - macro_rules! queries { ($($($meta:meta)* $item:item)+) => { pub use self::model::*; @@ -106,9 +90,6 @@ macro_rules! queries { }; } -/// Unique id of a query. -pub type QueryId = String; - /// Trait for typesafe query output pub trait Query: Into + seal::Sealed { /// Output type of query @@ -137,7 +118,7 @@ mod model { use strum::EnumDiscriminants; use super::*; - use crate::{block::SignedBlock, permission::PermissionId}; + use crate::block::SignedBlock; /// Sized container for all possible Queries. #[allow(clippy::enum_variant_names)] @@ -231,7 +212,8 @@ mod model { Identifiable(IdentifiableBox), Transaction(TransactionQueryOutput), Permission(crate::permission::Permission), - LimitedMetadata(MetadataValueBox), + Parameters(crate::parameter::Parameters), + Metadata(JsonString), Numeric(Numeric), BlockHeader(BlockHeader), Block(crate::block::SignedBlock), @@ -240,7 +222,7 @@ mod model { Vec( #[skip_from] #[skip_try_from] - Vec, + Vec, ), } @@ -401,7 +383,7 @@ impl_queries! 
{ FindPermissionsByAccountId => Vec, FindAllAccounts => Vec, FindAccountById => crate::account::Account, - FindAccountKeyValueByIdAndKey => MetadataValueBox, + FindAccountKeyValueByIdAndKey => JsonString, FindAccountsByDomainId => Vec, FindAccountsWithAsset => Vec, FindAllAssets => Vec, @@ -415,16 +397,16 @@ impl_queries! { FindAssetsByDomainIdAndAssetDefinitionId => Vec, FindAssetQuantityById => Numeric, FindTotalAssetQuantityByAssetDefinitionId => Numeric, - FindAssetKeyValueByIdAndKey => MetadataValueBox, - FindAssetDefinitionKeyValueByIdAndKey => MetadataValueBox, + FindAssetKeyValueByIdAndKey => JsonString, + FindAssetDefinitionKeyValueByIdAndKey => JsonString, FindAllDomains => Vec, FindDomainById => crate::domain::Domain, - FindDomainKeyValueByIdAndKey => MetadataValueBox, + FindDomainKeyValueByIdAndKey => JsonString, FindAllPeers => Vec, - FindAllParameters => Vec, + FindAllParameters => crate::parameter::Parameters, FindAllActiveTriggerIds => Vec, FindTriggerById => crate::trigger::Trigger, - FindTriggerKeyValueByIdAndKey => MetadataValueBox, + FindTriggerKeyValueByIdAndKey => JsonString, FindTriggersByAuthorityId => Vec, FindTriggersByAuthorityDomainId => Vec, FindAllTransactions => Vec, @@ -448,17 +430,18 @@ impl core::fmt::Display for QueryOutputBox { // TODO: Maybe derive fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { - QueryOutputBox::Id(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Identifiable(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Transaction(_) => write!(f, "TransactionQueryOutput"), - QueryOutputBox::Permission(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Block(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::BlockHeader(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::Numeric(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::LimitedMetadata(v) => core::fmt::Display::fmt(&v, f), - QueryOutputBox::ExecutorDataModel(v) => core::fmt::Display::fmt(&v, f), - 
- QueryOutputBox::Vec(v) => { + Self::Id(v) => core::fmt::Display::fmt(&v, f), + Self::Identifiable(v) => core::fmt::Display::fmt(&v, f), + Self::Transaction(_) => write!(f, "TransactionQueryOutput"), + Self::Permission(v) => core::fmt::Display::fmt(&v, f), + Self::Parameters(v) => core::fmt::Debug::fmt(&v, f), + Self::Block(v) => core::fmt::Display::fmt(&v, f), + Self::BlockHeader(v) => core::fmt::Display::fmt(&v, f), + Self::Numeric(v) => core::fmt::Display::fmt(&v, f), + Self::Metadata(v) => core::fmt::Display::fmt(&v, f), + Self::ExecutorDataModel(v) => core::fmt::Display::fmt(&v, f), + + Self::Vec(v) => { // TODO: Remove so we can derive. let list_of_display: Vec<_> = v.iter().map(ToString::to_string).collect(); // this prints with quotation marks, which is fine 90% @@ -487,7 +470,7 @@ macro_rules! from_and_try_from_value_idbox { impl From<$ty> for QueryOutputBox { fn from(id: $ty) -> Self { - QueryOutputBox::Id(IdBox::$variant(id)) + Self::Id(IdBox::$variant(id)) } })+ }; @@ -509,7 +492,7 @@ macro_rules! from_and_try_from_value_identifiable { impl From<$ty> for QueryOutputBox { fn from(id: $ty) -> Self { - QueryOutputBox::Identifiable(IdentifiableBox::$variant(id)) + Self::Identifiable(IdentifiableBox::$variant(id)) } } )+ }; @@ -523,7 +506,6 @@ from_and_try_from_value_idbox!( AssetDefinitionId(crate::asset::AssetDefinitionId), TriggerId(crate::trigger::TriggerId), RoleId(crate::role::RoleId), - ParameterId(crate::parameter::ParameterId), // TODO: Should we wrap String with new type in order to convert like here? 
//from_and_try_from_value_idbox!((DomainName(Name), ErrorValueTryFromDomainName),); ); @@ -540,12 +522,11 @@ from_and_try_from_value_identifiable!( Asset(crate::asset::Asset), Trigger(crate::trigger::Trigger), Role(crate::role::Role), - Parameter(crate::parameter::Parameter), ); impl> From> for QueryOutputBox { - fn from(values: Vec) -> QueryOutputBox { - QueryOutputBox::Vec(values.into_iter().map(Into::into).collect()) + fn from(values: Vec) -> Self { + Self::Vec(values.into_iter().map(Into::into).collect()) } } @@ -692,7 +673,7 @@ pub mod account { use derive_more::Display; use parity_scale_codec::Encode; - use super::{MetadataValueBox, Query, QueryType}; + use super::{JsonString, Query, QueryType}; use crate::prelude::*; queries! { @@ -729,25 +710,25 @@ pub mod account { /// [`FindAccountsByDomainId`] Iroha Query gets [`Domain`]s id as input and /// finds all [`Account`]s under this [`Domain`]. #[derive(Display)] - #[display(fmt = "Find accounts under `{domain_id}` domain")] + #[display(fmt = "Find accounts under `{domain}` domain")] #[repr(transparent)] // SAFETY: `FindAccountsByDomainId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindAccountsByDomainId { /// `Id` of the domain under which accounts should be found. - pub domain_id: DomainId, + pub domain: DomainId, } /// [`FindAccountsWithAsset`] Iroha Query gets [`AssetDefinition`]s id as input and /// finds all [`Account`]s storing [`Asset`] with such definition. #[derive(Display)] - #[display(fmt = "Find accounts with `{asset_definition_id}` asset")] + #[display(fmt = "Find accounts with `{asset_definition}` asset")] #[repr(transparent)] // SAFETY: `FindAccountsWithAsset` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindAccountsWithAsset { /// `Id` of the definition of the asset which should be stored in founded accounts. 
- pub asset_definition_id: AssetDefinitionId, + pub asset_definition: AssetDefinitionId, } } @@ -772,7 +753,7 @@ pub mod asset { use iroha_primitives::numeric::Numeric; use parity_scale_codec::Encode; - use super::{MetadataValueBox, Query, QueryType}; + use super::{JsonString, Query, QueryType}; use crate::prelude::*; queries! { @@ -826,54 +807,54 @@ pub mod asset { /// [`FindAssetsByAccountId`] Iroha Query gets [`AccountId`] as input and find all [`Asset`]s /// owned by the [`Account`] in Iroha Peer. #[derive(Display)] - #[display(fmt = "Find assets owned by the `{account_id}` account")] + #[display(fmt = "Find assets owned by the `{account}` account")] #[repr(transparent)] // SAFETY: `FindAssetsByAccountId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindAssetsByAccountId { /// [`AccountId`] under which assets should be found. - pub account_id: AccountId, + pub account: AccountId, } /// [`FindAssetsByAssetDefinitionId`] Iroha Query gets [`AssetDefinitionId`] as input and /// finds all [`Asset`]s with this [`AssetDefinition`] in Iroha Peer. #[derive(Display)] - #[display(fmt = "Find assets with `{asset_definition_id}` asset definition")] + #[display(fmt = "Find assets with `{asset_definition}` asset definition")] #[repr(transparent)] // SAFETY: `FindAssetsByAssetDefinitionId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindAssetsByAssetDefinitionId { /// [`AssetDefinitionId`] with type of [`Asset`]s should be found. - pub asset_definition_id: AssetDefinitionId, + pub asset_definition: AssetDefinitionId, } /// [`FindAssetsByDomainId`] Iroha Query gets [`Domain`]s id as input and /// finds all [`Asset`]s under this [`Domain`] in Iroha [`Peer`]. 
#[derive(Display)] - #[display(fmt = "Find assets under the `{domain_id}` domain")] + #[display(fmt = "Find assets under the `{domain}` domain")] #[repr(transparent)] // SAFETY: `FindAssetsByDomainId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindAssetsByDomainId { /// `Id` of the domain under which assets should be found. - pub domain_id: DomainId, + pub domain: DomainId, } /// [`FindAssetsByDomainIdAndAssetDefinitionId`] Iroha Query gets [`DomainId`] and /// [`AssetDefinitionId`] as inputs and finds [`Asset`]s under the [`Domain`] /// with this [`AssetDefinition`] in Iroha [`Peer`]. #[derive(Display)] - #[display(fmt = "Find assets under the `{domain_id}` domain with `{asset_definition_id}` asset definition")] + #[display(fmt = "Find assets under the `{domain}` domain with `{asset_definition}` asset definition")] #[ffi_type] pub struct FindAssetsByDomainIdAndAssetDefinitionId { /// `Id` of the domain under which assets should be found. - pub domain_id: DomainId, + pub domain: DomainId, /// [`AssetDefinitionId`] assets of which type should be found. - pub asset_definition_id: AssetDefinitionId, + pub asset_definition: AssetDefinitionId, } /// [`FindAssetQuantityById`] Iroha Query gets [`AssetId`] as input and finds [`Asset::quantity`] - /// parameter's value if [`Asset`] is presented in Iroha Peer. + /// value if [`Asset`] is presented in Iroha Peer. #[derive(Display)] #[display(fmt = "Find quantity of the `{id}` asset")] #[repr(transparent)] @@ -945,7 +926,7 @@ pub mod domain { use derive_more::Display; use parity_scale_codec::Encode; - use super::{MetadataValueBox, Query, QueryType}; + use super::{JsonString, Query, QueryType}; use crate::prelude::*; queries! 
{ @@ -1046,7 +1027,7 @@ pub mod trigger { use derive_more::Display; use parity_scale_codec::Encode; - use super::{MetadataValueBox, Query, QueryType}; + use super::{JsonString, Query, QueryType}; use crate::{ account::AccountId, domain::prelude::*, @@ -1088,24 +1069,24 @@ pub mod trigger { /// Find all triggers executable on behalf of the given account. #[derive(Display)] - #[display(fmt = "Find triggers executable on behalf of the `{account_id}` account")] + #[display(fmt = "Find triggers executable on behalf of the `{account}` account")] #[repr(transparent)] // SAFETY: `FindTriggersByAuthorityId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindTriggersByAuthorityId { /// [`AccountId`] specifies the authority behind the trigger execution. - pub account_id: AccountId, + pub account: AccountId, } /// Find all triggers whose authority belongs to the given domain. #[derive(Display)] - #[display(fmt = "Find triggers with authority under `{domain_id}` domain")] + #[display(fmt = "Find triggers with authority under `{domain}` domain")] #[repr(transparent)] // SAFETY: `FindTriggersByAuthorityDomainId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindTriggersByAuthorityDomainId { /// [`DomainId`] specifies the domain in which to search for triggers. - pub domain_id: DomainId, + pub domain: DomainId, } } @@ -1143,13 +1124,13 @@ pub mod transaction { /// [`FindTransactionsByAccountId`] Iroha Query finds all transactions included in a blockchain /// for the account #[derive(Display)] - #[display(fmt = "Find all transactions for `{account_id}` account")] + #[display(fmt = "Find all transactions for `{account}` account")] #[repr(transparent)] // SAFETY: `FindTransactionsByAccountId` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] pub struct FindTransactionsByAccountId { /// Signer's [`AccountId`] under which transactions should be found. 
- pub account_id: AccountId, + pub account: AccountId, } /// [`FindTransactionByHash`] Iroha Query finds a transaction (if any) @@ -1556,9 +1537,7 @@ pub mod error { /// Role with id `{0}` not found Role(RoleId), /// Failed to find [`Permission`] by id. - Permission(PermissionId), - /// Parameter with id `{0}` not found - Parameter(ParameterId), + Permission(Permission), /// Failed to find public key: `{0}` PublicKey(PublicKey), } @@ -1574,6 +1553,6 @@ pub mod prelude { account::prelude::*, asset::prelude::*, block::prelude::*, domain::prelude::*, executor::prelude::*, peer::prelude::*, permission::prelude::*, predicate::PredicateTrait, role::prelude::*, transaction::prelude::*, trigger::prelude::*, FetchSize, QueryBox, - QueryId, TransactionQueryOutput, + TransactionQueryOutput, }; } diff --git a/data_model/src/query/pagination.rs b/data_model/src/query/pagination.rs index 9b5fea05866..61943e7018f 100644 --- a/data_model/src/query/pagination.rs +++ b/data_model/src/query/pagination.rs @@ -19,9 +19,6 @@ use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; -const PAGINATION_START: &str = "start"; -const PAGINATION_LIMIT: &str = "limit"; - /// Structure for pagination requests #[derive( Debug, @@ -50,25 +47,6 @@ pub struct Pagination { pub start: Option, } -impl Pagination { - /// Converts self to iterator of tuples to be used in queries - /// - /// The length of the output iterator is not constant and it's in (0..3) - pub fn into_query_parameters( - self, - ) -> impl IntoIterator + Clone { - let result_vec = match (self.start, self.limit) { - (Some(start), Some(limit)) => { - vec![(PAGINATION_LIMIT, limit.into()), (PAGINATION_START, start)] - } - (Some(start), None) => vec![(PAGINATION_START, start)], - (None, Some(limit)) => vec![(PAGINATION_LIMIT, limit.into())], - (None, None) => Vec::new(), - }; - result_vec.into_iter() - } -} - pub mod prelude { //! 
Prelude: re-export most commonly used traits, structs and macros from this module. pub use super::*; diff --git a/data_model/src/query/predicate.rs b/data_model/src/query/predicate.rs index 87b4e80917e..3846840af0b 100644 --- a/data_model/src/query/predicate.rs +++ b/data_model/src/query/predicate.rs @@ -473,8 +473,9 @@ impl Default for PredicateBox { #[cfg(test)] pub mod test { + use iroha_primitives::json::JsonString; + use super::{value, PredicateBox, PredicateSymbol, PredicateTrait as _}; - use crate::metadata::MetadataValueBox; #[test] fn boolean_predicate_symbol_conformity() { @@ -485,8 +486,8 @@ pub mod test { fn pass() { let t = PredicateBox::new(value::QueryOutputPredicate::Pass); let f = t.clone().negate(); - let v_t = MetadataValueBox::from(true).into(); - let v_f = MetadataValueBox::from(false).into(); + let v_t = JsonString::from(true).into(); + let v_f = JsonString::from(false).into(); println!("t: {t:?}, f: {f:?}"); assert!(t.applies(&v_t)); @@ -499,7 +500,7 @@ pub mod test { fn truth_table() { let t = PredicateBox::new(value::QueryOutputPredicate::Pass); let f = t.clone().negate(); - let v = MetadataValueBox::from(true).into(); + let v = JsonString::from(true).into(); assert!(!PredicateBox::and(t.clone(), f.clone()).applies(&v)); assert!(PredicateBox::and(t.clone(), t.clone()).applies(&v)); @@ -601,8 +602,8 @@ pub mod string { IdBox::PeerId(id) => self.applies(&id.to_string()), IdBox::TriggerId(id) => self.applies(&id.to_string()), IdBox::RoleId(id) => self.applies(&id.to_string()), - IdBox::PermissionId(id) => self.applies(&id.to_string()), - IdBox::ParameterId(id) => self.applies(&id.to_string()), + IdBox::Permission(id) => self.applies(&id.to_string()), + IdBox::CustomParameterId(id) => self.applies(&id.to_string()), } } } @@ -1162,7 +1163,7 @@ pub mod value { QueryOutputPredicate::Display(pred) => pred.applies(&input.to_string()), QueryOutputPredicate::TimeStamp(pred) => match input { QueryOutputBox::Block(block) => { - 
pred.applies(block.header().timestamp().as_millis()) + pred.applies(block.header().creation_time().as_millis()) } _ => false, }, @@ -1231,7 +1232,7 @@ pub mod value { use crate::{ account::{Account, AccountId}, domain::{Domain, DomainId}, - metadata::{Metadata, MetadataValueBox}, + metadata::Metadata, peer::{Peer, PeerId}, }; @@ -1239,6 +1240,7 @@ pub mod value { fn typing() { let alice: PublicKey = KeyPair::random().into_parts().0; let alice_id: AccountId = format!("{alice}@wonderland").parse().expect("Valid"); + let alice_json: JsonString = JsonString::new(alice_id.to_string()); { let pred = QueryOutputPredicate::Identifiable(string::StringPredicate::is( &alice_id.to_string(), @@ -1250,18 +1252,18 @@ pub mod value { Account::new(alice_id.clone()) ))) ); - assert!(!pred.applies(&MetadataValueBox::from(alice_id.to_string()).into())); + assert!(!pred.applies(&alice_json.clone().into())); assert!(!pred.applies(&QueryOutputBox::Vec(Vec::new()))); } { let pred = QueryOutputPredicate::Pass; println!("{pred:?}"); - assert!(pred.applies(&MetadataValueBox::from(alice_id.to_string()).into())); + assert!(pred.applies(&alice_json.clone().into())); } { let pred = QueryOutputPredicate::TimeStamp(numerical::SemiInterval::starting(0)); println!("{pred:?}"); - assert!(!pred.applies(&MetadataValueBox::from(alice_id.to_string()).into())); + assert!(!pred.applies(&alice_json.clone().into())); } { let pred = QueryOutputPredicate::Display(string::StringPredicate::is( @@ -1281,7 +1283,8 @@ pub mod value { let pred = QueryOutputPredicate::Numerical(numerical::SemiRange::Numeric( (numeric!(0), numeric!(42)).into(), )); - assert!(!pred.applies(&MetadataValueBox::from(alice_id.to_string()).into())); + + assert!(!pred.applies(&alice_json.into())); assert!(pred.applies(&numeric!(41).into())); } diff --git a/data_model/src/query/sorting.rs b/data_model/src/query/sorting.rs index 386e82a174b..82b2aeb595b 100644 --- a/data_model/src/query/sorting.rs +++ b/data_model/src/query/sorting.rs @@ -16,8 
+16,6 @@ use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::{name::Name, prelude::*}; -const SORT_BY_KEY: &str = "sort_by_metadata_key"; - #[model] mod model { use super::*; @@ -41,17 +39,6 @@ impl Sorting { } } -impl Sorting { - /// Converts self to iterator of tuples to be used in queries - /// - /// The length of the output iterator is not constant and has either 0 or 1 value - pub fn into_query_parameters(self) -> impl IntoIterator + Clone { - self.sort_by_metadata_key - .map(|key| (SORT_BY_KEY, key)) - .into_iter() - } -} - pub mod prelude { //! Prelude: re-export most commonly used traits, structs and macros from this module. pub use super::*; diff --git a/data_model/src/role.rs b/data_model/src/role.rs index 45c6b53732a..834ef75e57b 100644 --- a/data_model/src/role.rs +++ b/data_model/src/role.rs @@ -3,21 +3,23 @@ #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; -use derive_more::{Constructor, Display, FromStr}; -use getset::Getters; -use iroha_data_model_derive::{model, IdEqOrdHash}; -use iroha_schema::IntoSchema; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; +use iroha_data_model_derive::model; pub use self::model::*; use crate::{ permission::{Permission, Permissions}, - Identifiable, Name, Registered, + Name, Registered, }; #[model] mod model { + use derive_more::{Constructor, Display, FromStr}; + use getset::Getters; + use iroha_data_model_derive::IdEqOrdHash; + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + use super::*; /// Identification of a role. 
diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs index 7ced1cab628..6da46f26437 100644 --- a/data_model/src/transaction.rs +++ b/data_model/src/transaction.rs @@ -23,13 +23,13 @@ pub use self::model::*; use crate::{ account::AccountId, isi::{Instruction, InstructionBox}, - metadata::UnlimitedMetadata, + metadata::Metadata, ChainId, }; #[model] mod model { - use getset::{CopyGetters, Getters}; + use getset::Getters; use super::*; use crate::account::AccountId; @@ -114,34 +114,7 @@ mod model { /// Random value to make different hashes for transactions which occur repeatedly and simultaneously. pub nonce: Option, /// Store for additional information. - pub metadata: UnlimitedMetadata, - } - - /// Container for limits that transactions must obey. - #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - CopyGetters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "{max_instruction_number},{max_wasm_size_bytes}_TL")] - #[getset(get_copy = "pub")] - #[ffi_type] - pub struct TransactionLimits { - /// Maximum number of instructions per transaction - pub max_instruction_number: u64, - /// Maximum size of wasm binary - pub max_wasm_size_bytes: u64, + pub metadata: Metadata, } /// Signature of transaction @@ -206,16 +179,6 @@ mod model { } } -impl TransactionLimits { - /// Construct [`Self`] - pub const fn new(max_instruction_number: u64, max_wasm_size_bytes: u64) -> Self { - Self { - max_instruction_number, - max_wasm_size_bytes, - } - } -} - impl FromIterator for Executable { fn from_iter>(iter: T) -> Self { Self::Instructions(iter.into_iter().map(Into::into).collect()) @@ -282,7 +245,7 @@ impl SignedTransaction { /// Return transaction metadata. 
#[inline] - pub fn metadata(&self) -> &UnlimitedMetadata { + pub fn metadata(&self) -> &Metadata { let SignedTransaction::V1(tx) = self; &tx.payload.metadata } @@ -586,7 +549,6 @@ pub mod error { use InstructionBox::*; let kind = match self.instruction { Burn(_) => "burn", - Fail(_) => "fail", Mint(_) => "mint", Register(_) => "register", Transfer(_) => "transfer", @@ -597,7 +559,6 @@ pub mod error { Revoke(_) => "revoke", ExecuteTrigger(_) => "execute trigger", SetParameter(_) => "set parameter", - NewParameter(_) => "new parameter", Upgrade(_) => "upgrade", Log(_) => "log", Custom(_) => "custom", @@ -656,7 +617,7 @@ mod http { nonce: None, time_to_live_ms: None, instructions: Vec::::new().into(), - metadata: UnlimitedMetadata::new(), + metadata: Metadata::default(), }, } } @@ -723,7 +684,7 @@ mod http { } /// Adds metadata to the `Transaction` - pub fn with_metadata(mut self, metadata: UnlimitedMetadata) -> Self { + pub fn with_metadata(mut self, metadata: Metadata) -> Self { self.payload.metadata = metadata; self } diff --git a/data_model/src/trigger.rs b/data_model/src/trigger.rs index 1170a87c4e2..8be9660bc05 100644 --- a/data_model/src/trigger.rs +++ b/data_model/src/trigger.rs @@ -17,9 +17,7 @@ use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; -use crate::{ - events::prelude::*, metadata::Metadata, transaction::Executable, Identifiable, Name, Registered, -}; +use crate::{events::prelude::*, metadata::Metadata, transaction::Executable, Name, Registered}; #[model] mod model { @@ -189,7 +187,7 @@ pub mod action { // TODO: At this point the authority is meaningless. 
authority, filter: filter.into(), - metadata: Metadata::new(), + metadata: Metadata::default(), } } diff --git a/data_model/src/visit.rs b/data_model/src/visit.rs index bf0dcfacdf0..cc0e92764f5 100644 --- a/data_model/src/visit.rs +++ b/data_model/src/visit.rs @@ -28,7 +28,6 @@ pub trait Visit { // Visit InstructionBox visit_burn(&BurnBox), - visit_fail(&Fail), visit_grant(&GrantBox), visit_mint(&MintBox), visit_register(&RegisterBox), @@ -40,10 +39,9 @@ pub trait Visit { visit_upgrade(&Upgrade), visit_execute_trigger(&ExecuteTrigger), - visit_new_parameter(&NewParameter), visit_set_parameter(&SetParameter), visit_log(&Log), - visit_custom(&Custom), + visit_custom(&CustomInstruction), // Visit QueryBox visit_find_account_by_id(&FindAccountById), @@ -233,9 +231,6 @@ pub fn visit_instruction( isi: &InstructionBox, ) { match isi { - InstructionBox::NewParameter(variant_value) => { - visitor.visit_new_parameter(authority, variant_value) - } InstructionBox::SetParameter(variant_value) => { visitor.visit_set_parameter(authority, variant_value) } @@ -244,7 +239,6 @@ pub fn visit_instruction( } InstructionBox::Log(variant_value) => visitor.visit_log(authority, variant_value), InstructionBox::Burn(variant_value) => visitor.visit_burn(authority, variant_value), - InstructionBox::Fail(variant_value) => visitor.visit_fail(authority, variant_value), InstructionBox::Grant(variant_value) => visitor.visit_grant(authority, variant_value), InstructionBox::Mint(variant_value) => visitor.visit_mint(authority, variant_value), InstructionBox::Register(variant_value) => visitor.visit_register(authority, variant_value), @@ -428,12 +422,10 @@ leaf_visitors! 
{ visit_mint_trigger_repetitions(&Mint), visit_burn_trigger_repetitions(&Burn), visit_upgrade(&Upgrade), - visit_new_parameter(&NewParameter), visit_set_parameter(&SetParameter), visit_execute_trigger(&ExecuteTrigger), - visit_fail(&Fail), visit_log(&Log), - visit_custom(&Custom), + visit_custom(&CustomInstruction), // Query visitors visit_find_account_by_id(&FindAccountById), diff --git a/default_executor/src/lib.rs b/default_executor/src/lib.rs index a506d4df6dc..9e79fec18a9 100644 --- a/default_executor/src/lib.rs +++ b/default_executor/src/lib.rs @@ -49,7 +49,7 @@ impl Executor { /// If `migrate()` entrypoint fails then the whole `Upgrade` instruction /// will be denied and previous executor will stay unchanged. #[entrypoint] -pub fn migrate(block_height: u64) -> MigrationResult { +fn migrate(block_height: u64) -> MigrationResult { Executor::ensure_genesis(block_height)?; DataModelBuilder::with_default_permissions().build_and_set(); diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index e633ae613e1..20920ee1ab4 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -17,59 +17,49 @@ }, "AccountEvent": { "Enum": [ - { - "tag": "Asset", - "discriminant": 0, - "type": "AssetEvent" - }, { "tag": "Created", - "discriminant": 1, + "discriminant": 0, "type": "Account" }, { "tag": "Deleted", - "discriminant": 2, - "type": "AccountId" - }, - { - "tag": "AuthenticationAdded", - "discriminant": 3, + "discriminant": 1, "type": "AccountId" }, { - "tag": "AuthenticationRemoved", - "discriminant": 4, - "type": "AccountId" + "tag": "Asset", + "discriminant": 2, + "type": "AssetEvent" }, { "tag": "PermissionAdded", - "discriminant": 5, + "discriminant": 3, "type": "AccountPermissionChanged" }, { "tag": "PermissionRemoved", - "discriminant": 6, + "discriminant": 4, "type": "AccountPermissionChanged" }, { - "tag": "RoleRevoked", - "discriminant": 7, + "tag": "RoleGranted", + "discriminant": 5, "type": 
"AccountRoleChanged" }, { - "tag": "RoleGranted", - "discriminant": 8, + "tag": "RoleRevoked", + "discriminant": 6, "type": "AccountRoleChanged" }, { "tag": "MetadataInserted", - "discriminant": 9, + "discriminant": 7, "type": "MetadataChanged" }, { "tag": "MetadataRemoved", - "discriminant": 10, + "discriminant": 8, "type": "MetadataChanged" } ] @@ -91,48 +81,40 @@ "repr": "u32", "masks": [ { - "name": "AnyAsset", + "name": "Created", "mask": 1 }, { - "name": "Created", + "name": "Deleted", "mask": 2 }, { - "name": "Deleted", + "name": "AnyAsset", "mask": 4 }, { - "name": "AuthenticationAdded", + "name": "PermissionAdded", "mask": 8 }, { - "name": "AuthenticationRemoved", + "name": "PermissionRemoved", "mask": 16 }, { - "name": "PermissionAdded", + "name": "RoleGranted", "mask": 32 }, - { - "name": "PermissionRemoved", - "mask": 64 - }, { "name": "RoleRevoked", - "mask": 128 - }, - { - "name": "RoleGranted", - "mask": 256 + "mask": 64 }, { "name": "MetadataInserted", - "mask": 512 + "mask": 128 }, { "name": "MetadataRemoved", - "mask": 1024 + "mask": 256 } ] } @@ -157,7 +139,7 @@ }, { "name": "permission", - "type": "PermissionId" + "type": "Permission" } ] }, @@ -266,8 +248,8 @@ "type": "AssetDefinitionId" }, { - "name": "value_type", - "type": "AssetValueType" + "name": "type_", + "type": "AssetType" }, { "name": "mintable", @@ -295,34 +277,34 @@ "type": "AssetDefinition" }, { - "tag": "MintabilityChanged", + "tag": "Deleted", "discriminant": 1, "type": "AssetDefinitionId" }, { - "tag": "OwnerChanged", + "tag": "MetadataInserted", "discriminant": 2, - "type": "AssetDefinitionOwnerChanged" + "type": "MetadataChanged" }, { - "tag": "Deleted", + "tag": "MetadataRemoved", "discriminant": 3, - "type": "AssetDefinitionId" + "type": "MetadataChanged" }, { - "tag": "MetadataInserted", + "tag": "MintabilityChanged", "discriminant": 4, - "type": "MetadataChanged" + "type": "AssetDefinitionId" }, { - "tag": "MetadataRemoved", + "tag": "TotalQuantityChanged", 
"discriminant": 5, - "type": "MetadataChanged" + "type": "AssetDefinitionTotalQuantityChanged" }, { - "tag": "TotalQuantityChanged", + "tag": "OwnerChanged", "discriminant": 6, - "type": "AssetDefinitionTotalQuantityChanged" + "type": "AssetDefinitionOwnerChanged" } ] }, @@ -347,27 +329,27 @@ "mask": 1 }, { - "name": "MintabilityChanged", + "name": "Deleted", "mask": 2 }, { - "name": "OwnerChanged", + "name": "MetadataInserted", "mask": 4 }, { - "name": "Deleted", + "name": "MetadataRemoved", "mask": 8 }, { - "name": "MetadataInserted", + "name": "MintabilityChanged", "mask": 16 }, { - "name": "MetadataRemoved", + "name": "TotalQuantityChanged", "mask": 32 }, { - "name": "TotalQuantityChanged", + "name": "OwnerChanged", "mask": 64 } ] @@ -512,30 +494,30 @@ } ] }, - "AssetValue": { + "AssetType": { "Enum": [ { "tag": "Numeric", "discriminant": 0, - "type": "Numeric" + "type": "NumericSpec" }, { "tag": "Store", - "discriminant": 1, - "type": "Metadata" + "discriminant": 1 } ] }, - "AssetValueType": { + "AssetValue": { "Enum": [ { "tag": "Numeric", "discriminant": 0, - "type": "NumericSpec" + "type": "Numeric" }, { "tag": "Store", - "discriminant": 1 + "discriminant": 1, + "type": "Metadata" } ] }, @@ -592,7 +574,7 @@ "Struct": [ { "name": "height", - "type": "Option" + "type": "Option>" }, { "name": "status", @@ -604,7 +586,7 @@ "Struct": [ { "name": "height", - "type": "u64" + "type": "NonZero" }, { "name": "prev_block_hash", @@ -615,7 +597,7 @@ "type": "HashOf>" }, { - "name": "timestamp_ms", + "name": "creation_time_ms", "type": "u64" }, { @@ -629,6 +611,23 @@ ] }, "BlockMessage": "SignedBlock", + "BlockParameter": { + "Enum": [ + { + "tag": "MaxTransactions", + "discriminant": 0, + "type": "NonZero" + } + ] + }, + "BlockParameters": { + "Struct": [ + { + "name": "max_transactions", + "type": "NonZero" + } + ] + }, "BlockPayload": { "Struct": [ { @@ -775,26 +774,12 @@ { "tag": "Changed", "discriminant": 0, - "type": "ParameterId" - }, - { - "tag": "Created", - 
"discriminant": 1, - "type": "ParameterId" - }, - { - "tag": "Deleted", - "discriminant": 2, - "type": "ParameterId" + "type": "ParameterChanged" } ] }, "ConfigurationEventFilter": { "Struct": [ - { - "name": "id_matcher", - "type": "Option" - }, { "name": "event_set", "type": "ConfigurationEventSet" @@ -808,14 +793,6 @@ { "name": "Changed", "mask": 1 - }, - { - "name": "Created", - "mask": 2 - }, - { - "name": "Deleted", - "mask": 4 } ] } @@ -839,7 +816,7 @@ } ] }, - "Custom": { + "CustomInstruction": { "Struct": [ { "name": "payload", @@ -847,6 +824,19 @@ } ] }, + "CustomParameter": { + "Struct": [ + { + "name": "id", + "type": "CustomParameterId" + }, + { + "name": "payload", + "type": "JsonString" + } + ] + }, + "CustomParameterId": "Name", "DataEvent": { "Enum": [ { @@ -965,24 +955,24 @@ "DomainEvent": { "Enum": [ { - "tag": "Account", + "tag": "Created", "discriminant": 0, - "type": "AccountEvent" + "type": "Domain" }, { - "tag": "AssetDefinition", + "tag": "Deleted", "discriminant": 1, - "type": "AssetDefinitionEvent" + "type": "DomainId" }, { - "tag": "Created", + "tag": "AssetDefinition", "discriminant": 2, - "type": "Domain" + "type": "AssetDefinitionEvent" }, { - "tag": "Deleted", + "tag": "Account", "discriminant": 3, - "type": "DomainId" + "type": "AccountEvent" }, { "tag": "MetadataInserted", @@ -1018,19 +1008,19 @@ "repr": "u32", "masks": [ { - "name": "AnyAccount", + "name": "Created", "mask": 1 }, { - "name": "AnyAssetDefinition", + "name": "Deleted", "mask": 2 }, { - "name": "Created", + "name": "AnyAssetDefinition", "mask": 4 }, { - "name": "Deleted", + "name": "AnyAccount", "mask": 8 }, { @@ -1204,12 +1194,16 @@ "ExecutorDataModel": { "Struct": [ { - "name": "permissions", - "type": "SortedVec" + "name": "parameters", + "type": "SortedMap" }, { - "name": "custom_instruction", - "type": "Option" + "name": "instructions", + "type": "SortedVec" + }, + { + "name": "permissions", + "type": "SortedVec" }, { "name": "schema", @@ -1253,14 +1247,6 @@ } ] 
}, - "Fail": { - "Struct": [ - { - "name": "message", - "type": "String" - } - ] - }, "FetchSize": { "Struct": [ { @@ -1292,7 +1278,7 @@ "FindAccountsByDomainId": { "Struct": [ { - "name": "domain_id", + "name": "domain", "type": "DomainId" } ] @@ -1300,7 +1286,7 @@ "FindAccountsWithAsset": { "Struct": [ { - "name": "asset_definition_id", + "name": "asset_definition", "type": "AssetDefinitionId" } ] @@ -1368,7 +1354,7 @@ "FindAssetsByAccountId": { "Struct": [ { - "name": "account_id", + "name": "account", "type": "AccountId" } ] @@ -1376,7 +1362,7 @@ "FindAssetsByAssetDefinitionId": { "Struct": [ { - "name": "asset_definition_id", + "name": "asset_definition", "type": "AssetDefinitionId" } ] @@ -1384,7 +1370,7 @@ "FindAssetsByDomainId": { "Struct": [ { - "name": "domain_id", + "name": "domain", "type": "DomainId" } ] @@ -1392,11 +1378,11 @@ "FindAssetsByDomainIdAndAssetDefinitionId": { "Struct": [ { - "name": "domain_id", + "name": "domain", "type": "DomainId" }, { - "name": "asset_definition_id", + "name": "asset_definition", "type": "AssetDefinitionId" } ] @@ -1492,16 +1478,11 @@ { "tag": "Permission", "discriminant": 10, - "type": "PermissionId" - }, - { - "tag": "Parameter", - "discriminant": 11, - "type": "ParameterId" + "type": "Permission" }, { "tag": "PublicKey", - "discriminant": 12, + "discriminant": 11, "type": "PublicKey" } ] @@ -1550,7 +1531,7 @@ "FindTransactionsByAccountId": { "Struct": [ { - "name": "account_id", + "name": "account", "type": "AccountId" } ] @@ -1578,7 +1559,7 @@ "FindTriggersByAuthorityDomainId": { "Struct": [ { - "name": "domain_id", + "name": "domain", "type": "DomainId" } ] @@ -1586,7 +1567,7 @@ "FindTriggersByAuthorityId": { "Struct": [ { - "name": "account_id", + "name": "account", "type": "AccountId" } ] @@ -1594,7 +1575,7 @@ "ForwardCursor": { "Struct": [ { - "name": "query_id", + "name": "query", "type": "Option" }, { @@ -1724,14 +1705,14 @@ "type": "RoleId" }, { - "tag": "PermissionId", + "tag": "Permission", 
"discriminant": 7, - "type": "PermissionId" + "type": "Permission" }, { - "tag": "ParameterId", + "tag": "CustomParameterId", "discriminant": 8, - "type": "ParameterId" + "type": "CustomParameterId" } ] }, @@ -1793,9 +1774,9 @@ "type": "Role" }, { - "tag": "Parameter", + "tag": "CustomParameter", "discriminant": 11, - "type": "Parameter" + "type": "CustomParameter" } ] }, @@ -1856,30 +1837,20 @@ "discriminant": 10, "type": "SetParameter" }, - { - "tag": "NewParameter", - "discriminant": 11, - "type": "NewParameter" - }, { "tag": "Upgrade", - "discriminant": 12, + "discriminant": 11, "type": "Upgrade" }, { "tag": "Log", - "discriminant": 13, + "discriminant": 12, "type": "Log" }, { "tag": "Custom", - "discriminant": 14, - "type": "Custom" - }, - { - "tag": "Fail", - "discriminant": 15, - "type": "Fail" + "discriminant": 13, + "type": "CustomInstruction" } ] }, @@ -1939,24 +1910,14 @@ "discriminant": 6, "type": "MathError" }, - { - "tag": "Metadata", - "discriminant": 7, - "type": "MetadataError" - }, - { - "tag": "Fail", - "discriminant": 8, - "type": "String" - }, { "tag": "InvalidParameter", - "discriminant": 9, + "discriminant": 7, "type": "InvalidParameterError" }, { "tag": "InvariantViolation", - "discriminant": 10, + "discriminant": 8, "type": "String" } ] @@ -2019,25 +1980,17 @@ "tag": "SetParameter", "discriminant": 10 }, - { - "tag": "NewParameter", - "discriminant": 11 - }, { "tag": "Upgrade", - "discriminant": 12 + "discriminant": 11 }, { "tag": "Log", - "discriminant": 13 + "discriminant": 12 }, { "tag": "Custom", - "discriminant": 14 - }, - { - "tag": "Fail", - "discriminant": 15 + "discriminant": 13 } ] }, @@ -2062,18 +2015,6 @@ "Ipv4Addr": "Array", "Ipv6Addr": "Array", "JsonString": "String", - "LengthLimits": { - "Struct": [ - { - "name": "min", - "type": "u32" - }, - { - "name": "max", - "type": "u32" - } - ] - }, "Level": { "Enum": [ { @@ -2098,18 +2039,6 @@ } ] }, - "Limits": { - "Struct": [ - { - "name": "capacity", - "type": "u32" - }, - { - 
"name": "max_entry_len", - "type": "u32" - } - ] - }, "Log": { "Struct": [ { @@ -2158,7 +2087,7 @@ "MerkleTree": { "Vec": "HashOf" }, - "Metadata": "SortedMap", + "Metadata": "SortedMap", "MetadataChanged": { "Struct": [ { @@ -2171,7 +2100,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -2187,7 +2116,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -2203,7 +2132,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -2219,7 +2148,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -2235,74 +2164,7 @@ }, { "name": "value", - "type": "MetadataValueBox" - } - ] - }, - "MetadataError": { - "Enum": [ - { - "tag": "EmptyPath", - "discriminant": 0 - }, - { - "tag": "EntryTooBig", - "discriminant": 1, - "type": "SizeError" - }, - { - "tag": "MaxCapacity", - "discriminant": 2, - "type": "SizeError" - }, - { - "tag": "MissingSegment", - "discriminant": 3, - "type": "Name" - }, - { - "tag": "InvalidSegment", - "discriminant": 4, - "type": "Name" - } - ] - }, - "MetadataValueBox": { - "Enum": [ - { - "tag": "Bool", - "discriminant": 0, - "type": "bool" - }, - { - "tag": "String", - "discriminant": 1, - "type": "String" - }, - { - "tag": "Name", - "discriminant": 2, - "type": "Name" - }, - { - "tag": "Bytes", - "discriminant": 3, - "type": "Vec" - }, - { - "tag": "Numeric", - "discriminant": 4, - "type": "Numeric" - }, - { - "tag": "LimitedMetadata", - "discriminant": 5, - "type": "Metadata" - }, - { - "tag": "Vec", - "discriminant": 6, - "type": "Vec" + "type": "JsonString" } ] }, @@ -2372,15 +2234,15 @@ } ] }, - "Mismatch": { + "Mismatch": { "Struct": [ { "name": "expected", - "type": "AssetValueType" + "type": "AssetType" }, { "name": "actual", - "type": "AssetValueType" + "type": "AssetType" } ] }, @@ -2404,8 +2266,8 @@ "type": "AssetDefinitionId" }, { - "name": "value_type", - "type": "AssetValueType" + "name": "type_", + 
"type": "AssetType" }, { "name": "mintable", @@ -2437,14 +2299,6 @@ } ] }, - "NewParameter": { - "Struct": [ - { - "name": "parameter", - "type": "Parameter" - } - ] - }, "NewRole": { "Struct": [ { @@ -2512,11 +2366,8 @@ "Option>": { "Option": "NonZero" }, - "Option>": { - "Option": "Option" - }, - "Option": { - "Option": "ParameterId" + "Option>>": { + "Option": "Option>" }, "Option": { "Option": "PeerId" @@ -2545,9 +2396,6 @@ "Option": { "Option": "u32" }, - "Option": { - "Option": "u64" - }, "Pagination": { "Struct": [ { @@ -2561,46 +2409,76 @@ ] }, "Parameter": { - "Struct": [ + "Enum": [ { - "name": "id", - "type": "ParameterId" + "tag": "Sumeragi", + "discriminant": 0, + "type": "SumeragiParameter" + }, + { + "tag": "Block", + "discriminant": 1, + "type": "BlockParameter" + }, + { + "tag": "Transaction", + "discriminant": 2, + "type": "TransactionParameter" + }, + { + "tag": "SmartContract", + "discriminant": 3, + "type": "SmartContractParameter" + }, + { + "tag": "Executor", + "discriminant": 4, + "type": "SmartContractParameter" }, { - "name": "val", - "type": "ParameterValueBox" + "tag": "Custom", + "discriminant": 5, + "type": "CustomParameter" } ] }, - "ParameterId": { + "ParameterChanged": { "Struct": [ { - "name": "name", - "type": "Name" + "name": "old_value", + "type": "Parameter" + }, + { + "name": "new_value", + "type": "Parameter" } ] }, - "ParameterValueBox": { - "Enum": [ + "Parameters": { + "Struct": [ { - "tag": "TransactionLimits", - "discriminant": 0, - "type": "TransactionLimits" + "name": "sumeragi", + "type": "SumeragiParameters" }, { - "tag": "MetadataLimits", - "discriminant": 1, - "type": "Limits" + "name": "block", + "type": "BlockParameters" }, { - "tag": "LengthLimits", - "discriminant": 2, - "type": "LengthLimits" + "name": "transaction", + "type": "TransactionParameters" }, { - "tag": "Numeric", - "discriminant": 3, - "type": "Numeric" + "name": "executor", + "type": "SmartContractParameters" + }, + { + "name": "smart_contract", + 
"type": "SmartContractParameters" + }, + { + "name": "custom", + "type": "SortedMap" } ] }, @@ -2668,8 +2546,8 @@ "Permission": { "Struct": [ { - "name": "id", - "type": "PermissionId" + "name": "name", + "type": "String" }, { "name": "payload", @@ -2677,14 +2555,6 @@ } ] }, - "PermissionId": { - "Struct": [ - { - "name": "name", - "type": "Name" - } - ] - }, "PipelineEventBox": { "Enum": [ { @@ -2978,33 +2848,38 @@ "type": "Permission" }, { - "tag": "LimitedMetadata", + "tag": "Parameters", "discriminant": 4, - "type": "MetadataValueBox" + "type": "Parameters" }, { - "tag": "Numeric", + "tag": "Metadata", "discriminant": 5, + "type": "JsonString" + }, + { + "tag": "Numeric", + "discriminant": 6, "type": "Numeric" }, { "tag": "BlockHeader", - "discriminant": 6, + "discriminant": 7, "type": "BlockHeader" }, { "tag": "Block", - "discriminant": 7, + "discriminant": 8, "type": "SignedBlock" }, { "tag": "ExecutorDataModel", - "discriminant": 8, + "discriminant": 9, "type": "ExecutorDataModel" }, { "tag": "Vec", - "discriminant": 9, + "discriminant": 10, "type": "Vec" } ] @@ -3263,7 +3138,7 @@ "RepetitionError": { "Struct": [ { - "name": "instruction_type", + "name": "instruction", "type": "InstructionType" }, { @@ -3352,12 +3227,12 @@ "type": "RoleId" }, { - "tag": "PermissionRemoved", + "tag": "PermissionAdded", "discriminant": 2, "type": "RolePermissionChanged" }, { - "tag": "PermissionAdded", + "tag": "PermissionRemoved", "discriminant": 3, "type": "RolePermissionChanged" } @@ -3388,11 +3263,11 @@ "mask": 2 }, { - "name": "PermissionRemoved", + "name": "PermissionAdded", "mask": 4 }, { - "name": "PermissionAdded", + "name": "PermissionRemoved", "mask": 8 } ] @@ -3414,7 +3289,7 @@ }, { "name": "permission", - "type": "PermissionId" + "type": "Permission" } ] }, @@ -3475,7 +3350,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -3491,7 +3366,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ 
-3507,7 +3382,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -3523,7 +3398,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -3539,7 +3414,7 @@ }, { "name": "value", - "type": "MetadataValueBox" + "type": "JsonString" } ] }, @@ -3572,14 +3447,7 @@ } ] }, - "SetParameter": { - "Struct": [ - { - "name": "parameter", - "type": "Parameter" - } - ] - }, + "SetParameter": "Parameter", "Signature": { "Struct": [ { @@ -3654,15 +3522,29 @@ } ] }, - "SizeError": { + "SmartContractParameter": { + "Enum": [ + { + "tag": "Fuel", + "discriminant": 0, + "type": "NonZero" + }, + { + "tag": "Memory", + "discriminant": 1, + "type": "NonZero" + } + ] + }, + "SmartContractParameters": { "Struct": [ { - "name": "limits", - "type": "Limits" + "name": "fuel", + "type": "NonZero" }, { - "name": "actual", - "type": "u64" + "name": "memory", + "type": "NonZero" } ] }, @@ -3739,17 +3621,23 @@ "value": "Numeric" } }, - "SortedMap": { + "SortedMap": { + "Map": { + "key": "CustomParameterId", + "value": "CustomParameter" + } + }, + "SortedMap": { "Map": { "key": "Name", - "value": "MetadataValueBox" + "value": "JsonString" } }, "SortedVec": { "Vec": "Permission" }, - "SortedVec": { - "Vec": "PermissionId" + "SortedVec": { + "Vec": "String" }, "Sorting": { "Struct": [ @@ -3784,6 +3672,32 @@ } ] }, + "SumeragiParameter": { + "Enum": [ + { + "tag": "BlockTimeMs", + "discriminant": 0, + "type": "u64" + }, + { + "tag": "CommitTimeMs", + "discriminant": 1, + "type": "u64" + } + ] + }, + "SumeragiParameters": { + "Struct": [ + { + "name": "block_time_ms", + "type": "u64" + }, + { + "name": "commit_time_ms", + "type": "u64" + } + ] + }, "TimeEvent": { "Struct": [ { @@ -3817,7 +3731,7 @@ }, { "name": "block_height", - "type": "Option" + "type": "Option>" }, { "name": "status", @@ -3833,7 +3747,7 @@ }, { "name": "block_height", - "type": "Option>" + "type": "Option>>" }, { "name": "status", @@ -3849,15 +3763,29 @@ } ] }, - 
"TransactionLimits": { + "TransactionParameter": { + "Enum": [ + { + "tag": "MaxInstructions", + "discriminant": 0, + "type": "NonZero" + }, + { + "tag": "SmartContractSize", + "discriminant": 1, + "type": "NonZero" + } + ] + }, + "TransactionParameters": { "Struct": [ { - "name": "max_instruction_number", - "type": "u64" + "name": "max_instructions", + "type": "NonZero" }, { - "name": "max_wasm_size_bytes", - "type": "u64" + "name": "smart_contract_size", + "type": "NonZero" } ] }, @@ -3889,7 +3817,7 @@ }, { "name": "metadata", - "type": "SortedMap" + "type": "Metadata" } ] }, @@ -4224,19 +4152,14 @@ "TypeError": { "Enum": [ { - "tag": "AssetValueType", + "tag": "AssetType", "discriminant": 0, - "type": "Mismatch" + "type": "Mismatch" }, { - "tag": "NumericAssetValueTypeExpected", + "tag": "NumericAssetTypeExpected", "discriminant": 1, - "type": "AssetValueType" - }, - { - "tag": "StoreAssetValueTypeExpected", - "discriminant": 2, - "type": "AssetValueType" + "type": "AssetType" } ] }, @@ -4388,9 +4311,6 @@ "Vec": { "Vec": "InstructionBox" }, - "Vec": { - "Vec": "MetadataValueBox" - }, "Vec": { "Vec": "PeerId" }, @@ -4409,7 +4329,6 @@ ] }, "WasmSmartContract": "Vec", - "bool": "bool", "u128": { "Int": "FixedWidth" }, diff --git a/ffi/src/std_impls.rs b/ffi/src/std_impls.rs index d45d2ece14e..ff65bbae07b 100644 --- a/ffi/src/std_impls.rs +++ b/ffi/src/std_impls.rs @@ -47,17 +47,25 @@ ffi_type! { niche_value=RefMutSlice::null_mut() } } +ffi_type! { + unsafe impl Transparent for core::ptr::NonNull { + type Target = *mut T; + + validation_fn=unsafe {|target: &*mut T| !target.is_null()}, + niche_value=core::ptr::null_mut() + } +} ffi_type! { unsafe impl Transparent for core::mem::ManuallyDrop { type Target = T; } } ffi_type! 
{ - unsafe impl Transparent for core::ptr::NonNull { - type Target = *mut T; + unsafe impl Transparent for core::num::NonZeroU64 { + type Target = u64; - validation_fn=unsafe {|target: &*mut T| !target.is_null()}, - niche_value=core::ptr::null_mut() + validation_fn=unsafe {|target: &u64| *target != 0}, + niche_value=0 } } diff --git a/genesis/Cargo.toml b/genesis/Cargo.toml index 49994d15f6a..3ca02c7bd25 100644 --- a/genesis/Cargo.toml +++ b/genesis/Cargo.toml @@ -21,6 +21,7 @@ serde_json = { workspace = true } once_cell = { workspace = true } tracing = { workspace = true } eyre = { workspace = true } +parity-scale-codec = { workspace = true } [dev-dependencies] iroha_crypto = { workspace = true, features = ["rand"] } diff --git a/genesis/src/lib.rs b/genesis/src/lib.rs index 095b5a50793..58d27ba53e3 100644 --- a/genesis/src/lib.rs +++ b/genesis/src/lib.rs @@ -13,6 +13,7 @@ use iroha_crypto::{KeyPair, PublicKey}; use iroha_data_model::{block::SignedBlock, prelude::*}; use iroha_schema::IntoSchema; use once_cell::sync::Lazy; +use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; /// [`DomainId`](iroha_data_model::domain::DomainId) of the genesis account. @@ -30,7 +31,7 @@ pub struct GenesisBlock(pub SignedBlock); /// It should be signed, converted to [`GenesisBlock`], /// and serialized in SCALE format before supplying to Iroha peer. /// See `kagami genesis sign`. -#[derive(Debug, Clone, Serialize, Deserialize, IntoSchema)] +#[derive(Debug, Clone, Serialize, Deserialize, IntoSchema, Encode, Decode)] pub struct RawGenesisTransaction { /// Unique id of blockchain chain: ChainId, @@ -261,15 +262,32 @@ impl GenesisDomainBuilder { } /// Add [`AssetDefinition`] to current domain. 
- pub fn asset(mut self, asset_name: Name, asset_value_type: AssetValueType) -> Self { + pub fn asset(mut self, asset_name: Name, asset_type: AssetType) -> Self { let asset_definition_id = AssetDefinitionId::new(self.domain_id.clone(), asset_name); - let asset_definition = AssetDefinition::new(asset_definition_id, asset_value_type); + let asset_definition = AssetDefinition::new(asset_definition_id, asset_type); self.instructions .push(Register::asset_definition(asset_definition).into()); self } } +impl Encode for ExecutorPath { + fn encode(&self) -> Vec { + self.0 + .to_str() + .expect("path contains not valid UTF-8") + .encode() + } +} + +impl Decode for ExecutorPath { + fn decode( + input: &mut I, + ) -> std::result::Result { + String::decode(input).map(PathBuf::from).map(ExecutorPath) + } +} + #[cfg(test)] mod tests { use test_samples::{ALICE_KEYPAIR, BOB_KEYPAIR}; @@ -319,7 +337,7 @@ mod tests { .account(public_key["mad_hatter"].clone()) .asset( "hats".parse().unwrap(), - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), ) .finish_domain(); diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 14f6c12e6f8..5f19e8273b1 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -42,6 +42,8 @@ smallstr = { version = "0.3.0", default-features = false, features = ["serde", " thiserror = { workspace = true, optional = true } displaydoc = { workspace = true } parking_lot = { workspace = true, optional = true } +serde_json = { workspace = true } + [dev-dependencies] serde_json = { workspace = true, features = ["alloc"] } diff --git a/primitives/src/json.rs b/primitives/src/json.rs new file mode 100644 index 00000000000..f36be4d5f92 --- /dev/null +++ b/primitives/src/json.rs @@ -0,0 +1,176 @@ +//! String containing serialized valid JSON. +//! 
This string is guaranteed to parse as JSON + +#[cfg(not(feature = "std"))] +use alloc::{ + format, + string::{String, ToString}, + vec::Vec, +}; +use core::str::FromStr; +#[cfg(feature = "std")] +use std::{ + string::{String, ToString}, + vec::Vec, +}; + +use derive_more::Display; +use iroha_schema::IntoSchema; +use parity_scale_codec::{Decode, Encode}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::Value; + +/// A valid `JsonString` that consists of valid String of Json type +#[derive(Debug, Display, Clone, PartialOrd, PartialEq, Ord, Eq, IntoSchema, Encode, Decode)] +#[display(fmt = "{_0}")] +pub struct JsonString(String); + +impl JsonString { + /// Constructs [`JsonString`] + /// # Errors + /// + /// - Serialization can fail if T's implementation of Serialize decides to fail, + /// - or if T contains a map with non-string keys. + // Todo: Doesn't remove extra spaces in if `&str` is an object + pub fn new(payload: T) -> Self { + candidate::JsonCandidate::new(payload).try_into().unwrap() + } + + /// Tries cast [`JsonString`] to any value. + /// + /// # Errors + /// - if invalid representation of `T` + pub fn try_into_any(&self) -> Result { + serde_json::from_str(&self.0) + } + + /// Create without checking whether the input is a valid JSON string. + /// + /// The caller must guarantee that the value is valid. 
+ pub fn from_string_unchecked(value: String) -> Self { + Self(value) + } + + /// Getter for [`JsonString`] + pub fn get(&self) -> &String { + &self.0 + } +} + +impl<'de> serde::de::Deserialize<'de> for JsonString { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let json = Value::deserialize(deserializer)?; + Ok(Self(json.to_string())) + } +} + +impl serde::ser::Serialize for JsonString { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let json: Value = serde_json::from_str(&self.0).map_err(serde::ser::Error::custom)?; + json.serialize(serializer) + } +} + +impl From<&Value> for JsonString { + fn from(value: &Value) -> Self { + JsonString(value.to_string()) + } +} + +impl From for JsonString { + fn from(value: Value) -> Self { + JsonString(value.to_string()) + } +} + +impl From for JsonString { + fn from(value: u32) -> Self { + JsonString::new(value) + } +} + +impl From for JsonString { + fn from(value: u64) -> Self { + JsonString::new(value) + } +} + +impl From for JsonString { + fn from(value: f64) -> Self { + JsonString::new(value) + } +} + +impl From for JsonString { + fn from(value: bool) -> Self { + JsonString::new(value) + } +} + +impl From<&str> for JsonString { + fn from(value: &str) -> Self { + value.parse::().expect("Impossible error") + } +} + +impl + Serialize> From> for JsonString { + fn from(value: Vec) -> Self { + JsonString::new(value) + } +} + +/// Removes extra spaces from object if `&str` is an object +impl FromStr for JsonString { + type Err = serde_json::Error; + + fn from_str(s: &str) -> Result { + if let Ok(value) = serde_json::from_str::(s) { + Ok(JsonString(value.to_string())) + } else { + let json_formatted_string = serde_json::to_string(s)?; + let value: Value = serde_json::from_str(&json_formatted_string)?; + Ok(JsonString(value.to_string())) + } + } +} + +impl Default for JsonString { + fn default() -> Self { + // NOTE: empty string isn't valid JSON + 
Self("null".to_string()) + } +} + +impl AsRef for JsonString { + fn as_ref(&self) -> &str { + &self.0 + } +} + +mod candidate { + use super::*; + + /// A candidate for a valid `JsonString`. + /// Is used for generalizing ser/de any types to `JsonString` and vise versa + #[derive(Serialize, Deserialize, Clone)] + pub(super) struct JsonCandidate(T); + + impl JsonCandidate { + pub(super) fn new(value: T) -> Self { + JsonCandidate(value) + } + } + + impl TryFrom> for JsonString { + type Error = serde_json::Error; + fn try_from(value: JsonCandidate) -> Result { + Ok(JsonString(serde_json::to_string(&value.0)?)) + } + } +} diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index 60eb5ef214a..0face676e98 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -17,6 +17,7 @@ pub mod cmpext; pub mod const_vec; #[cfg(not(feature = "ffi_import"))] pub mod conststr; +pub mod json; pub mod must_use; pub mod numeric; pub mod riffle_iter; diff --git a/schema/gen/src/lib.rs b/schema/gen/src/lib.rs index 312c3f23ce7..4e846d0aa72 100644 --- a/schema/gen/src/lib.rs +++ b/schema/gen/src/lib.rs @@ -86,15 +86,14 @@ types!( AssetId, AssetTransferBox, AssetValue, - AssetValueType, + AssetType, AtIndex, - BTreeMap, BTreeMap, BTreeMap, BTreeMap, - BTreeMap, + BTreeMap, + BTreeSet, BTreeSet, - BTreeSet, BatchedResponse, BatchedResponseV1, BlockEvent, @@ -121,7 +120,7 @@ types!( ConstVec, Container, ClientQueryPayload, - Custom, + CustomInstruction, DataEvent, DataEventFilter, Domain, @@ -145,7 +144,7 @@ types!( ExecutorEventSet, ExecutorUpgrade, ExecutorDataModel, - Fail, + ExecutorPath, EventFilterBox, FetchSize, FindAccountById, @@ -209,7 +208,7 @@ types!( IpfsPath, Ipv4Addr, Ipv6Addr, - LengthLimits, + JsonString, Level, Log, MathError, @@ -220,20 +219,16 @@ types!( MetadataChanged, MetadataChanged, MetadataChanged, - MetadataError, - MetadataLimits, - MetadataValueBox, Mint, Mint, MintBox, MintabilityError, Mintable, - Mismatch, + Mismatch, Name, NewAccount, 
NewAssetDefinition, NewDomain, - NewParameter, NewRole, NonTrivial, NonZeroU32, @@ -255,7 +250,6 @@ types!( Option, Option, Option>, - Option, Option, Option, Option, @@ -266,14 +260,11 @@ types!( Option, Pagination, Parameter, - ParameterId, - ParameterValueBox, Peer, PeerEvent, PeerEventFilter, PeerEventSet, PeerId, - PermissionId, RolePermissionChanged, Permission, PipelineEventBox, @@ -285,6 +276,7 @@ types!( QueryOutputBox, QueryOutputPredicate, QuerySignature, + iroha_genesis::RawGenesisTransaction, Register, Register, Register, @@ -330,7 +322,6 @@ types!( SignedQueryV1, SignedTransaction, SignedTransactionV1, - SizeError, SocketAddr, SocketAddrHost, SocketAddrV4, @@ -346,7 +337,6 @@ types!( TransactionEvent, TransactionEventFilter, TransactionLimitError, - TransactionLimits, TransactionPayload, TransactionQueryOutput, TransactionRejectionReason, @@ -384,7 +374,7 @@ types!( Vec, Vec, Vec, - Vec, + Vec, Vec, Vec, Vec, @@ -431,8 +421,6 @@ pub mod complete_data_model { }, InstructionType, }, - metadata::{MetadataError, MetadataValueBox, SizeError}, - parameter::ParameterValueBox, prelude::*, query::{ error::{FindError, QueryExecutionFail}, @@ -445,15 +433,17 @@ pub mod complete_data_model { ForwardCursor, Pagination, QueryOutputBox, Sorting, }, transaction::{ - error::TransactionLimitError, SignedTransactionV1, TransactionLimits, - TransactionPayload, TransactionSignature, + error::TransactionLimitError, SignedTransactionV1, TransactionPayload, + TransactionSignature, }, - BatchedResponse, BatchedResponseV1, JsonString, Level, + BatchedResponse, BatchedResponseV1, Level, }; + pub use iroha_genesis::ExecutorPath; pub use iroha_primitives::{ addr::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrHost, SocketAddrV4, SocketAddrV6}, const_vec::ConstVec, conststr::ConstString, + json::JsonString, }; pub use iroha_schema::Compact; } diff --git a/schema/src/lib.rs b/schema/src/lib.rs index 5e880108542..8d1736d04d2 100644 --- a/schema/src/lib.rs +++ b/schema/src/lib.rs @@ 
-73,6 +73,7 @@ impl IntoIterator for MetaMap { } } +// TODO: Should be &str or ConstString. /// Identifier of the type pub type Ident = String; @@ -82,14 +83,12 @@ pub type Ident = String; /// is devised that can prove that all impls are unique pub trait TypeId: 'static { /// Return unique type id - // TODO: Should return &str or ConstString. fn id() -> Ident; } /// `IntoSchema` trait pub trait IntoSchema: TypeId { /// Name under which a type is represented in the schema - // TODO: Should return &str or ConstString. fn type_name() -> Ident; /// Insert descriptions of types referenced by [`Self`] diff --git a/scripts/test_env.py b/scripts/test_env.py index 4f3d55723c3..5101d0cce8c 100755 --- a/scripts/test_env.py +++ b/scripts/test_env.py @@ -37,8 +37,9 @@ def __init__(self, args: argparse.Namespace): logging.info("Generating shared configuration...") trusted_peers = [{"address": f"{peer.host_ip}:{peer.p2p_port}", "public_key": peer.public_key} for peer in self.peers] - genesis_public_key = self.peers[0].public_key - genesis_private_key = self.peers[0].private_key + genesis_key_pair = kagami_generate_key_pair(args.out_dir, seed="Irohagenesis") + genesis_public_key = genesis_key_pair["public_key"] + genesis_private_key = genesis_key_pair["private_key"] shared_config = { "chain": "00000000-0000-0000-0000-000000000000", "genesis": { @@ -81,7 +82,7 @@ def wait_for_genesis(self, n_tries: int): def run(self): for i, peer in enumerate(self.peers): - peer.run(submit_genesis=(i == 0)) + peer.run() self.wait_for_genesis(20) class _Peer: @@ -103,18 +104,8 @@ def __init__(self, args: argparse.Namespace, nth: int): logging.info(f"Peer {self.name} generating key pair...") - command = [self.out_dir / "kagami", "crypto", "-j"] - if nth == 0: - command.extend(["-s", "Iroha" + "genesis"]) - elif args.peer_name_as_seed: - command.extend(["-s", self.name]) - kagami = subprocess.run(command, capture_output=True) - if kagami.returncode: - logging.error("Kagami failed to generate a key 
pair.") - sys.exit(3) - str_keypair = kagami.stdout - # dict with `{ public_key: string, private_key: { algorithm: string, payload: string } }` - self.key_pair = json.loads(str_keypair) + seed = self.name if args.peer_name_as_seed else None + self.key_pair = kagami_generate_key_pair(args.out_dir, seed) os.makedirs(self.peer_dir, exist_ok=True) config = { @@ -138,10 +129,9 @@ def __init__(self, args: argparse.Namespace, nth: int): # "tokio_console_addr": f"{self.host_ip}:{self.tokio_console_port}", # } } - if nth == 0: - config["genesis"] = { - "signed_file": "../../genesis.signed.scale" - } + config["genesis"] = { + "signed_file": "../../genesis.signed.scale" + } with open(self.config_path, "wb") as f: tomli_w.dump(config, f) logging.info(f"Peer {self.name} initialized") @@ -154,14 +144,14 @@ def public_key(self): def private_key(self): return self.key_pair["private_key"] - def run(self, submit_genesis: bool = False): + def run(self): logging.info(f"Running peer {self.name}...") # FD never gets closed stdout_file = open(self.peer_dir / ".stdout", "w") stderr_file = open(self.peer_dir / ".stderr", "w") # These processes are created detached from the parent process already - subprocess.Popen([self.name, "--config", self.config_path] + (["--submit-genesis"] if submit_genesis else []), + subprocess.Popen([self.name, "--config", self.config_path], executable=self.out_dir / "peers/irohad", stdout=stdout_file, stderr=stderr_file) def pos_int(arg): @@ -191,6 +181,17 @@ def copy_or_prompt_build_bin(bin_name: str, root_dir: pathlib.Path, target_dir: else: logging.error("Please answer with either `y[es]` or `n[o]`") +def kagami_generate_key_pair(out_dir: pathlib.Path, seed: str = None): + command = [out_dir / "kagami", "crypto", "-j"] + if seed is not None: + command.extend(["-s", seed]) + kagami = subprocess.run(command, capture_output=True) + if kagami.returncode: + logging.error("Kagami failed to generate a key pair.") + sys.exit(3) + # dict with `{ public_key: string, 
private_key: string }` + return json.loads(kagami.stdout) + def copy_genesis_json_and_change_topology(args: argparse.Namespace, topology): try: with open(args.root_dir / SWARM_CONFIGS_DIRECTORY / "genesis.json", 'r') as f: diff --git a/smart_contract/executor/derive/src/default.rs b/smart_contract/executor/derive/src/default.rs index 6b436fc7019..a307de0674e 100644 --- a/smart_contract/executor/derive/src/default.rs +++ b/smart_contract/executor/derive/src/default.rs @@ -156,11 +156,9 @@ pub fn impl_derive_visit(emitter: &mut Emitter, input: &syn::DeriveInput) -> Tok "fn visit_burn_trigger_repetitions(operation: &Burn)", "fn visit_execute_trigger(operation: &ExecuteTrigger)", "fn visit_set_parameter(operation: &SetParameter)", - "fn visit_new_parameter(operation: &NewParameter)", "fn visit_upgrade(operation: &Upgrade)", "fn visit_log(operation: &Log)", - "fn visit_custom(operation: &Custom)", - "fn visit_fail(operation: &Fail)", + "fn visit_custom(operation: &CustomInstruction)", ] .into_iter() .map(|item| { diff --git a/smart_contract/executor/derive/src/lib.rs b/smart_contract/executor/derive/src/lib.rs index 4b11347a112..6c3360d5417 100644 --- a/smart_contract/executor/derive/src/lib.rs +++ b/smart_contract/executor/derive/src/lib.rs @@ -7,6 +7,7 @@ use proc_macro2::TokenStream; mod conversion; mod default; mod entrypoint; +mod parameter; mod permission; mod validate; @@ -100,6 +101,16 @@ pub fn derive_permission(input: TokenStream) -> Result { Ok(permission::impl_derive_permission(&input)) } +/// Derive macro for `Parameter` trait. +/// ``` +#[manyhow] +#[proc_macro_derive(Parameter)] +pub fn derive_parameter(input: TokenStream) -> Result { + let input = syn::parse2(input)?; + + Ok(parameter::impl_derive_parameter(&input)) +} + /// Derive macro for `ValidateGrantRevoke` trait. 
/// /// # Attributes diff --git a/smart_contract/executor/derive/src/parameter.rs b/smart_contract/executor/derive/src/parameter.rs new file mode 100644 index 00000000000..14ee485351d --- /dev/null +++ b/smart_contract/executor/derive/src/parameter.rs @@ -0,0 +1,43 @@ +//! Module with [`derive_parameter`](crate::derive_parameter) macro implementation + +use proc_macro2::TokenStream; +use quote::quote; + +/// [`derive_parameter`](crate::derive_parameter()) macro implementation +pub fn impl_derive_parameter(input: &syn::DeriveInput) -> TokenStream { + let generics = &input.generics; + let ident = &input.ident; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + quote! { + impl #impl_generics ::iroha_executor::parameter::Parameter for #ident #ty_generics #where_clause {} + + impl #impl_generics TryFrom<&::iroha_executor::data_model::parameter::CustomParameter> for #ident #ty_generics #where_clause { + type Error = ::iroha_executor::TryFromDataModelObjectError; + + fn try_from(value: &::iroha_executor::data_model::parameter::CustomParameter) -> core::result::Result { + if *value.id() != ::id() { + return Err(Self::Error::UnknownIdent(alloc::string::ToString::to_string(value.id().name().as_ref()))); + } + + serde_json::from_str::(value.payload().as_ref()).map_err(Self::Error::Deserialize) + } + } + + impl #impl_generics From<#ident #ty_generics> for ::iroha_executor::data_model::parameter::CustomParameter #where_clause { + fn from(value: #ident #ty_generics) -> Self { + ::iroha_executor::data_model::parameter::CustomParameter::new( + <#ident as ::iroha_executor::parameter::Parameter>::id(), + ::serde_json::to_value::<#ident #ty_generics>(value) + .expect("INTERNAL BUG: Failed to serialize Executor data model entity"), + ) + } + } + + impl #impl_generics From<#ident #ty_generics> for ::iroha_executor::data_model::parameter::Parameter #where_clause { + fn from(value: #ident #ty_generics) -> Self { + Self::Custom(value.into()) + } + } + } +} 
diff --git a/smart_contract/executor/derive/src/permission.rs b/smart_contract/executor/derive/src/permission.rs index 0cb6f5c3a39..867ec0813ef 100644 --- a/smart_contract/executor/derive/src/permission.rs +++ b/smart_contract/executor/derive/src/permission.rs @@ -18,7 +18,7 @@ pub fn impl_derive_permission(input: &syn::DeriveInput) -> TokenStream { account_id.clone(), ) ) - .expect("`FindPermissionsByAccountId` query should never fail, it's a bug"); + .expect("INTERNAL BUG: `FindPermissionsByAccountId` should never fail"); account_tokens_cursor .into_iter() @@ -34,11 +34,11 @@ pub fn impl_derive_permission(input: &syn::DeriveInput) -> TokenStream { impl #impl_generics TryFrom<&::iroha_executor::data_model::permission::Permission> for #ident #ty_generics #where_clause { type Error = ::iroha_executor::TryFromDataModelObjectError; - fn try_from( - value: &::iroha_executor::data_model::permission::Permission, - ) -> core::result::Result { - if *value.id() != ::id() { - return Err(Self::Error::Id(value.id().name().clone())); + fn try_from(value: &::iroha_executor::data_model::permission::Permission) -> core::result::Result { + use alloc::borrow::ToOwned as _; + + if *value.name() != ::name() { + return Err(Self::Error::UnknownIdent(value.name().to_owned())); } serde_json::from_str::(value.payload().as_ref()).map_err(Self::Error::Deserialize) @@ -48,7 +48,7 @@ pub fn impl_derive_permission(input: &syn::DeriveInput) -> TokenStream { impl #impl_generics From<#ident #ty_generics> for ::iroha_executor::data_model::permission::Permission #where_clause { fn from(value: #ident #ty_generics) -> Self { ::iroha_executor::data_model::permission::Permission::new( - <#ident as ::iroha_executor::permission::Permission>::id(), + <#ident as ::iroha_executor::permission::Permission>::name(), ::serde_json::to_value::<#ident #ty_generics>(value) .expect("INTERNAL BUG: Failed to serialize Executor data model entity"), ) diff --git a/smart_contract/executor/src/default.rs 
b/smart_contract/executor/src/default.rs index 3d7010422f1..e7102fba840 100644 --- a/smart_contract/executor/src/default.rs +++ b/smart_contract/executor/src/default.rs @@ -25,10 +25,9 @@ pub use domain::{ visit_transfer_domain, visit_unregister_domain, }; pub use executor::visit_upgrade; -pub use fail::visit_fail; use iroha_smart_contract::data_model::isi::InstructionBox; pub use log::visit_log; -pub use parameter::{visit_new_parameter, visit_set_parameter}; +pub use parameter::visit_set_parameter; pub use peer::{visit_register_peer, visit_unregister_peer}; pub use permission::{visit_grant_account_permission, visit_revoke_account_permission}; use permissions::AnyPermission; @@ -86,9 +85,6 @@ pub fn visit_instruction( isi: &InstructionBox, ) { match isi { - InstructionBox::NewParameter(isi) => { - executor.visit_new_parameter(authority, isi); - } InstructionBox::SetParameter(isi) => { executor.visit_set_parameter(authority, isi); } @@ -101,9 +97,6 @@ pub fn visit_instruction( InstructionBox::Burn(isi) => { executor.visit_burn(authority, isi); } - InstructionBox::Fail(isi) => { - executor.visit_fail(authority, isi); - } InstructionBox::Grant(isi) => { executor.visit_grant(authority, isi); } @@ -842,7 +835,7 @@ pub mod asset_definition { pub mod asset { use iroha_smart_contract::data_model::{ - asset::AssetValue, isi::Instruction, metadata::Metadata, + asset::AssetValue, isi::BuiltInInstruction, metadata::Metadata, }; use iroha_smart_contract_utils::Encode; @@ -919,7 +912,7 @@ pub mod asset { where V: Validate + Visit + ?Sized, Q: Into, - Mint: Instruction + Encode, + Mint: BuiltInInstruction + Encode, { let asset_id = isi.destination(); if is_genesis(executor) { @@ -962,7 +955,7 @@ pub mod asset { where V: Validate + Visit + ?Sized, Q: Into, - Burn: Instruction + Encode, + Burn: BuiltInInstruction + Encode, { let asset_id = isi.destination(); if is_genesis(executor) { @@ -1010,7 +1003,7 @@ pub mod asset { ) where V: Validate + Visit + ?Sized, Q: Into, - Transfer: 
Instruction + Encode, + Transfer: BuiltInInstruction + Encode, { let asset_id = isi.source(); if is_genesis(executor) { @@ -1121,25 +1114,6 @@ pub mod asset { pub mod parameter { use super::*; - #[allow(clippy::needless_pass_by_value)] - pub fn visit_new_parameter( - executor: &mut V, - authority: &AccountId, - isi: &NewParameter, - ) { - if is_genesis(executor) { - execute!(executor, isi); - } - if permissions::parameter::CanCreateParameters.is_owned_by(authority) { - execute!(executor, isi); - } - - deny!( - executor, - "Can't create new configuration parameters outside genesis without permission" - ); - } - #[allow(clippy::needless_pass_by_value)] pub fn visit_set_parameter( executor: &mut V, @@ -1695,7 +1669,7 @@ pub mod custom { pub fn visit_custom( executor: &mut V, _authority: &AccountId, - _isi: &Custom, + _isi: &CustomInstruction, ) { deny!( executor, @@ -1704,18 +1678,6 @@ pub mod custom { } } -pub mod fail { - use super::*; - - pub fn visit_fail( - executor: &mut V, - _authority: &AccountId, - isi: &Fail, - ) { - execute!(executor, isi) - } -} - fn is_genesis(executor: &V) -> bool { executor.block_height() == 0 } diff --git a/smart_contract/executor/src/default/permissions.rs b/smart_contract/executor/src/default/permissions.rs index 735ef253942..0d9f74ceacd 100644 --- a/smart_contract/executor/src/default/permissions.rs +++ b/smart_contract/executor/src/default/permissions.rs @@ -46,12 +46,12 @@ macro_rules! 
declare_permissions { type Error = $crate::TryFromDataModelObjectError; fn try_from(token: &$crate::data_model::permission::Permission) -> Result { - match token.id().name().as_ref() { $( + match token.name().as_ref() { $( stringify!($token_ty) => { let token = <$($token_path::)+$token_ty>::try_from(token)?; Ok(Self::$token_ty(token)) } )+ - _ => Err(Self::Error::Id(token.id().name().clone())) + _ => Err(Self::Error::UnknownIdent(token.name().to_owned())) } } } diff --git a/smart_contract/executor/src/lib.rs b/smart_contract/executor/src/lib.rs index f2a430d5dc5..0208209c2dc 100644 --- a/smart_contract/executor/src/lib.rs +++ b/smart_contract/executor/src/lib.rs @@ -19,6 +19,7 @@ use iroha_smart_contract_utils::{decode_with_length_prefix_from_raw, encode_and_ pub use smart_contract::{data_model, parse, stub_getrandom}; pub mod default; +pub mod parameter; pub mod permission; pub mod utils { @@ -179,8 +180,8 @@ macro_rules! deny { /// Such objects are [`data_model::prelude::Permission`] and [`data_model::prelude::Parameter`]. 
#[derive(Debug)] pub enum TryFromDataModelObjectError { - /// Unexpected object id - Id(data_model::prelude::Name), + /// Unexpected object name + UnknownIdent(iroha_schema::Ident), /// Failed to deserialize object payload Deserialize(serde_json::Error), } @@ -188,8 +189,9 @@ pub enum TryFromDataModelObjectError { /// A convenience to build [`ExecutorDataModel`] from within the executor #[derive(Debug, Clone)] pub struct DataModelBuilder { - permissions: BTreeSet, - custom_instruction: Option, + parameters: BTreeSet, + instructions: BTreeSet, + permissions: BTreeSet, schema: MetaMap, } @@ -199,8 +201,9 @@ impl DataModelBuilder { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self { + parameters: <_>::default(), + instructions: <_>::default(), permissions: <_>::default(), - custom_instruction: None, schema: <_>::default(), } } @@ -223,27 +226,39 @@ impl DataModelBuilder { /// Define a permission in the data model #[must_use] - pub fn add_permission(mut self) -> Self { - ::update_schema_map(&mut self.schema); - self.permissions.insert(::id()); + pub fn add_parameter>( + mut self, + param: T, + ) -> Self { + T::update_schema_map(&mut self.schema); + self.parameters.insert(param.into()); self } /// Define a type of custom instruction in the data model. /// Corresponds to payload of `InstructionBox::Custom`. 
#[must_use] - pub fn with_custom_instruction(mut self) -> Self { + pub fn add_instruction(mut self) -> Self { + T::update_schema_map(&mut self.schema); + self.instructions.insert(T::type_name()); + self + } + + /// Define a permission in the data model + #[must_use] + pub fn add_permission(mut self) -> Self { T::update_schema_map(&mut self.schema); - self.custom_instruction = Some(T::type_name()); + self.permissions + .insert(::name()); self } /// Remove a permission from the data model #[must_use] pub fn remove_permission(mut self) -> Self { - ::remove_from_schema(&mut self.schema); + T::remove_from_schema(&mut self.schema); self.permissions - .remove(&::id()); + .remove(&::name()); self } @@ -257,7 +272,7 @@ impl DataModelBuilder { for role in all_roles.into_iter().map(|role| role.unwrap()) { for permission in role.permissions() { - if !self.permissions.contains(permission.id()) { + if !self.permissions.contains(permission.name()) { Revoke::role_permission(permission.clone(), role.id().clone()) .execute() .unwrap(); @@ -272,7 +287,7 @@ impl DataModelBuilder { .into_iter(); for permission in account_permissions.map(|permission| permission.unwrap()) { - if !self.permissions.contains(permission.id()) { + if !self.permissions.contains(permission.name()) { Revoke::permission(permission, account.id().clone()) .execute() .unwrap(); @@ -281,8 +296,12 @@ impl DataModelBuilder { } set_data_model(&ExecutorDataModel::new( + self.parameters + .into_iter() + .map(|param| (param.id().clone(), param)) + .collect(), + self.instructions, self.permissions, - self.custom_instruction, serde_json::to_value(&self.schema) .expect("INTERNAL BUG: Failed to serialize Executor data model entity") .into(), @@ -308,8 +327,8 @@ pub mod prelude { pub use alloc::vec::Vec; pub use iroha_executor_derive::{ - entrypoint, Constructor, Permission, Validate, ValidateEntrypoints, ValidateGrantRevoke, - Visit, + entrypoint, Constructor, Parameter, Permission, Validate, ValidateEntrypoints, + 
ValidateGrantRevoke, Visit, }; pub use iroha_smart_contract::prelude::*; @@ -320,6 +339,7 @@ pub mod prelude { ValidationFail, }, deny, execute, + parameter::Parameter as ParameterTrait, permission::Permission as PermissionTrait, DataModelBuilder, Validate, }; diff --git a/smart_contract/executor/src/parameter.rs b/smart_contract/executor/src/parameter.rs new file mode 100644 index 00000000000..22a61c74a3d --- /dev/null +++ b/smart_contract/executor/src/parameter.rs @@ -0,0 +1,17 @@ +//! Module with parameter related functionality. + +use iroha_schema::IntoSchema; +use iroha_smart_contract::{data_model::parameter::CustomParameterId, debug::DebugExpectExt}; +use serde::{de::DeserializeOwned, Serialize}; + +/// Blockchain specific parameter +pub trait Parameter: Default + Serialize + DeserializeOwned + IntoSchema { + /// Parameter id, according to [`IntoSchema`]. + fn id() -> CustomParameterId { + CustomParameterId::new( + ::type_name() + .parse() + .dbg_expect("Failed to parse parameter id as `Name`"), + ) + } +} diff --git a/smart_contract/executor/src/permission.rs b/smart_contract/executor/src/permission.rs index a6ead932324..a402e861c32 100644 --- a/smart_contract/executor/src/permission.rs +++ b/smart_contract/executor/src/permission.rs @@ -1,15 +1,15 @@ -//! Module with permission tokens and permission related functionality. +//! Module with permission related functionality. use alloc::borrow::ToOwned as _; -use iroha_schema::IntoSchema; +use iroha_schema::{Ident, IntoSchema}; use iroha_smart_contract::QueryOutputCursor; use iroha_smart_contract_utils::debug::DebugExpectExt as _; use serde::{de::DeserializeOwned, Serialize}; use crate::prelude::{Permission as PermissionObject, *}; -/// Is used to check if the permission token is owned by the account. +/// Used to check if the permission token is owned by the account. 
pub trait Permission: Serialize + DeserializeOwned + IntoSchema + PartialEq + ValidateGrantRevoke { @@ -17,12 +17,8 @@ pub trait Permission: fn is_owned_by(&self, account_id: &AccountId) -> bool; /// Permission id, according to [`IntoSchema`]. - fn id() -> PermissionId { - PermissionId::new( - ::type_name() - .parse() - .dbg_expect("Failed to parse permission id as `Name`"), - ) + fn name() -> Ident { + ::type_name() } } diff --git a/smart_contract/src/lib.rs b/smart_contract/src/lib.rs index 327ac4744d3..f152401f045 100644 --- a/smart_contract/src/lib.rs +++ b/smart_contract/src/lib.rs @@ -9,17 +9,16 @@ use alloc::{boxed::Box, vec::Vec}; #[cfg(not(test))] use data_model::smart_contract::payloads; use data_model::{ - isi::Instruction, + isi::BuiltInInstruction, prelude::*, query::{ - cursor::ForwardCursor, predicate::PredicateBox, sorting::Sorting, Pagination, Query, - QueryOutputBox, + cursor::ForwardCursor, predicate::PredicateBox, sorting::Sorting, IterableQuery, + Pagination, Query, QueryOutputBox, }, BatchedResponse, }; use derive_more::Display; pub use iroha_data_model as data_model; -use iroha_data_model::query::IterableQuery; pub use iroha_smart_contract_derive::main; pub use iroha_smart_contract_utils::{debug, error, info, log, warn}; use iroha_smart_contract_utils::{ @@ -116,7 +115,7 @@ macro_rules! 
parse { } /// Implementing instructions can be executed on the host -pub trait ExecuteOnHost: Instruction { +pub trait ExecuteOnHost { /// Execute instruction on the host /// /// # Errors @@ -126,7 +125,7 @@ pub trait ExecuteOnHost: Instruction { fn execute(&self) -> Result<(), ValidationFail>; } -impl ExecuteOnHost for I { +impl ExecuteOnHost for I { fn execute(&self) -> Result<(), ValidationFail> { #[cfg(not(test))] use host::execute_instruction as host_execute_instruction; diff --git a/tools/kagami/src/genesis/generate.rs b/tools/kagami/src/genesis/generate.rs index fc4453daa17..1004a9f57d1 100644 --- a/tools/kagami/src/genesis/generate.rs +++ b/tools/kagami/src/genesis/generate.rs @@ -5,12 +5,7 @@ use std::{ use clap::{Parser, Subcommand}; use color_eyre::eyre::WrapErr as _; -use iroha_config::parameters::defaults::chain_wide as chain_wide_defaults; -use iroha_data_model::{ - metadata::Limits, - parameter::{default::*, ParametersBuilder}, - prelude::*, -}; +use iroha_data_model::prelude::*; use iroha_genesis::{GenesisBuilder, RawGenesisTransaction, GENESIS_DOMAIN_ID}; use serde_json::json; use test_samples::{gen_account_in, ALICE_ID, BOB_ID, CARPENTER_ID}; @@ -92,23 +87,20 @@ pub fn generate_default( genesis_public_key: PublicKey, ) -> color_eyre::Result { let genesis_account_id = AccountId::new(GENESIS_DOMAIN_ID.clone(), genesis_public_key); - let mut meta = Metadata::new(); - meta.insert_with_limits("key".parse()?, "value".to_owned(), Limits::new(1024, 1024))?; + let mut meta = Metadata::default(); + meta.insert("key".parse()?, JsonString::new("value")); let mut builder = builder .domain_with_metadata("wonderland".parse()?, meta.clone()) .account_with_metadata(ALICE_ID.signatory().clone(), meta.clone()) .account_with_metadata(BOB_ID.signatory().clone(), meta) - .asset( - "rose".parse()?, - AssetValueType::Numeric(NumericSpec::default()), - ) + .asset("rose".parse()?, AssetType::Numeric(NumericSpec::default())) .finish_domain() 
.domain("garden_of_live_flowers".parse()?) .account(CARPENTER_ID.signatory().clone()) .asset( "cabbage".parse()?, - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), ) .finish_domain(); @@ -134,7 +126,7 @@ pub fn generate_default( "wonderland".parse()?, ALICE_ID.clone(), ); - let register_user_metadata_access = Register::role( + let register_user_metadata_access: InstructionBox = Register::role( Role::new("ALICE_METADATA_ACCESS".parse()?) .add_permission(Permission::new( "CanSetKeyValueInAccount".parse()?, @@ -147,62 +139,6 @@ pub fn generate_default( ) .into(); - let parameter_defaults = ParametersBuilder::new() - .add_parameter( - MAX_TRANSACTIONS_IN_BLOCK, - Numeric::new(chain_wide_defaults::MAX_TXS.get().into(), 0), - )? - .add_parameter( - BLOCK_TIME, - Numeric::new(chain_wide_defaults::BLOCK_TIME.as_millis(), 0), - )? - .add_parameter( - COMMIT_TIME_LIMIT, - Numeric::new(chain_wide_defaults::COMMIT_TIME.as_millis(), 0), - )? - .add_parameter(TRANSACTION_LIMITS, chain_wide_defaults::TRANSACTION_LIMITS)? - .add_parameter( - WSV_DOMAIN_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ASSET_DEFINITION_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ACCOUNT_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_ASSET_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_TRIGGER_METADATA_LIMITS, - chain_wide_defaults::METADATA_LIMITS, - )? - .add_parameter( - WSV_IDENT_LENGTH_LIMITS, - chain_wide_defaults::IDENT_LENGTH_LIMITS, - )? - .add_parameter( - EXECUTOR_FUEL_LIMIT, - Numeric::new(chain_wide_defaults::WASM_FUEL_LIMIT.into(), 0), - )? - .add_parameter( - EXECUTOR_MAX_MEMORY, - Numeric::new(chain_wide_defaults::WASM_MAX_MEMORY.get().into(), 0), - )? - .add_parameter( - WASM_FUEL_LIMIT, - Numeric::new(chain_wide_defaults::WASM_FUEL_LIMIT.into(), 0), - )? 
- .add_parameter( - WASM_MAX_MEMORY, - Numeric::new(chain_wide_defaults::WASM_MAX_MEMORY.get().into(), 0), - )? - .into_create_parameters(); - for isi in [ mint.into(), mint_cabbage.into(), @@ -211,7 +147,6 @@ pub fn generate_default( grant_permission_to_set_parameters.into(), ] .into_iter() - .chain(parameter_defaults.into_iter()) .chain(std::iter::once(register_user_metadata_access)) { builder = builder.append_instruction(isi); @@ -250,7 +185,7 @@ fn generate_synthetic( genesis.append_instruction( Register::asset_definition(AssetDefinition::new( asset_definition_id, - AssetValueType::Numeric(NumericSpec::default()), + AssetType::Numeric(NumericSpec::default()), )) .into(), ); diff --git a/tools/parity_scale_cli/samples/account.bin b/tools/parity_scale_cli/samples/account.bin index b9ece2bea12..84168a56491 100644 Binary files a/tools/parity_scale_cli/samples/account.bin and b/tools/parity_scale_cli/samples/account.bin differ diff --git a/tools/parity_scale_cli/samples/domain.bin b/tools/parity_scale_cli/samples/domain.bin index 433aeb0bfe7..6242172300d 100644 Binary files a/tools/parity_scale_cli/samples/domain.bin and b/tools/parity_scale_cli/samples/domain.bin differ diff --git a/tools/parity_scale_cli/samples/trigger.bin b/tools/parity_scale_cli/samples/trigger.bin index c6493efbb4f..d46095a94fd 100644 Binary files a/tools/parity_scale_cli/samples/trigger.bin and b/tools/parity_scale_cli/samples/trigger.bin differ diff --git a/tools/parity_scale_cli/src/main.rs b/tools/parity_scale_cli/src/main.rs index 1ff425768d1..bd162df8b47 100644 --- a/tools/parity_scale_cli/src/main.rs +++ b/tools/parity_scale_cli/src/main.rs @@ -313,13 +313,11 @@ mod tests { #[test] fn decode_account_sample() { - let limits = MetadataLimits::new(256, 256); - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); metadata - .insert_with_limits( + .insert( "hat".parse().expect("Valid"), - "white".parse::().unwrap(), - limits, + "white".parse::().expect("Valid"), ) 
.expect("Valid"); let account = Account::new(ALICE_ID.clone()).with_metadata(metadata); @@ -329,10 +327,9 @@ mod tests { #[test] fn decode_domain_sample() { - let limits = MetadataLimits::new(256, 256); - let mut metadata = Metadata::new(); + let mut metadata = Metadata::default(); metadata - .insert_with_limits("Is_Jabberwocky_alive".parse().expect("Valid"), true, limits) + .insert("Is_Jabberwocky_alive".parse().expect("Valid"), true) .expect("Valid"); let domain = Domain::new("wonderland".parse().expect("Valid")) .with_logo( diff --git a/tools/swarm/src/lib.rs b/tools/swarm/src/lib.rs index 73f4a885967..4b864f38c1c 100644 --- a/tools/swarm/src/lib.rs +++ b/tools/swarm/src/lib.rs @@ -218,7 +218,7 @@ mod tests { --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " "##]).assert_eq(&build_as_string( nonzero_ext::nonzero!(1u16), @@ -276,7 +276,7 @@ mod tests { --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " "##]).assert_eq(&build_as_string( nonzero_ext::nonzero!(1u16), @@ -329,7 +329,7 @@ mod tests { --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " "#]).assert_eq(&build_as_string( nonzero_ext::nonzero!(1u16), @@ -384,7 +384,7 @@ mod tests { --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad " irohad1: image: hyperledger/iroha:dev diff --git a/tools/swarm/src/schema.rs b/tools/swarm/src/schema.rs index b6304648c67..90278d1fa01 100644 --- a/tools/swarm/src/schema.rs +++ b/tools/swarm/src/schema.rs @@ -202,7 +202,7 @@ impl<'a> GenesisEnv<'a> { Self { base: PeerEnv::new(key_pair, ports, chain, genesis_public_key, topology), genesis_private_key, - genesis_signed_file: ContainerPath(GENESIS_SIGNED_FILE), + genesis_signed_file: ContainerPath(GENESIS), topology: topology.iter().collect(), } } @@ -218,7 +218,7 @@ 
struct PortMapping(u16, u16); struct ContainerPath<'a>(&'a str); const CONTAINER_CONFIG_DIR: &str = "/config"; -const GENESIS_SIGNED_FILE: &str = "/tmp/genesis.signed.scale"; +const GENESIS: &str = "/tmp/genesis.signed.scale"; /// Mapping between `host:container` paths. #[derive(Copy, Clone, Debug)] @@ -299,7 +299,7 @@ const SIGN_AND_SUBMIT_GENESIS: &str = r#"/bin/sh -c " --private-key $$GENESIS_PRIVATE_KEY \\ --out-file $$GENESIS_SIGNED_FILE \\ && \\ - irohad --submit-genesis + irohad ""#; /// Configuration of the `irohad` service that submits genesis. diff --git a/torii/src/routing.rs b/torii/src/routing.rs index 87c2381673e..3f8dbe997ef 100644 --- a/torii/src/routing.rs +++ b/torii/src/routing.rs @@ -52,7 +52,7 @@ pub async fn handle_transaction( transaction: SignedTransaction, ) -> Result { let state_view = state.view(); - let transaction_limits = state_view.config.transaction_limits; + let transaction_limits = state_view.world().parameters().transaction; let transaction = AcceptedTransaction::accept(transaction, &chain_id, transaction_limits) .map_err(Error::AcceptTransaction)?; queue