diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000000000000000000000000000000000000..9fb23ce69433bf0a487e5b186f3a9ea7a0082880
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,13 @@
+# EditorConfig helps developers define and maintain consistent
+# coding styles between different editors and IDEs
+# editorconfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = space
+indent_size = 4
diff --git a/.rustfmt.toml b/.rustfmt.toml
index 183ce92e0b521dd80e3070fff7cbbfe0841ee691..422da2beabf953cdfcbf5798de76f6a3abcde8df 100644
--- a/.rustfmt.toml
+++ b/.rustfmt.toml
@@ -1,3 +1,67 @@
+max_width = 90 # changed
+hard_tabs = false
+tab_spaces = 4
+newline_style = "Auto"
+use_small_heuristics = "Default"
+indent_style = "Block"
+wrap_comments = false
+format_code_in_doc_comments = false
+comment_width = 80
+normalize_comments = true # changed
+normalize_doc_attributes = false
 license_template_path = "FILE_HEADER" # changed
+format_strings = false
+format_macro_matchers = false
+format_macro_bodies = true
+empty_item_single_line = true
+struct_lit_single_line = true
+fn_single_line = false
+where_single_line = false
+imports_indent = "Block"
+imports_layout = "Vertical" # changed
+imports_granularity = "Crate" # changed
+reorder_imports = true
+reorder_modules = true
+reorder_impl_items = false
+type_punctuation_density = "Wide"
+space_before_colon = false
+space_after_colon = true
+spaces_around_ranges = false
+binop_separator = "Front"
+remove_nested_parens = true
+combine_control_expr = false # changed
+overflow_delimited_expr = false
+struct_field_align_threshold = 0
+enum_discrim_align_threshold = 0
+match_arm_blocks = true
+force_multiline_blocks = true # changed
+fn_args_layout = "Tall"
+brace_style = "SameLineWhere"
+control_brace_style = "AlwaysSameLine"
+trailing_semicolon = false # changed
+trailing_comma = "Vertical"
+match_block_trailing_comma = false
+blank_lines_upper_bound = 1
+blank_lines_lower_bound = 0
+edition = "2021" # changed
+version = "One"
+merge_derives = true
+use_try_shorthand = true # changed
+use_field_init_shorthand = true # changed
+force_explicit_abi = true
+condense_wildcard_suffixes = false
+color = "Auto"
+unstable_features = true # changed
+disable_all_formatting = false
+skip_children = false
+hide_parse_errors = false
+error_on_line_overflow = false
+error_on_unformatted = false
 report_todo = "Always"
 report_fixme = "Always"
+ignore = []
+
+# Below are `rustfmt` internal settings
+#
+# emit_mode = "Files"
+# make_backup = false
diff --git a/build.rs b/build.rs
index 8a8fcd97a5720ca90ce9a00a0d8383e91c574a35..5a31ad83e78794536fdd4a769db7363ff471649a 100644
--- a/build.rs
+++ b/build.rs
@@ -19,17 +19,31 @@ use std::{
     env,
     ffi::OsStr,
     fs::File,
-    io::{prelude::*, Write},
+    io::{
+        prelude::*,
+        Write,
+    },
     iter::Iterator,
-    path::{Path, PathBuf},
+    path::{
+        Path,
+        PathBuf,
+    },
     process::Command,
 };
 
 use anyhow::Result;
 use walkdir::WalkDir;
-use zip::{write::FileOptions, CompressionMethod, ZipWriter};
+use zip::{
+    write::FileOptions,
+    CompressionMethod,
+    ZipWriter,
+};
 
-use platforms::{TARGET_ARCH, TARGET_ENV, TARGET_OS};
+use platforms::{
+    TARGET_ARCH,
+    TARGET_ENV,
+    TARGET_OS,
+};
 use substrate_build_script_utils::rerun_if_git_head_changed;
 
 const DEFAULT_UNIX_PERMISSIONS: u32 = 0o755;
@@ -59,7 +73,10 @@ fn main() {
 /// * Creates a zip archive of the `new` project template.
/// * Builds the `dylint` driver found in `ink_linting`, the compiled /// driver is put into a zip archive as well. -fn zip_template_and_build_dylint_driver(manifest_dir: PathBuf, out_dir: PathBuf) -> Result<()> { +fn zip_template_and_build_dylint_driver( + manifest_dir: PathBuf, + out_dir: PathBuf, +) -> Result<()> { zip_template(&manifest_dir, &out_dir)?; check_dylint_link_installed()?; @@ -108,7 +125,11 @@ fn zip_template_and_build_dylint_driver(manifest_dir: PathBuf, out_dir: PathBuf) ) })?; - let res = build_and_zip_dylint_driver(ink_dylint_driver_dir, out_dir, dylint_driver_dst_file); + let res = build_and_zip_dylint_driver( + ink_dylint_driver_dir, + out_dir, + dylint_driver_dst_file, + ); // After the build process of `ink_linting` happened we need to name back to the original // `_Cargo.toml` name, otherwise the directory would be "dirty" and `cargo publish` would @@ -295,7 +316,11 @@ fn zip_dir(src_dir: &Path, dst_file: &Path, method: CompressionMethod) -> Result /// /// `dylint` drivers have a file name of the form `libink_linting@toolchain.[so,dll]`. #[cfg(not(feature = "cargo-clippy"))] -fn zip_dylint_driver(src_dir: &Path, dst_file: &Path, method: CompressionMethod) -> Result<()> { +fn zip_dylint_driver( + src_dir: &Path, + dst_file: &Path, + method: CompressionMethod, +) -> Result<()> { if !src_dir.exists() { anyhow::bail!("src_dir '{}' does not exist", src_dir.display()); } @@ -333,7 +358,7 @@ fn zip_dylint_driver(src_dir: &Path, dst_file: &Path, method: CompressionMethod) zip.finish()?; lib_found = true; - break; + break } } diff --git a/metadata/byte_str.rs b/metadata/byte_str.rs index 020c2b2a40a257c7bc82ad2e1b0e513488bd255b..674377414ef6aefc27adf18b8fff9a2198270dbd 100644 --- a/metadata/byte_str.rs +++ b/metadata/byte_str.rs @@ -23,7 +23,7 @@ where { if bytes.is_empty() { // Return empty string without prepended `0x`. - return serializer.serialize_str(""); + return serializer.serialize_str("") } serde_hex::serialize(bytes, serializer) } @@ -56,7 +56,9 @@ where } /// Deserializes the given hex string with optional `0x` prefix. 
-pub fn deserialize_from_byte_str_array<'de, D>(deserializer: D) -> Result<[u8; 32], D::Error> +pub fn deserialize_from_byte_str_array<'de, D>( + deserializer: D, +) -> Result<[u8; 32], D::Error> where D: serde::Deserializer<'de>, { diff --git a/metadata/lib.rs b/metadata/lib.rs index 990b8339c8951774ed3c5dd8db0241d9326e2e65..6a71554c8067022db7dc9fdc68740ae26e6a5e43 100644 --- a/metadata/lib.rs +++ b/metadata/lib.rs @@ -55,10 +55,22 @@ mod byte_str; use semver::Version; -use serde::{de, Deserialize, Serialize, Serializer}; -use serde_json::{Map, Value}; +use serde::{ + de, + Deserialize, + Serialize, + Serializer, +}; +use serde_json::{ + Map, + Value, +}; use std::{ - fmt::{Display, Formatter, Result as DisplayResult}, + fmt::{ + Display, + Formatter, + Result as DisplayResult, + }, str::FromStr, }; use url::Url; @@ -627,8 +639,10 @@ mod tests { #[test] fn json_with_optional_fields() { let language = SourceLanguage::new(Language::Ink, Version::new(2, 1, 0)); - let compiler = - SourceCompiler::new(Compiler::RustC, Version::parse("1.46.0-nightly").unwrap()); + let compiler = SourceCompiler::new( + Compiler::RustC, + Version::parse("1.46.0-nightly").unwrap(), + ); let wasm = SourceWasm::new(vec![0u8, 1u8, 2u8]); let source = Source::new(Some(wasm), CodeHash([0u8; 32]), language, compiler); let contract = Contract::builder() @@ -709,8 +723,10 @@ mod tests { #[test] fn json_excludes_optional_fields() { let language = SourceLanguage::new(Language::Ink, Version::new(2, 1, 0)); - let compiler = - SourceCompiler::new(Compiler::RustC, Version::parse("1.46.0-nightly").unwrap()); + let compiler = SourceCompiler::new( + Compiler::RustC, + Version::parse("1.46.0-nightly").unwrap(), + ); let source = Source::new(None, CodeHash([0u8; 32]), language, compiler); let contract = Contract::builder() .name("incrementer".to_string()) @@ -759,8 +775,10 @@ mod tests { #[test] fn decoding_works() { let language = SourceLanguage::new(Language::Ink, Version::new(2, 1, 0)); - let compiler = - SourceCompiler::new(Compiler::RustC, Version::parse("1.46.0-nightly").unwrap()); + let compiler = SourceCompiler::new( + Compiler::RustC, + Version::parse("1.46.0-nightly").unwrap(), + ); let wasm = SourceWasm::new(vec![0u8, 1u8, 2u8]); let source = Source::new(Some(wasm), CodeHash([0u8; 32]), language, compiler); let contract = Contract::builder() diff --git a/src/cmd/build.rs b/src/cmd/build.rs index 7cb0d4657e79954940fb51037030c1ceaf2026c1..ccc1d33dc279e8f54afbfdf6b4cb9f9335d90cbe 100644 --- a/src/cmd/build.rs +++ b/src/cmd/build.rs @@ -16,21 +16,49 @@ use crate::{ crate_metadata::CrateMetadata, - maybe_println, util, validate_wasm, - workspace::{Manifest, ManifestPath, Profile, Workspace}, - BuildArtifacts, BuildMode, BuildResult, Network, OptimizationPasses, OptimizationResult, - OutputType, UnstableFlags, UnstableOptions, Verbosity, VerbosityFlags, + maybe_println, + util, + validate_wasm, + workspace::{ + Manifest, + ManifestPath, + Profile, + Workspace, + }, + BuildArtifacts, + BuildMode, + BuildResult, + Network, + OptimizationPasses, + OptimizationResult, + OutputType, + UnstableFlags, + UnstableOptions, + Verbosity, + VerbosityFlags, +}; +use anyhow::{ + Context, + Result, }; -use anyhow::{Context, Result}; use colored::Colorize; -use parity_wasm::elements::{External, Internal, MemoryType, Module, Section}; +use parity_wasm::elements::{ + External, + Internal, + MemoryType, + Module, + Section, +}; use regex::Regex; use semver::Version; use std::{ convert::TryFrom, ffi::OsStr, fs::metadata, - path::{Path, PathBuf}, + 
path::{ + Path, + PathBuf, + }, process::Command, str, }; @@ -417,19 +445,23 @@ fn check_dylint_requirements(_working_dir: Option<&Path>) -> Result<()> { /// /// Iterates over the import section, finds the memory import entry if any and adjusts the maximum /// limit. -fn ensure_maximum_memory_pages(module: &mut Module, maximum_allowed_pages: u32) -> Result<()> { +fn ensure_maximum_memory_pages( + module: &mut Module, + maximum_allowed_pages: u32, +) -> Result<()> { let mem_ty = module .import_section_mut() .and_then(|section| { - section - .entries_mut() - .iter_mut() - .find_map(|entry| match entry.external_mut() { + section.entries_mut().iter_mut().find_map(|entry| { + match entry.external_mut() { External::Memory(ref mut mem_ty) => Some(mem_ty), _ => None, - }) + } + }) }) - .context("Memory import is not found. Is --import-memory specified in the linker args")?; + .context( + "Memory import is not found. Is --import-memory specified in the linker args", + )?; if let Some(requested_maximum) = mem_ty.limits().maximum() { // The module already has maximum, check if it is within the limit bail out. @@ -453,10 +485,12 @@ fn ensure_maximum_memory_pages(module: &mut Module, maximum_allowed_pages: u32) /// Presently all custom sections are not required so they can be stripped safely. /// The name section is already stripped by `wasm-opt`. fn strip_custom_sections(module: &mut Module) { - module.sections_mut().retain(|section| match section { - Section::Reloc(_) => false, - Section::Custom(custom) if custom.name() != "name" => false, - _ => true, + module.sections_mut().retain(|section| { + match section { + Section::Reloc(_) => false, + Section::Custom(custom) if custom.name() != "name" => false, + _ => true, + } }) } @@ -484,8 +518,8 @@ fn load_module>(path: P) -> Result { /// Performs required post-processing steps on the Wasm artifact. fn post_process_wasm(crate_metadata: &CrateMetadata) -> Result<()> { // Deserialize Wasm module from a file. 
- let mut module = - load_module(&crate_metadata.original_wasm).context("Loading of original wasm failed")?; + let mut module = load_module(&crate_metadata.original_wasm) + .context("Loading of original wasm failed")?; strip_exports(&mut module); ensure_maximum_memory_pages(&mut module, MAX_MEMORY_PAGES)?; @@ -527,7 +561,7 @@ fn optimize_wasm( return Err(anyhow::anyhow!( "Optimization failed, optimized wasm output file `{}` not found.", dest_optimized.display() - )); + )) } let original_size = metadata(&crate_metadata.dest_wasm)?.len() as f64 / 1000.0; @@ -899,15 +933,28 @@ pub(crate) fn execute(args: ExecuteArgs) -> Result { #[cfg(test)] mod tests_ci_only { use super::{ - assert_compatible_ink_dependencies, assert_debug_mode_supported, + assert_compatible_ink_dependencies, + assert_debug_mode_supported, check_wasm_opt_version_compatibility, }; use crate::{ - cmd::{build::load_module, BuildCommand}, - util::tests::{with_new_contract_project, with_tmp_dir}, + cmd::{ + build::load_module, + BuildCommand, + }, + util::tests::{ + with_new_contract_project, + with_tmp_dir, + }, workspace::Manifest, - BuildArtifacts, BuildMode, ManifestPath, OptimizationPasses, OutputType, UnstableOptions, - Verbosity, VerbosityFlags, + BuildArtifacts, + BuildMode, + ManifestPath, + OptimizationPasses, + OutputType, + UnstableOptions, + Verbosity, + VerbosityFlags, }; use semver::Version; #[cfg(unix)] @@ -915,15 +962,22 @@ mod tests_ci_only { use std::{ ffi::OsStr, io::Write, - path::{Path, PathBuf}, + path::{ + Path, + PathBuf, + }, }; /// Modifies the `Cargo.toml` under the supplied `cargo_toml_path` by /// setting `optimization-passes` in `[package.metadata.contract]` to `passes`. - fn write_optimization_passes_into_manifest(cargo_toml_path: &Path, passes: OptimizationPasses) { + fn write_optimization_passes_into_manifest( + cargo_toml_path: &Path, + passes: OptimizationPasses, + ) { let manifest_path = ManifestPath::new(cargo_toml_path).expect("manifest path creation failed"); - let mut manifest = Manifest::new(manifest_path.clone()).expect("manifest creation failed"); + let mut manifest = + Manifest::new(manifest_path.clone()).expect("manifest creation failed"); manifest .set_profile_optimization_passes(passes) .expect("setting `optimization-passes` in profile failed"); @@ -1125,7 +1179,8 @@ mod tests_ci_only { // the manifest path // when - let res = assert_compatible_ink_dependencies(&manifest_path, Verbosity::Default); + let res = + assert_compatible_ink_dependencies(&manifest_path, Verbosity::Default); // then assert!(res.is_ok()); @@ -1150,7 +1205,8 @@ mod tests_ci_only { .expect("writing manifest failed"); // when - let res = assert_compatible_ink_dependencies(&manifest_path, Verbosity::Default); + let res = + assert_compatible_ink_dependencies(&manifest_path, Verbosity::Default); // then assert!(res.is_err()); @@ -1214,8 +1270,9 @@ mod tests_ci_only { // this println is here to debug a spuriously failing CI at the following assert. eprintln!("error: {:?}", res); - assert!(format!("{:?}", res) - .starts_with("Err(Your wasm-opt version is 98, but we require a version >= 99.")); + assert!(format!("{:?}", res).starts_with( + "Err(Your wasm-opt version is 98, but we require a version >= 99." 
+ )); Ok(()) }) @@ -1283,21 +1340,26 @@ mod tests_ci_only { #[test] pub fn debug_mode_must_be_compatible() { - let _ = - assert_debug_mode_supported(&Version::parse("3.0.0-rc4").expect("parsing must work")) - .expect("debug mode must be compatible"); - let _ = - assert_debug_mode_supported(&Version::parse("4.0.0-rc1").expect("parsing must work")) - .expect("debug mode must be compatible"); - let _ = assert_debug_mode_supported(&Version::parse("5.0.0").expect("parsing must work")) - .expect("debug mode must be compatible"); + let _ = assert_debug_mode_supported( + &Version::parse("3.0.0-rc4").expect("parsing must work"), + ) + .expect("debug mode must be compatible"); + let _ = assert_debug_mode_supported( + &Version::parse("4.0.0-rc1").expect("parsing must work"), + ) + .expect("debug mode must be compatible"); + let _ = assert_debug_mode_supported( + &Version::parse("5.0.0").expect("parsing must work"), + ) + .expect("debug mode must be compatible"); } #[test] pub fn debug_mode_must_be_incompatible() { - let res = - assert_debug_mode_supported(&Version::parse("3.0.0-rc3").expect("parsing must work")) - .expect_err("assertion must fail"); + let res = assert_debug_mode_supported( + &Version::parse("3.0.0-rc3").expect("parsing must work"), + ) + .expect_err("assertion must fail"); assert_eq!( res.to_string(), "Building the contract in debug mode requires an ink! version newer than `3.0.0-rc3`!" @@ -1353,8 +1415,8 @@ mod tests_ci_only { std::fs::create_dir_all(new_dir_path).expect("creating dir must work"); std::fs::rename(old_lib_path, new_lib_path).expect("moving file must work"); - let mut manifest = - Manifest::new(manifest_path.clone()).expect("creating manifest must work"); + let mut manifest = Manifest::new(manifest_path.clone()) + .expect("creating manifest must work"); manifest .set_lib_path("srcfoo/lib.rs") .expect("setting lib path must work"); diff --git a/src/cmd/decode.rs b/src/cmd/decode.rs index b0e94f7e857c83258aacfe4c2532921b8964d21f..0ec7ebf7c96dbd5cd48e149acc6eb836c05abb32 100644 --- a/src/cmd/decode.rs +++ b/src/cmd/decode.rs @@ -15,11 +15,17 @@ // along with cargo-contract. If not, see . use crate::{ - cmd::extrinsics::{load_metadata, ContractMessageTranscoder}, + cmd::extrinsics::{ + load_metadata, + ContractMessageTranscoder, + }, util::decode_hex, DEFAULT_KEY_COL_WIDTH, }; -use anyhow::{Context, Result}; +use anyhow::{ + Context, + Result, +}; use colored::Colorize as _; #[derive(Debug, Clone, clap::Args)] @@ -47,12 +53,21 @@ impl DecodeCommand { const ERR_MSG: &str = "Failed to decode specified data as a hex value"; let decoded_data = match self.r#type { - DataType::Event => transcoder - .decode_contract_event(&mut &decode_hex(&self.data).context(ERR_MSG)?[..])?, - DataType::Message => transcoder - .decode_contract_message(&mut &decode_hex(&self.data).context(ERR_MSG)?[..])?, - DataType::Constructor => transcoder - .decode_contract_constructor(&mut &decode_hex(&self.data).context(ERR_MSG)?[..])?, + DataType::Event => { + transcoder.decode_contract_event( + &mut &decode_hex(&self.data).context(ERR_MSG)?[..], + )? + } + DataType::Message => { + transcoder.decode_contract_message( + &mut &decode_hex(&self.data).context(ERR_MSG)?[..], + )? + } + DataType::Constructor => { + transcoder.decode_contract_constructor( + &mut &decode_hex(&self.data).context(ERR_MSG)?[..], + )? 
+ } }; println!( diff --git a/src/cmd/extrinsics/call.rs b/src/cmd/extrinsics/call.rs index e6d255509362530f28139c95d1c64b04f9c8d554..cf7ce7cb799b7acfc946209a3641794d1a6cdbcb 100644 --- a/src/cmd/extrinsics/call.rs +++ b/src/cmd/extrinsics/call.rs @@ -15,17 +15,34 @@ // along with cargo-contract. If not, see . use super::{ - display_contract_exec_result, display_events, load_metadata, parse_balance, - wait_for_success_and_handle_error, Balance, ContractMessageTranscoder, ExtrinsicOpts, - PairSigner, RuntimeApi, EXEC_RESULT_MAX_KEY_COL_WIDTH, + display_contract_exec_result, + display_events, + load_metadata, + parse_balance, + wait_for_success_and_handle_error, + Balance, + ContractMessageTranscoder, + ExtrinsicOpts, + PairSigner, + RuntimeApi, + EXEC_RESULT_MAX_KEY_COL_WIDTH, }; use crate::name_value_println; use anyhow::Result; -use jsonrpsee::{core::client::ClientT, rpc_params, ws_client::WsClientBuilder}; +use jsonrpsee::{ + core::client::ClientT, + rpc_params, + ws_client::WsClientBuilder, +}; use serde::Serialize; use sp_core::Bytes; use std::fmt::Debug; -use subxt::{rpc::NumberOrHex, ClientBuilder, Config, DefaultConfig}; +use subxt::{ + rpc::NumberOrHex, + ClientBuilder, + Config, + DefaultConfig, +}; type ContractExecResult = pallet_contracts_primitives::ContractExecResult; @@ -53,7 +70,8 @@ pub struct CallCommand { impl CallCommand { pub fn run(&self) -> Result<()> { - let (_, contract_metadata) = load_metadata(self.extrinsic_opts.manifest_path.as_ref())?; + let (_, contract_metadata) = + load_metadata(self.extrinsic_opts.manifest_path.as_ref())?; let transcoder = ContractMessageTranscoder::new(&contract_metadata); let call_data = transcoder.encode(&self.message, &self.args)?; log::debug!("message data: {:?}", hex::encode(&call_data)); @@ -95,7 +113,8 @@ impl CallCommand { match result.result { Ok(ref ret_val) => { - let value = transcoder.decode_return(&self.message, &mut &ret_val.data.0[..])?; + let value = + transcoder.decode_return(&self.message, &mut &ret_val.data.0[..])?; name_value_println!( "Result", String::from("Success!"), @@ -106,7 +125,11 @@ impl CallCommand { format!("{:?}", ret_val.did_revert()), EXEC_RESULT_MAX_KEY_COL_WIDTH ); - name_value_println!("Data", format!("{}", value), EXEC_RESULT_MAX_KEY_COL_WIDTH); + name_value_println!( + "Data", + format!("{}", value), + EXEC_RESULT_MAX_KEY_COL_WIDTH + ); } Err(err) => { name_value_println!( diff --git a/src/cmd/extrinsics/events.rs b/src/cmd/extrinsics/events.rs index 98031c9bc84087a215bcdbd518794d00eb248dab..4cf9251fa909c476b594f8b392539484ce1d37ea 100644 --- a/src/cmd/extrinsics/events.rs +++ b/src/cmd/extrinsics/events.rs @@ -16,14 +16,27 @@ use super::{ runtime_api::api::contracts::events::ContractEmitted, - transcode::{env_types, ContractMessageTranscoder, TranscoderBuilder}, + transcode::{ + env_types, + ContractMessageTranscoder, + TranscoderBuilder, + }, RuntimeEvent, }; -use crate::{maybe_println, Verbosity, DEFAULT_KEY_COL_WIDTH}; +use crate::{ + maybe_println, + Verbosity, + DEFAULT_KEY_COL_WIDTH, +}; use colored::Colorize as _; use anyhow::Result; -use subxt::{self, DefaultConfig, Event, TransactionEvents}; +use subxt::{ + self, + DefaultConfig, + Event, + TransactionEvents, +}; pub fn display_events( result: &TransactionEvents, @@ -32,7 +45,7 @@ pub fn display_events( verbosity: &Verbosity, ) -> Result<()> { if matches!(verbosity, Verbosity::Quiet) { - return Ok(()); + return Ok(()) } if matches!(verbosity, Verbosity::Verbose) { @@ -50,7 +63,8 @@ pub fn display_events( let event = event?; 
log::debug!("displaying event {:?}", event); - let event_metadata = subxt_metadata.event(event.pallet_index, event.variant_index)?; + let event_metadata = + subxt_metadata.event(event.pallet_index, event.variant_index)?; let event_fields = event_metadata.variant().fields(); println!( @@ -82,7 +96,8 @@ pub fn display_events( name }); - let decoded_field = events_transcoder.decode(field.ty().id(), event_data)?; + let decoded_field = + events_transcoder.decode(field.ty().id(), event_data)?; maybe_println!( verbosity, "{:width$}{}", diff --git a/src/cmd/extrinsics/instantiate.rs b/src/cmd/extrinsics/instantiate.rs index 27823aaf992ee45e6ab587f007ab485666bbedfb..969bb8c00a88208f2c03598c3b24525f2a3499e5 100644 --- a/src/cmd/extrinsics/instantiate.rs +++ b/src/cmd/extrinsics/instantiate.rs @@ -15,21 +15,53 @@ // along with cargo-contract. If not, see . use super::{ - display_contract_exec_result, display_events, parse_balance, runtime_api::api, - wait_for_success_and_handle_error, Balance, CodeHash, ContractAccount, - ContractMessageTranscoder, ExtrinsicOpts, PairSigner, RuntimeApi, + display_contract_exec_result, + display_events, + parse_balance, + runtime_api::api, + wait_for_success_and_handle_error, + Balance, + CodeHash, + ContractAccount, + ContractMessageTranscoder, + ExtrinsicOpts, + PairSigner, + RuntimeApi, EXEC_RESULT_MAX_KEY_COL_WIDTH, }; -use crate::{name_value_println, util::decode_hex, Verbosity}; -use anyhow::{anyhow, Context, Result}; -use jsonrpsee::{core::client::ClientT, rpc_params, ws_client::WsClientBuilder}; +use crate::{ + name_value_println, + util::decode_hex, + Verbosity, +}; +use anyhow::{ + anyhow, + Context, + Result, +}; +use jsonrpsee::{ + core::client::ClientT, + rpc_params, + ws_client::WsClientBuilder, +}; use serde::Serialize; -use sp_core::{crypto::Ss58Codec, Bytes}; +use sp_core::{ + crypto::Ss58Codec, + Bytes, +}; use std::{ fs, - path::{Path, PathBuf}, + path::{ + Path, + PathBuf, + }, +}; +use subxt::{ + rpc::NumberOrHex, + ClientBuilder, + Config, + DefaultConfig, }; -use subxt::{rpc::NumberOrHex, ClientBuilder, Config, DefaultConfig}; type ContractInstantiateResult = pallet_contracts_primitives::ContractInstantiateResult; @@ -106,9 +138,11 @@ impl InstantiateCommand { } let code = match (self.wasm_path.as_ref(), self.code_hash.as_ref()) { - (Some(_), Some(_)) => Err(anyhow!( - "Specify either `--wasm-path` or `--code-hash` but not both" - )), + (Some(_), Some(_)) => { + Err(anyhow!( + "Specify either `--wasm-path` or `--code-hash` but not both" + )) + } (Some(wasm_path), None) => load_code(wasm_path), (None, None) => { // default to the target contract wasm in the current project, @@ -135,7 +169,9 @@ impl InstantiateCommand { transcoder, }; - async_std::task::block_on(async move { exec.exec(code, self.extrinsic_opts.dry_run).await }) + async_std::task::block_on(async move { + exec.exec(code, self.extrinsic_opts.dry_run).await + }) } } @@ -200,12 +236,13 @@ impl<'a> Exec<'a> { } } display_contract_exec_result(&result)?; - return Ok(()); + return Ok(()) } match code { Code::Upload(code) => { - let (code_hash, contract_account) = self.instantiate_with_code(code).await?; + let (code_hash, contract_account) = + self.instantiate_with_code(code).await?; name_value_println!("Code hash", format!("{:?}", code_hash)); name_value_println!("Contract", contract_account.to_ss58check()); } @@ -217,7 +254,10 @@ impl<'a> Exec<'a> { Ok(()) } - async fn instantiate_with_code(&self, code: Bytes) -> Result<(CodeHash, ContractAccount)> { + async fn instantiate_with_code( + 
&self, + code: Bytes, + ) -> Result<(CodeHash, ContractAccount)> { let api = self.subxt_api().await?; let tx_progress = api .tx() @@ -336,9 +376,9 @@ mod tests { ) .is_ok()); // without 0x prefix - assert!( - parse_code_hash("d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d") - .is_ok() + assert!(parse_code_hash( + "d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d" ) + .is_ok()) } } diff --git a/src/cmd/extrinsics/integration_tests.rs b/src/cmd/extrinsics/integration_tests.rs index df77309815fb6f3353ed52b0ae30d026e8db1aa4..9e01a7ac05f1972041fe696d070b81e1ee5dbd3a 100644 --- a/src/cmd/extrinsics/integration_tests.rs +++ b/src/cmd/extrinsics/integration_tests.rs @@ -16,8 +16,18 @@ use anyhow::Result; use predicates::prelude::*; -use std::{ffi::OsStr, path::Path, process, str, thread, time}; -use subxt::{Client, ClientBuilder}; +use std::{ + ffi::OsStr, + path::Path, + process, + str, + thread, + time, +}; +use subxt::{ + Client, + ClientBuilder, +}; const CONTRACTS_NODE: &str = "substrate-contracts-node"; @@ -68,22 +78,24 @@ impl ContractsNodeProcess { ); let result = ClientBuilder::new().build().await; if let Ok(client) = result { - break Ok(client); + break Ok(client) } if attempts < MAX_ATTEMPTS { attempts += 1; - continue; + continue } if let Err(err) = result { - break Err(err); + break Err(err) } }; match client { - Ok(client) => Ok(Self { - proc, - client, - tmp_dir, - }), + Ok(client) => { + Ok(Self { + proc, + client, + tmp_dir, + }) + } Err(err) => { let err = anyhow::anyhow!( "Failed to connect to node rpc after {} attempts: {}", diff --git a/src/cmd/extrinsics/mod.rs b/src/cmd/extrinsics/mod.rs index 158a7a14ca3e50abfde15de550cf1feb3332ae02..9c4fbbeb17e9c3cb5d52a2227aa12c7c31a135a7 100644 --- a/src/cmd/extrinsics/mod.rs +++ b/src/cmd/extrinsics/mod.rs @@ -25,22 +25,41 @@ mod upload; #[cfg(feature = "integration-tests")] mod integration_tests; -use anyhow::{anyhow, Context, Result}; -use std::{fs::File, path::PathBuf}; +use anyhow::{ + anyhow, + Context, + Result, +}; +use std::{ + fs::File, + path::PathBuf, +}; use self::events::display_events; use crate::{ - crate_metadata::CrateMetadata, name_value_println, workspace::ManifestPath, Verbosity, + crate_metadata::CrateMetadata, + name_value_println, + workspace::ManifestPath, + Verbosity, VerbosityFlags, }; use pallet_contracts_primitives::ContractResult; -use sp_core::{crypto::Pair, sr25519}; -use subxt::{Config, DefaultConfig}; +use sp_core::{ + crypto::Pair, + sr25519, +}; +use subxt::{ + Config, + DefaultConfig, +}; pub use self::transcode::ContractMessageTranscoder; pub use call::CallCommand; pub use instantiate::InstantiateCommand; -pub use runtime_api::api::{DispatchError as RuntimeDispatchError, Event as RuntimeEvent}; +pub use runtime_api::api::{ + DispatchError as RuntimeDispatchError, + Event as RuntimeEvent, +}; pub use upload::UploadCommand; type Balance = u128; @@ -105,16 +124,18 @@ pub fn load_metadata( if !path.exists() { return Err(anyhow!( "Metadata file not found. Try building with `cargo contract build`." 
- )); + )) } - let file = - File::open(&path).context(format!("Failed to open metadata file {}", path.display()))?; - let metadata: contract_metadata::ContractMetadata = serde_json::from_reader(file).context( - format!("Failed to deserialize metadata file {}", path.display()), - )?; - let ink_metadata = - serde_json::from_value(serde_json::Value::Object(metadata.abi)).context(format!( + let file = File::open(&path) + .context(format!("Failed to open metadata file {}", path.display()))?; + let metadata: contract_metadata::ContractMetadata = serde_json::from_reader(file) + .context(format!( + "Failed to deserialize metadata file {}", + path.display() + ))?; + let ink_metadata = serde_json::from_value(serde_json::Value::Object(metadata.abi)) + .context(format!( "Failed to deserialize ink project metadata from file {}", path.display() ))?; @@ -142,7 +163,9 @@ const STORAGE_DEPOSIT_KEY: &str = "Storage Deposit"; pub const EXEC_RESULT_MAX_KEY_COL_WIDTH: usize = STORAGE_DEPOSIT_KEY.len() + 1; /// Print to stdout the fields of the result of a `instantiate` or `call` dry-run via RPC. -pub fn display_contract_exec_result(result: &ContractResult) -> Result<()> { +pub fn display_contract_exec_result( + result: &ContractResult, +) -> Result<()> { let mut debug_message_lines = std::str::from_utf8(&result.debug_message) .context("Error decoding UTF8 debug message bytes")? .lines(); diff --git a/src/cmd/extrinsics/runtime_api/mod.rs b/src/cmd/extrinsics/runtime_api/mod.rs index 024b30bbb4bafc270fa6dbb5a2b974dc1afaa8af..db48cff3f279893fd68e96e0179bdb8d3f3a8ed5 100644 --- a/src/cmd/extrinsics/runtime_api/mod.rs +++ b/src/cmd/extrinsics/runtime_api/mod.rs @@ -14,5 +14,7 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . 
-#[subxt::subxt(runtime_metadata_path = "src/cmd/extrinsics/runtime_api/contracts_runtime.scale")] +#[subxt::subxt( + runtime_metadata_path = "src/cmd/extrinsics/runtime_api/contracts_runtime.scale" +)] pub mod api {} diff --git a/src/cmd/extrinsics/transcode/decode.rs b/src/cmd/extrinsics/transcode/decode.rs index 2c1291c6bf5e811a246ba79054ad2c826674f806..f22f061c8bec4f36b39645feaea4f657146a43c7 100644 --- a/src/cmd/extrinsics/transcode/decode.rs +++ b/src/cmd/extrinsics/transcode/decode.rs @@ -16,14 +16,34 @@ use super::{ env_types::EnvTypesTranscoder, - scon::{Map, Tuple, Value}, + scon::{ + Map, + Tuple, + Value, + }, CompositeTypeFields, }; -use anyhow::{Context, Result}; -use scale::{Compact, Decode, Input}; +use anyhow::{ + Context, + Result, +}; +use scale::{ + Compact, + Decode, + Input, +}; use scale_info::{ - form::{Form, PortableForm}, - Field, PortableRegistry, Type, TypeDef, TypeDefCompact, TypeDefPrimitive, TypeDefVariant, + form::{ + Form, + PortableForm, + }, + Field, + PortableRegistry, + Type, + TypeDef, + TypeDefCompact, + TypeDefPrimitive, + TypeDefVariant, }; pub struct Decoder<'a> { @@ -32,7 +52,10 @@ pub struct Decoder<'a> { } impl<'a> Decoder<'a> { - pub fn new(registry: &'a PortableRegistry, env_types: &'a EnvTypesTranscoder) -> Self { + pub fn new( + registry: &'a PortableRegistry, + env_types: &'a EnvTypesTranscoder, + ) -> Self { Self { registry, env_types, @@ -40,10 +63,9 @@ impl<'a> Decoder<'a> { } pub fn decode(&self, type_id: u32, input: &mut &[u8]) -> Result { - let ty = self - .registry - .resolve(type_id) - .ok_or_else(|| anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id))?; + let ty = self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id) + })?; log::debug!( "Decoding input with type id `{:?}` and definition `{:?}`", type_id, @@ -65,10 +87,9 @@ impl<'a> Decoder<'a> { input: &mut &[u8], ) -> Result { let type_id = ty.id(); - let ty = self - .registry - .resolve(type_id) - .ok_or_else(|| anyhow::anyhow!("Failed to find type with id '{}'", type_id))?; + let ty = self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!("Failed to find type with id '{}'", type_id) + })?; if *ty.type_def() == TypeDef::Primitive(TypeDefPrimitive::U8) { let mut bytes = vec![0u8; len]; @@ -84,7 +105,12 @@ impl<'a> Decoder<'a> { } } - fn decode_type(&self, id: u32, ty: &Type, input: &mut &[u8]) -> Result { + fn decode_type( + &self, + id: u32, + ty: &Type, + input: &mut &[u8], + ) -> Result { match ty.type_def() { TypeDef::Composite(composite) => { let ident = ty.path().segments().last().map(|s| s.as_str()); @@ -111,7 +137,9 @@ impl<'a> Decoder<'a> { } TypeDef::Primitive(primitive) => self.decode_primitive(primitive, input), TypeDef::Compact(compact) => self.decode_compact(compact, input), - TypeDef::BitSequence(_) => Err(anyhow::anyhow!("bitvec decoding not yet supported")), + TypeDef::BitSequence(_) => { + Err(anyhow::anyhow!("bitvec decoding not yet supported")) + } } .context(format!("Error decoding type {}: {}", id, ty.path())) } @@ -144,7 +172,9 @@ impl<'a> Decoder<'a> { tuple.into_iter().collect::>(), ))) } - CompositeTypeFields::NoFields => Ok(Value::Tuple(Tuple::new(ident, Vec::new()))), + CompositeTypeFields::NoFields => { + Ok(Value::Tuple(Tuple::new(ident, Vec::new()))) + } } } @@ -185,23 +215,33 @@ impl<'a> Decoder<'a> { } } - fn decode_primitive(&self, primitive: &TypeDefPrimitive, input: &mut &[u8]) -> Result { + fn decode_primitive( + &self, + primitive: &TypeDefPrimitive, + input: 
&mut &[u8], + ) -> Result { match primitive { TypeDefPrimitive::Bool => Ok(Value::Bool(bool::decode(input)?)), - TypeDefPrimitive::Char => Err(anyhow::anyhow!("scale codec not implemented for char")), + TypeDefPrimitive::Char => { + Err(anyhow::anyhow!("scale codec not implemented for char")) + } TypeDefPrimitive::Str => Ok(Value::String(String::decode(input)?)), TypeDefPrimitive::U8 => decode_uint::(input), TypeDefPrimitive::U16 => decode_uint::(input), TypeDefPrimitive::U32 => decode_uint::(input), TypeDefPrimitive::U64 => decode_uint::(input), TypeDefPrimitive::U128 => decode_uint::(input), - TypeDefPrimitive::U256 => Err(anyhow::anyhow!("U256 currently not supported")), + TypeDefPrimitive::U256 => { + Err(anyhow::anyhow!("U256 currently not supported")) + } TypeDefPrimitive::I8 => decode_int::(input), TypeDefPrimitive::I16 => decode_int::(input), TypeDefPrimitive::I32 => decode_int::(input), TypeDefPrimitive::I64 => decode_int::(input), TypeDefPrimitive::I128 => decode_int::(input), - TypeDefPrimitive::I256 => Err(anyhow::anyhow!("I256 currently not supported")), + TypeDefPrimitive::I256 => { + Err(anyhow::anyhow!("I256 currently not supported")) + } } } @@ -210,55 +250,87 @@ impl<'a> Decoder<'a> { compact: &TypeDefCompact, input: &mut &[u8], ) -> Result { - let mut decode_compact_primitive = |primitive: &TypeDefPrimitive| match primitive { - TypeDefPrimitive::U8 => Ok(Value::UInt(Compact::::decode(input)?.0.into())), - TypeDefPrimitive::U16 => Ok(Value::UInt(Compact::::decode(input)?.0.into())), - TypeDefPrimitive::U32 => Ok(Value::UInt(Compact::::decode(input)?.0.into())), - TypeDefPrimitive::U64 => Ok(Value::UInt(Compact::::decode(input)?.0.into())), - TypeDefPrimitive::U128 => Ok(Value::UInt(Compact::::decode(input)?.into())), - prim => Err(anyhow::anyhow!( - "{:?} not supported. Expected unsigned int primitive.", - prim - )), + let mut decode_compact_primitive = |primitive: &TypeDefPrimitive| { + match primitive { + TypeDefPrimitive::U8 => { + Ok(Value::UInt(Compact::::decode(input)?.0.into())) + } + TypeDefPrimitive::U16 => { + Ok(Value::UInt(Compact::::decode(input)?.0.into())) + } + TypeDefPrimitive::U32 => { + Ok(Value::UInt(Compact::::decode(input)?.0.into())) + } + TypeDefPrimitive::U64 => { + Ok(Value::UInt(Compact::::decode(input)?.0.into())) + } + TypeDefPrimitive::U128 => { + Ok(Value::UInt(Compact::::decode(input)?.into())) + } + prim => { + Err(anyhow::anyhow!( + "{:?} not supported. 
Expected unsigned int primitive.", + prim + )) + } + } }; let type_id = compact.type_param().id(); - let ty = self - .registry - .resolve(type_id) - .ok_or_else(|| anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id))?; + let ty = self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id) + })?; match ty.type_def() { TypeDef::Primitive(primitive) => decode_compact_primitive(primitive), - TypeDef::Composite(composite) => match composite.fields() { - [field] => { - let type_id = field.ty().id(); - let field_ty = self.registry.resolve(type_id).ok_or_else(|| { - anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id) - })?; - if let TypeDef::Primitive(primitive) = field_ty.type_def() { - let struct_ident = ty.path().segments().last().map(|s| s.as_str()); - let field_value = decode_compact_primitive(primitive)?; - let compact_composite = match field.name() { - Some(name) => Value::Map(Map::new( - struct_ident, - vec![(Value::String(name.to_string()), field_value)] - .into_iter() - .collect(), - )), - None => Value::Tuple(Tuple::new(struct_ident, vec![field_value])), - }; - Ok(compact_composite) - } else { - Err(anyhow::anyhow!( - "Composite type must have a single primitive field" - )) + TypeDef::Composite(composite) => { + match composite.fields() { + [field] => { + let type_id = field.ty().id(); + let field_ty = + self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!( + "Failed to resolve type with id `{:?}`", + type_id + ) + })?; + if let TypeDef::Primitive(primitive) = field_ty.type_def() { + let struct_ident = + ty.path().segments().last().map(|s| s.as_str()); + let field_value = decode_compact_primitive(primitive)?; + let compact_composite = match field.name() { + Some(name) => { + Value::Map(Map::new( + struct_ident, + vec![( + Value::String(name.to_string()), + field_value, + )] + .into_iter() + .collect(), + )) + } + None => { + Value::Tuple(Tuple::new( + struct_ident, + vec![field_value], + )) + } + }; + Ok(compact_composite) + } else { + Err(anyhow::anyhow!( + "Composite type must have a single primitive field" + )) + } } + _ => Err(anyhow::anyhow!("Composite type must have a single field")), } - _ => Err(anyhow::anyhow!("Composite type must have a single field")), - }, - _ => Err(anyhow::anyhow!( - "Compact type must be a primitive or a composite type" - )), + } + _ => { + Err(anyhow::anyhow!( + "Compact type must be a primitive or a composite type" + )) + } } } } diff --git a/src/cmd/extrinsics/transcode/encode.rs b/src/cmd/extrinsics/transcode/encode.rs index 3dab351f5ff757ea97bb3f9d1075fa69771c8a57..13e6782794a915caa39f2db5e339783aeb7c069f 100644 --- a/src/cmd/extrinsics/transcode/encode.rs +++ b/src/cmd/extrinsics/transcode/encode.rs @@ -14,17 +14,36 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . 
-use super::{env_types::EnvTypesTranscoder, scon::Value, CompositeTypeFields}; +use super::{ + env_types::EnvTypesTranscoder, + scon::Value, + CompositeTypeFields, +}; use anyhow::Result; use itertools::Itertools; -use scale::{Compact, Encode, Output}; +use scale::{ + Compact, + Encode, + Output, +}; use scale_info::{ - form::{Form, PortableForm}, - Field, PortableRegistry, TypeDef, TypeDefCompact, TypeDefPrimitive, TypeDefTuple, + form::{ + Form, + PortableForm, + }, + Field, + PortableRegistry, + TypeDef, + TypeDefCompact, + TypeDefPrimitive, + TypeDefTuple, TypeDefVariant, }; use std::{ - convert::{TryFrom, TryInto}, + convert::{ + TryFrom, + TryInto, + }, error::Error, fmt::Debug, str::FromStr, @@ -36,7 +55,10 @@ pub struct Encoder<'a> { } impl<'a> Encoder<'a> { - pub fn new(registry: &'a PortableRegistry, env_types: &'a EnvTypesTranscoder) -> Self { + pub fn new( + registry: &'a PortableRegistry, + env_types: &'a EnvTypesTranscoder, + ) -> Self { Self { registry, env_types, @@ -47,10 +69,9 @@ impl<'a> Encoder<'a> { where O: Output + Debug, { - let ty = self - .registry - .resolve(type_id) - .ok_or_else(|| anyhow::anyhow!("Failed to resolve type with id '{:?}'", type_id))?; + let ty = self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!("Failed to resolve type with id '{:?}'", type_id) + })?; log::debug!( "Encoding value `{:?}` with type id `{:?}` and definition `{:?}`", @@ -60,7 +81,9 @@ impl<'a> Encoder<'a> { ); if !self.env_types.try_encode(type_id, value, output)? { self.encode_type(ty.type_def(), value, output) - .map_err(|e| anyhow::anyhow!("Error encoding value for {:?}: {}", ty, e))? + .map_err(|e| { + anyhow::anyhow!("Error encoding value for {:?}: {}", ty, e) + })? } Ok(()) } @@ -76,14 +99,20 @@ impl<'a> Encoder<'a> { self.encode_composite(composite.fields(), value, output) } TypeDef::Variant(variant) => self.encode_variant_type(variant, value, output), - TypeDef::Array(array) => self.encode_seq(array.type_param(), value, false, output), + TypeDef::Array(array) => { + self.encode_seq(array.type_param(), value, false, output) + } TypeDef::Tuple(tuple) => self.encode_tuple(tuple, value, output), TypeDef::Sequence(sequence) => { self.encode_seq(sequence.type_param(), value, true, output) } - TypeDef::Primitive(primitive) => self.encode_primitive(primitive, value, output), + TypeDef::Primitive(primitive) => { + self.encode_primitive(primitive, value, output) + } TypeDef::Compact(compact) => self.encode_compact(compact, value, output), - TypeDef::BitSequence(_) => Err(anyhow::anyhow!("bitvec encoding not yet supported")), + TypeDef::BitSequence(_) => { + Err(anyhow::anyhow!("bitvec encoding not yet supported")) + } } } @@ -96,40 +125,50 @@ impl<'a> Encoder<'a> { let struct_type = CompositeTypeFields::from_fields(fields)?; match value { - Value::Map(map) => match struct_type { - CompositeTypeFields::Unnamed(fields) => { - for (field, value) in fields.iter().zip(map.values()) { - self.encode(field.ty().id(), value, output)?; + Value::Map(map) => { + match struct_type { + CompositeTypeFields::Unnamed(fields) => { + for (field, value) in fields.iter().zip(map.values()) { + self.encode(field.ty().id(), value, output)?; + } + Ok(()) } - Ok(()) - } - CompositeTypeFields::NoFields => Ok(()), - CompositeTypeFields::Named(named_fields) => { - for named_field in named_fields { - let field_name = named_field.name(); - let value = map.get_by_str(field_name).ok_or_else(|| { - anyhow::anyhow!("Missing a field named `{}`", field_name) - })?; - 
self.encode(named_field.field().ty().id(), value, output) - .map_err(|e| { - anyhow::anyhow!("Error encoding field `{}`: {}", field_name, e) + CompositeTypeFields::NoFields => Ok(()), + CompositeTypeFields::Named(named_fields) => { + for named_field in named_fields { + let field_name = named_field.name(); + let value = map.get_by_str(field_name).ok_or_else(|| { + anyhow::anyhow!("Missing a field named `{}`", field_name) })?; + self.encode(named_field.field().ty().id(), value, output) + .map_err(|e| { + anyhow::anyhow!( + "Error encoding field `{}`: {}", + field_name, + e + ) + })?; + } + Ok(()) } - Ok(()) } - }, - Value::Tuple(tuple) => match struct_type { - CompositeTypeFields::Unnamed(fields) => { - for (field, value) in fields.iter().zip(tuple.values()) { - self.encode(field.ty().id(), value, output)?; + } + Value::Tuple(tuple) => { + match struct_type { + CompositeTypeFields::Unnamed(fields) => { + for (field, value) in fields.iter().zip(tuple.values()) { + self.encode(field.ty().id(), value, output)?; + } + Ok(()) + } + CompositeTypeFields::NoFields => Ok(()), + CompositeTypeFields::Named(_) => { + return Err(anyhow::anyhow!( + "Type is a struct requiring named fields" + )) } - Ok(()) - } - CompositeTypeFields::NoFields => Ok(()), - CompositeTypeFields::Named(_) => { - return Err(anyhow::anyhow!("Type is a struct requiring named fields")) } - }, + } v => { if let Ok(single_field) = fields.iter().exactly_one() { self.encode(single_field.ty().id(), value, output) @@ -176,12 +215,16 @@ impl<'a> Encoder<'a> { output: &mut O, ) -> Result<()> { let variant_ident = match value { - Value::Map(map) => map - .ident() - .ok_or_else(|| anyhow::anyhow!("Missing enum variant identifier for map")), - Value::Tuple(tuple) => tuple - .ident() - .ok_or_else(|| anyhow::anyhow!("Missing enum variant identifier for tuple")), + Value::Map(map) => { + map.ident().ok_or_else(|| { + anyhow::anyhow!("Missing enum variant identifier for map") + }) + } + Value::Tuple(tuple) => { + tuple.ident().ok_or_else(|| { + anyhow::anyhow!("Missing enum variant identifier for tuple") + }) + } v => Err(anyhow::anyhow!("Invalid enum variant value '{:?}'", v)), }?; @@ -206,10 +249,9 @@ impl<'a> Encoder<'a> { encode_len: bool, output: &mut O, ) -> Result<()> { - let ty = self - .registry - .resolve(ty.id()) - .ok_or_else(|| anyhow::anyhow!("Failed to find type with id '{}'", ty.id()))?; + let ty = self.registry.resolve(ty.id()).ok_or_else(|| { + anyhow::anyhow!("Failed to find type with id '{}'", ty.id()) + })?; match value { Value::Seq(values) => { if encode_len { @@ -227,7 +269,9 @@ impl<'a> Encoder<'a> { output.push_byte(*byte); } } - value => return Err(anyhow::anyhow!("{:?} cannot be encoded as an array", value)), + value => { + return Err(anyhow::anyhow!("{:?} cannot be encoded as an array", value)) + } } Ok(()) } @@ -247,7 +291,9 @@ impl<'a> Encoder<'a> { Err(anyhow::anyhow!("Expected a bool value")) } } - TypeDefPrimitive::Char => Err(anyhow::anyhow!("scale codec not implemented for char")), + TypeDefPrimitive::Char => { + Err(anyhow::anyhow!("scale codec not implemented for char")) + } TypeDefPrimitive::Str => { if let Value::String(s) = value { s.encode_to(output); @@ -261,13 +307,17 @@ impl<'a> Encoder<'a> { TypeDefPrimitive::U32 => encode_uint::(value, "u32", output), TypeDefPrimitive::U64 => encode_uint::(value, "u64", output), TypeDefPrimitive::U128 => encode_uint::(value, "u128", output), - TypeDefPrimitive::U256 => Err(anyhow::anyhow!("U256 currently not supported")), + TypeDefPrimitive::U256 => { + 
Err(anyhow::anyhow!("U256 currently not supported")) + } TypeDefPrimitive::I8 => encode_int::(value, "i8", output), TypeDefPrimitive::I16 => encode_int::(value, "i16", output), TypeDefPrimitive::I32 => encode_int::(value, "i32", output), TypeDefPrimitive::I64 => encode_int::(value, "i64", output), TypeDefPrimitive::I128 => encode_int::(value, "i128", output), - TypeDefPrimitive::I256 => Err(anyhow::anyhow!("I256 currently not supported")), + TypeDefPrimitive::I256 => { + Err(anyhow::anyhow!("I256 currently not supported")) + } } } @@ -278,36 +328,40 @@ impl<'a> Encoder<'a> { output: &mut O, ) -> Result<()> { let mut encode_compact_primitive = - |primitive: &TypeDefPrimitive, value: &Value| match primitive { - TypeDefPrimitive::U8 => { - let uint = uint_from_value::(value, "u8")?; - Compact(uint).encode_to(output); - Ok(()) - } - TypeDefPrimitive::U16 => { - let uint = uint_from_value::(value, "u16")?; - Compact(uint).encode_to(output); - Ok(()) - } - TypeDefPrimitive::U32 => { - let uint = uint_from_value::(value, "u32")?; - Compact(uint).encode_to(output); - Ok(()) - } - TypeDefPrimitive::U64 => { - let uint = uint_from_value::(value, "u64")?; - Compact(uint).encode_to(output); - Ok(()) - } - TypeDefPrimitive::U128 => { - let uint = uint_from_value::(value, "u128")?; - Compact(uint).encode_to(output); - Ok(()) + |primitive: &TypeDefPrimitive, value: &Value| { + match primitive { + TypeDefPrimitive::U8 => { + let uint = uint_from_value::(value, "u8")?; + Compact(uint).encode_to(output); + Ok(()) + } + TypeDefPrimitive::U16 => { + let uint = uint_from_value::(value, "u16")?; + Compact(uint).encode_to(output); + Ok(()) + } + TypeDefPrimitive::U32 => { + let uint = uint_from_value::(value, "u32")?; + Compact(uint).encode_to(output); + Ok(()) + } + TypeDefPrimitive::U64 => { + let uint = uint_from_value::(value, "u64")?; + Compact(uint).encode_to(output); + Ok(()) + } + TypeDefPrimitive::U128 => { + let uint = uint_from_value::(value, "u128")?; + Compact(uint).encode_to(output); + Ok(()) + } + _ => { + Err(anyhow::anyhow!( + "Compact encoding not supported for {:?}", + primitive + )) + } } - _ => Err(anyhow::anyhow!( - "Compact encoding not supported for {:?}", - primitive - )), }; let ty = self @@ -321,14 +375,19 @@ impl<'a> Encoder<'a> { })?; match ty.type_def() { TypeDef::Primitive(primitive) => encode_compact_primitive(primitive, value), - TypeDef::Composite(composite) => match composite.fields() { - [field] => { - let type_id = field.ty().id(); - let field_ty = self.registry.resolve(type_id).ok_or_else(|| { - anyhow::anyhow!("Failed to resolve type with id `{:?}`", type_id) - })?; - if let TypeDef::Primitive(primitive) = field_ty.type_def() { - let field_values: Vec<_> = match value { + TypeDef::Composite(composite) => { + match composite.fields() { + [field] => { + let type_id = field.ty().id(); + let field_ty = + self.registry.resolve(type_id).ok_or_else(|| { + anyhow::anyhow!( + "Failed to resolve type with id `{:?}`", + type_id + ) + })?; + if let TypeDef::Primitive(primitive) = field_ty.type_def() { + let field_values: Vec<_> = match value { Value::Map(map) => Ok(map.values().collect()), Value::Tuple(tuple) => Ok(tuple.values().collect()), x => Err(anyhow::anyhow!( @@ -336,25 +395,28 @@ impl<'a> Encoder<'a> { x )), }?; - if field_values.len() == 1 { - let field_value = field_values[0]; - encode_compact_primitive(primitive, field_value) + if field_values.len() == 1 { + let field_value = field_values[0]; + encode_compact_primitive(primitive, field_value) + } else { + 
Err(anyhow::anyhow!( + "Compact composite value must have a single field" + )) + } } else { Err(anyhow::anyhow!( - "Compact composite value must have a single field" + "Composite type must have a single primitive field" )) } - } else { - Err(anyhow::anyhow!( - "Composite type must have a single primitive field" - )) } + _ => Err(anyhow::anyhow!("Composite type must have a single field")), } - _ => Err(anyhow::anyhow!("Composite type must have a single field")), - }, - _ => Err(anyhow::anyhow!( - "Compact type must be a primitive or a composite type" - )), + } + _ => { + Err(anyhow::anyhow!( + "Compact type must be a primitive or a composite type" + )) + } } } } @@ -375,11 +437,13 @@ where let uint = T::from_str(&sanitized)?; Ok(uint) } - _ => Err(anyhow::anyhow!( - "Expected a {} or a String value, got {}", - expected, - value - )), + _ => { + Err(anyhow::anyhow!( + "Expected a {} or a String value, got {}", + expected, + value + )) + } } } @@ -417,11 +481,13 @@ where let i = T::from_str(&sanitized)?; Ok(i) } - _ => Err(anyhow::anyhow!( - "Expected a {} or a String value, got {}", - expected, - value - )), + _ => { + Err(anyhow::anyhow!( + "Expected a {} or a String value, got {}", + expected, + value + )) + } }?; int.encode_to(output); Ok(()) diff --git a/src/cmd/extrinsics/transcode/env_types.rs b/src/cmd/extrinsics/transcode/env_types.rs index 9989c7c0070a029e4d818d57a7e2ab2696486895..df4198c2620b069c4a4d51b9d86d0f1eb1c0e03e 100644 --- a/src/cmd/extrinsics/transcode/env_types.rs +++ b/src/cmd/extrinsics/transcode/env_types.rs @@ -16,10 +16,27 @@ use super::scon::Value; use anyhow::Result; -use scale::{Decode, Encode, Output}; -use scale_info::{form::PortableForm, IntoPortable, Path, TypeInfo}; -use sp_core::crypto::{AccountId32, Ss58Codec}; -use std::{boxed::Box, collections::HashMap, convert::TryFrom, str::FromStr}; +use scale::{ + Decode, + Encode, + Output, +}; +use scale_info::{ + form::PortableForm, + IntoPortable, + Path, + TypeInfo, +}; +use sp_core::crypto::{ + AccountId32, + Ss58Codec, +}; +use std::{ + boxed::Box, + collections::HashMap, + convert::TryFrom, + str::FromStr, +}; /// Provides custom encoding and decoding for predefined environment types. #[derive(Default)] @@ -39,7 +56,12 @@ impl EnvTypesTranscoder { /// # Errors /// /// - If the custom encoding fails. - pub fn try_encode(&self, type_id: u32, value: &Value, output: &mut O) -> Result + pub fn try_encode( + &self, + type_id: u32, + value: &Value, + output: &mut O, + ) -> Result where O: Output, { @@ -116,15 +138,29 @@ impl CustomTypeTranscoder for AccountId { } fn encode_value(&self, value: &Value) -> Result> { let account_id = match value { - Value::Literal(literal) => AccountId32::from_str(literal).map_err(|e| { - anyhow::anyhow!("Error parsing AccountId from literal `{}`: {}", literal, e) - })?, - Value::String(string) => AccountId32::from_str(string).map_err(|e| { - anyhow::anyhow!("Error parsing AccountId from string '{}': {}", string, e) - })?, - Value::Bytes(bytes) => AccountId32::try_from(bytes.bytes()).map_err(|_| { - anyhow::anyhow!("Error converting bytes `{:?}` to AccountId", bytes) - })?, + Value::Literal(literal) => { + AccountId32::from_str(literal).map_err(|e| { + anyhow::anyhow!( + "Error parsing AccountId from literal `{}`: {}", + literal, + e + ) + })? + } + Value::String(string) => { + AccountId32::from_str(string).map_err(|e| { + anyhow::anyhow!( + "Error parsing AccountId from string '{}': {}", + string, + e + ) + })? 
+ } + Value::Bytes(bytes) => { + AccountId32::try_from(bytes.bytes()).map_err(|_| { + anyhow::anyhow!("Error converting bytes `{:?}` to AccountId", bytes) + })? + } _ => { return Err(anyhow::anyhow!( "Expected a string or a literal for an AccountId" diff --git a/src/cmd/extrinsics/transcode/mod.rs b/src/cmd/extrinsics/transcode/mod.rs index 421b2e585b404fb9c5ec406adc23b5d35fde3732..898575c2e595fc47cc67bb6e3cb07e25f3e53a67 100644 --- a/src/cmd/extrinsics/transcode/mod.rs +++ b/src/cmd/extrinsics/transcode/mod.rs @@ -88,15 +88,32 @@ mod scon; mod transcoder; pub use self::{ - scon::{Map, Value}, - transcoder::{Transcoder, TranscoderBuilder}, + scon::{ + Map, + Value, + }, + transcoder::{ + Transcoder, + TranscoderBuilder, + }, }; use anyhow::Result; -use ink_metadata::{ConstructorSpec, InkProject, MessageSpec}; -use scale::{Compact, Decode, Input}; +use ink_metadata::{ + ConstructorSpec, + InkProject, + MessageSpec, +}; +use scale::{ + Compact, + Decode, + Input, +}; use scale_info::{ - form::{Form, PortableForm}, + form::{ + Form, + PortableForm, + }, Field, }; use std::fmt::Debug; @@ -132,9 +149,9 @@ impl<'a> ContractMessageTranscoder<'a> { (None, Some(m)) => (m.selector(), m.args()), (Some(_), Some(_)) => { return Err(anyhow::anyhow!( - "Invalid metadata: both a constructor and message found with name '{}'", - name - )) + "Invalid metadata: both a constructor and message found with name '{}'", + name + )) } (None, None) => { return Err(anyhow::anyhow!( @@ -166,7 +183,10 @@ impl<'a> ContractMessageTranscoder<'a> { .find(|msg| msg.label().contains(&name.to_string())) } - fn find_constructor_spec(&self, name: &str) -> Option<&ConstructorSpec> { + fn find_constructor_spec( + &self, + name: &str, + ) -> Option<&ConstructorSpec> { self.constructors() .find(|msg| msg.label().contains(&name.to_string())) } @@ -257,9 +277,9 @@ impl<'a> ContractMessageTranscoder<'a> { } pub fn decode_return(&self, name: &str, data: &mut &[u8]) -> Result { - let msg_spec = self - .find_message_spec(name) - .ok_or_else(|| anyhow::anyhow!("Failed to find message spec with name '{}'", name))?; + let msg_spec = self.find_message_spec(name).ok_or_else(|| { + anyhow::anyhow!("Failed to find message spec with name '{}'", name) + })?; if let Some(return_ty) = msg_spec.return_type().opt_type() { self.transcoder.decode(return_ty.ty().id(), data) } else { @@ -298,12 +318,14 @@ impl CompositeTypeFields { } else if fields.iter().all(|f| f.name().is_some()) { let fields = fields .iter() - .map(|field| CompositeTypeNamedField { - name: field - .name() - .expect("All fields have a name; qed") - .to_owned(), - field: field.clone(), + .map(|field| { + CompositeTypeNamedField { + name: field + .name() + .expect("All fields have a name; qed") + .to_owned(), + field: field.clone(), + } }) .collect(); Ok(Self::Named(fields)) diff --git a/src/cmd/extrinsics/transcode/scon/display.rs b/src/cmd/extrinsics/transcode/scon/display.rs index 1f35733d596c7fbd8aabd7d99a9ae073307d29f5..8ccaf8e39ef4a55fe19bc9e3463516ee9fbf15b3 100644 --- a/src/cmd/extrinsics/transcode/scon/display.rs +++ b/src/cmd/extrinsics/transcode/scon/display.rs @@ -14,8 +14,20 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . 
-use super::{Bytes, Map, Seq, Tuple, Value}; -use std::fmt::{Debug, Display, Formatter, LowerHex, Result}; +use super::{ + Bytes, + Map, + Seq, + Tuple, + Value, +}; +use std::fmt::{ + Debug, + Display, + Formatter, + LowerHex, + Result, +}; /// Wraps Value for custom Debug impl to provide pretty-printed Display struct DisplayValue<'a>(&'a Value); diff --git a/src/cmd/extrinsics/transcode/scon/mod.rs b/src/cmd/extrinsics/transcode/scon/mod.rs index 5d4acebd4cfedd59435b574241feb0f95b40fd72..66b45ca258cc9018f4bc986c0d99b3b45e1b0bff 100644 --- a/src/cmd/extrinsics/transcode/scon/mod.rs +++ b/src/cmd/extrinsics/transcode/scon/mod.rs @@ -22,10 +22,19 @@ mod parse; use indexmap::IndexMap; use std::{ - cmp::{Eq, Ordering}, - hash::{Hash, Hasher}, + cmp::{ + Eq, + Ordering, + }, + hash::{ + Hash, + Hasher, + }, iter::FromIterator, - ops::{Index, IndexMut}, + ops::{ + Index, + IndexMut, + }, }; pub use self::parse::parse_value; @@ -83,7 +92,7 @@ impl Ord for Map { impl PartialEq for Map { fn eq(&self, other: &Map) -> bool { if self.map.len() != other.map.len() { - return false; + return false } self.iter().zip(other.iter()).all(|(a, b)| a == b) } diff --git a/src/cmd/extrinsics/transcode/scon/parse.rs b/src/cmd/extrinsics/transcode/scon/parse.rs index 173fba18d7e74320d9a9053985b55e366e79efc2..2e6ae484258d25ddb6e2d83f6a2ba909b3ae236a 100644 --- a/src/cmd/extrinsics/transcode/scon/parse.rs +++ b/src/cmd/extrinsics/transcode/scon/parse.rs @@ -14,22 +14,50 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . -use super::{Bytes, Map, Tuple, Value}; +use super::{ + Bytes, + Map, + Tuple, + Value, +}; use escape8259::unescape; use nom::{ branch::alt, - bytes::complete::{tag, take_while1}, - character::complete::{alphanumeric1, anychar, char, digit1, hex_digit1, multispace0}, - multi::{many0, separated_list0}, - sequence::{delimited, pair, separated_pair, tuple}, - AsChar, IResult, Parser, + bytes::complete::{ + tag, + take_while1, + }, + character::complete::{ + alphanumeric1, + anychar, + char, + digit1, + hex_digit1, + multispace0, + }, + multi::{ + many0, + separated_list0, + }, + sequence::{ + delimited, + pair, + separated_pair, + tuple, + }, + AsChar, + IResult, + Parser, +}; +use nom_supreme::{ + error::ErrorTree, + ParserExt, }; -use nom_supreme::{error::ErrorTree, ParserExt}; /// Attempt to parse a SCON value pub fn parse_value(input: &str) -> anyhow::Result { - let (_, value) = - scon_value(input).map_err(|err| anyhow::anyhow!("Error parsing Value: {}", err))?; + let (_, value) = scon_value(input) + .map_err(|err| anyhow::anyhow!("Error parsing Value: {}", err))?; Ok(value) } @@ -367,7 +395,9 @@ mod tests { scon_value(trailing).unwrap(), ( "", - Value::Seq(vec![Value::String("a".into()), Value::String("b".into())].into()) + Value::Seq( + vec![Value::String("a".into()), Value::String("b".into())].into() + ) ) ); } diff --git a/src/cmd/extrinsics/transcode/transcoder.rs b/src/cmd/extrinsics/transcode/transcoder.rs index 2bd8570a5dc48b5885eaa819c0a9e2def749995d..00e8d35c140255d02c64b1b63ab308fa0eba0481 100644 --- a/src/cmd/extrinsics/transcode/transcoder.rs +++ b/src/cmd/extrinsics/transcode/transcoder.rs @@ -17,14 +17,25 @@ use super::{ decode::Decoder, encode::Encoder, - env_types::{CustomTypeTranscoder, EnvTypesTranscoder, PathKey, TypesByPath}, + env_types::{ + CustomTypeTranscoder, + EnvTypesTranscoder, + PathKey, + TypesByPath, + }, scon::Value, }; use anyhow::Result; use scale::Output; -use scale_info::{PortableRegistry, 
TypeInfo}; -use std::{collections::HashMap, fmt::Debug}; +use scale_info::{ + PortableRegistry, + TypeInfo, +}; +use std::{ + collections::HashMap, + fmt::Debug, +}; /// Encode strings to SCALE encoded output. /// Decode SCALE encoded input into `Value` objects. @@ -119,11 +130,22 @@ impl<'a> TranscoderBuilder<'a> { #[cfg(test)] mod tests { - use super::super::scon::{self, Map, Tuple, Value}; - use super::*; + use super::{ + super::scon::{ + self, + Map, + Tuple, + Value, + }, + *, + }; use crate::cmd::extrinsics::transcode; use scale::Encode; - use scale_info::{MetaType, Registry, TypeInfo}; + use scale_info::{ + MetaType, + Registry, + TypeInfo, + }; fn registry_with_type() -> Result<(PortableRegistry, u32)> where @@ -142,7 +164,9 @@ mod tests { { let (registry, ty) = registry_with_type::()?; let transcoder = TranscoderBuilder::new(®istry) - .register_custom_type::(transcode::env_types::AccountId) + .register_custom_type::( + transcode::env_types::AccountId, + ) .done(); let value = scon::parse_value(input)?; @@ -213,7 +237,10 @@ mod tests { transcode_roundtrip::("-2147483648", Value::Int(i32::min_value().into()))?; transcode_roundtrip::("2147483647", Value::Int(i32::max_value().into()))?; - transcode_roundtrip::("-9223372036854775808", Value::Int(i64::min_value().into()))?; + transcode_roundtrip::( + "-9223372036854775808", + Value::Int(i64::min_value().into()), + )?; transcode_roundtrip::( "\"9_223_372_036_854_775_807\"", Value::Int(i64::max_value().into()), @@ -231,7 +258,10 @@ mod tests { #[test] fn transcode_byte_array() -> Result<()> { - transcode_roundtrip::<[u8; 2]>(r#"0x0000"#, Value::Bytes(vec![0x00, 0x00].into()))?; + transcode_roundtrip::<[u8; 2]>( + r#"0x0000"#, + Value::Bytes(vec![0x00, 0x00].into()), + )?; transcode_roundtrip::<[u8; 4]>( r#"0xDEADBEEF"#, Value::Bytes(vec![0xDE, 0xAD, 0xBE, 0xEF].into()), @@ -335,7 +365,10 @@ mod tests { ( Value::String("d".to_string()), Value::Seq( - Vec::new().into_iter().collect::>().into(), + Vec::new() + .into_iter() + .collect::>() + .into(), ), ), ] @@ -555,11 +588,15 @@ mod tests { vec![ ( Value::String("no_alias".into()), - Value::Literal("5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY".into()), + Value::Literal( + "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY".into(), + ), ), ( Value::String("aliased".into()), - Value::Literal("5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty".into()), + Value::Literal( + "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty".into(), + ), ), ] .into_iter() @@ -599,7 +636,10 @@ mod tests { Some("S"), vec![( Value::String("a".to_string()), - Value::Tuple(Tuple::new(Some("CompactStruct"), vec![Value::UInt(33)])), + Value::Tuple(Tuple::new( + Some("CompactStruct"), + vec![Value::UInt(33)], + )), )] .into_iter() .collect(), diff --git a/src/cmd/extrinsics/upload.rs b/src/cmd/extrinsics/upload.rs index b9db64c71973d4ada4751ba12c1473bdaf71a0b0..29f09a5aff0a574eda0eff3e43af9b0480261ac3 100644 --- a/src/cmd/extrinsics/upload.rs +++ b/src/cmd/extrinsics/upload.rs @@ -15,19 +15,42 @@ // along with cargo-contract. If not, see . 
use super::{ - display_events, runtime_api::api, wait_for_success_and_handle_error, Balance, CodeHash, - ContractMessageTranscoder, ExtrinsicOpts, PairSigner, RuntimeApi, + display_events, + runtime_api::api, + wait_for_success_and_handle_error, + Balance, + CodeHash, + ContractMessageTranscoder, + ExtrinsicOpts, + PairSigner, + RuntimeApi, }; use crate::name_value_println; -use anyhow::{Context, Result}; -use jsonrpsee::{core::client::ClientT, rpc_params, ws_client::WsClientBuilder}; +use anyhow::{ + Context, + Result, +}; +use jsonrpsee::{ + core::client::ClientT, + rpc_params, + ws_client::WsClientBuilder, +}; use serde::Serialize; use sp_core::Bytes; -use std::{fmt::Debug, path::PathBuf}; -use subxt::{rpc::NumberOrHex, ClientBuilder, Config, DefaultConfig}; +use std::{ + fmt::Debug, + path::PathBuf, +}; +use subxt::{ + rpc::NumberOrHex, + ClientBuilder, + Config, + DefaultConfig, +}; type CodeUploadResult = pallet_contracts_primitives::CodeUploadResult; -type CodeUploadReturnValue = pallet_contracts_primitives::CodeUploadReturnValue; +type CodeUploadReturnValue = + pallet_contracts_primitives::CodeUploadReturnValue; #[derive(Debug, clap::Args)] #[clap(name = "upload", about = "Upload a contract's code")] @@ -92,7 +115,8 @@ impl UploadCommand { }; let params = rpc_params!(call_request); - let result: CodeUploadResult = cli.request("contracts_upload_code", params).await?; + let result: CodeUploadResult = + cli.request("contracts_upload_code", params).await?; result.map_err(|e| anyhow::anyhow!("Failed to execute call via rpc: {:?}", e)) } diff --git a/src/cmd/metadata.rs b/src/cmd/metadata.rs index 687ed8a5b3bb861383b580bcd6ab24caed256d88..0e759e9c08ff1b467300199371b1b98e1a11372c 100644 --- a/src/cmd/metadata.rs +++ b/src/cmd/metadata.rs @@ -16,22 +16,42 @@ use crate::{ crate_metadata::CrateMetadata, - maybe_println, util, - workspace::{ManifestPath, Workspace}, - Network, UnstableFlags, Verbosity, + maybe_println, + util, + workspace::{ + ManifestPath, + Workspace, + }, + Network, + UnstableFlags, + Verbosity, }; use anyhow::Result; -use blake2::digest::{consts::U32, Digest as _}; +use blake2::digest::{ + consts::U32, + Digest as _, +}; use colored::Colorize; use contract_metadata::{ - CodeHash, Compiler, Contract, ContractMetadata, Language, Source, SourceCompiler, - SourceLanguage, SourceWasm, User, + CodeHash, + Compiler, + Contract, + ContractMetadata, + Language, + Source, + SourceCompiler, + SourceLanguage, + SourceWasm, + User, }; use semver::Version; use std::{ fs, - path::{Path, PathBuf}, + path::{ + Path, + PathBuf, + }, }; use url::Url; @@ -87,7 +107,8 @@ pub(crate) fn execute( format!("[{}/{}]", current_progress, total_steps).bold(), "Generating metadata".bright_green().bold() ); - let target_dir_arg = format!("--target-dir={}", target_directory.to_string_lossy()); + let target_dir_arg = + format!("--target-dir={}", target_directory.to_string_lossy()); let stdout = util::invoke_cargo( "run", &[ @@ -103,7 +124,8 @@ pub(crate) fn execute( vec![], )?; - let ink_meta: serde_json::Map = serde_json::from_slice(&stdout)?; + let ink_meta: serde_json::Map = + serde_json::from_slice(&stdout)?; let metadata = ContractMetadata::new(source, contract, user, ink_meta); { let mut metadata = metadata.clone(); @@ -135,7 +157,9 @@ pub(crate) fn execute( .with_profile_release_lto(false)?; Ok(()) })? - .with_metadata_gen_package(crate_metadata.manifest_path.absolute_directory()?)? + .with_metadata_gen_package( + crate_metadata.manifest_path.absolute_directory()?, + )? 
.using_temp(generate_metadata)?; } @@ -201,9 +225,9 @@ fn extended_metadata( builder.license(license); } - let contract = builder - .build() - .map_err(|err| anyhow::anyhow!("Invalid contract metadata builder state: {}", err))?; + let contract = builder.build().map_err(|err| { + anyhow::anyhow!("Invalid contract metadata builder state: {}", err) + })?; // user defined metadata let user = crate_metadata.user.clone().map(User::new); @@ -226,14 +250,23 @@ fn blake2_hash(code: &[u8]) -> CodeHash { #[cfg(feature = "test-ci-only")] #[cfg(test)] mod tests { - use crate::cmd::metadata::blake2_hash; use crate::{ - cmd, crate_metadata::CrateMetadata, util::tests::with_new_contract_project, ManifestPath, + cmd, + cmd::metadata::blake2_hash, + crate_metadata::CrateMetadata, + util::tests::with_new_contract_project, + ManifestPath, }; use anyhow::Context; use contract_metadata::*; - use serde_json::{Map, Value}; - use std::{fmt::Write, fs}; + use serde_json::{ + Map, + Value, + }; + use std::{ + fmt::Write, + fs, + }; use toml::value; struct TestContractManifest { @@ -301,13 +334,18 @@ mod tests { with_new_contract_project(|manifest_path| { // add optional metadata fields let mut test_manifest = TestContractManifest::new(manifest_path)?; - test_manifest.add_package_value("description", "contract description".into())?; - test_manifest.add_package_value("documentation", "http://documentation.com".into())?; - test_manifest.add_package_value("repository", "http://repository.com".into())?; + test_manifest + .add_package_value("description", "contract description".into())?; + test_manifest + .add_package_value("documentation", "http://documentation.com".into())?; + test_manifest + .add_package_value("repository", "http://repository.com".into())?; test_manifest.add_package_value("homepage", "http://homepage.com".into())?; test_manifest.add_package_value("license", "Apache-2.0".into())?; - test_manifest - .add_user_metadata_value("some-user-provided-field", "and-its-value".into())?; + test_manifest.add_user_metadata_value( + "some-user-provided-field", + "and-its-value".into(), + )?; test_manifest.add_user_metadata_value( "more-user-provided-fields", vec!["and", "their", "values"].into(), @@ -378,7 +416,8 @@ mod tests { let expected_wasm = build_byte_str(&fs_wasm); let expected_language = - SourceLanguage::new(Language::Ink, crate_metadata.ink_version).to_string(); + SourceLanguage::new(Language::Ink, crate_metadata.ink_version) + .to_string(); let expected_rustc_version = semver::Version::parse(&rustc_version::version()?.to_string())?; let expected_compiler = @@ -388,7 +427,11 @@ mod tests { .insert("some-user-provided-field".into(), "and-its-value".into()); expected_user_metadata.insert( "more-user-provided-fields".into(), - serde_json::Value::Array(vec!["and".into(), "their".into(), "values".into()]), + serde_json::Value::Array(vec![ + "and".into(), + "their".into(), + "values".into(), + ]), ); assert_eq!(build_byte_str(&expected_hash.0[..]), hash.as_str().unwrap()); diff --git a/src/cmd/mod.rs b/src/cmd/mod.rs index afee32e888b8c6c7f00de4c0b40e7df029de67e3..c40a997139ac272702dd007246e3817af1a588ca 100644 --- a/src/cmd/mod.rs +++ b/src/cmd/mod.rs @@ -21,10 +21,17 @@ pub mod new; pub mod test; pub(crate) use self::{ - build::{BuildCommand, CheckCommand}, + build::{ + BuildCommand, + CheckCommand, + }, decode::DecodeCommand, test::TestCommand, }; mod extrinsics; -pub(crate) use self::extrinsics::{CallCommand, InstantiateCommand, UploadCommand}; +pub(crate) use self::extrinsics::{ + CallCommand, + 
InstantiateCommand, + UploadCommand, +}; diff --git a/src/cmd/new.rs b/src/cmd/new.rs index f0b95e3078983ff1f5fdd0568608910da9e082e0..9cff6e73dcab6aa44806e8b2bc41c49d32b079f2 100644 --- a/src/cmd/new.rs +++ b/src/cmd/new.rs @@ -14,7 +14,11 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . -use std::{env, fs, path::Path}; +use std::{ + env, + fs, + path::Path, +}; use anyhow::Result; @@ -23,7 +27,9 @@ where P: AsRef, { if !name.chars().all(|c| c.is_alphanumeric() || c == '_') { - anyhow::bail!("Contract names can only contain alphanumeric characters and underscores"); + anyhow::bail!( + "Contract names can only contain alphanumeric characters and underscores" + ); } if !name @@ -55,7 +61,10 @@ where #[cfg(test)] mod tests { use super::*; - use crate::util::tests::{with_new_contract_project, with_tmp_dir}; + use crate::util::tests::{ + with_new_contract_project, + with_tmp_dir, + }; #[test] fn rejects_hyphenated_name() { diff --git a/src/cmd/test.rs b/src/cmd/test.rs index 2d3d28869c0e3563e7f8b252ad77e53d7f93d2a1..96e13009195f095d8b718acd576b0db6646cefea 100644 --- a/src/cmd/test.rs +++ b/src/cmd/test.rs @@ -14,10 +14,19 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . -use crate::{maybe_println, util, workspace::ManifestPath, Verbosity, VerbosityFlags}; +use crate::{ + maybe_println, + util, + workspace::ManifestPath, + Verbosity, + VerbosityFlags, +}; use anyhow::Result; use colored::Colorize; -use std::{convert::TryFrom, path::PathBuf}; +use std::{ + convert::TryFrom, + path::PathBuf, +}; /// Executes smart contract tests off-chain by delegating to `cargo test`. #[derive(Debug, clap::Args)] @@ -54,7 +63,10 @@ impl TestResult { } /// Executes `cargo +nightly test`. -pub(crate) fn execute(manifest_path: &ManifestPath, verbosity: Verbosity) -> Result { +pub(crate) fn execute( + manifest_path: &ManifestPath, + verbosity: Verbosity, +) -> Result { util::assert_channel()?; maybe_println!( @@ -64,7 +76,8 @@ pub(crate) fn execute(manifest_path: &ManifestPath, verbosity: Verbosity) -> Res "Running tests".bright_green().bold() ); - let stdout = util::invoke_cargo("test", &[""], manifest_path.directory(), verbosity, vec![])?; + let stdout = + util::invoke_cargo("test", &[""], manifest_path.directory(), verbosity, vec![])?; Ok(TestResult { stdout, verbosity }) } @@ -72,7 +85,10 @@ pub(crate) fn execute(manifest_path: &ManifestPath, verbosity: Verbosity) -> Res #[cfg(feature = "test-ci-only")] #[cfg(test)] mod tests_ci_only { - use crate::{util::tests::with_new_contract_project, Verbosity}; + use crate::{ + util::tests::with_new_contract_project, + Verbosity, + }; use regex::Regex; #[test] @@ -82,8 +98,8 @@ mod tests_ci_only { Regex::new(r"test result: ok. \d+ passed; 0 failed; \d+ ignored") .expect("regex pattern compilation failed"); - let res = - super::execute(&manifest_path, Verbosity::Default).expect("test execution failed"); + let res = super::execute(&manifest_path, Verbosity::Default) + .expect("test execution failed"); assert!(ok_output_pattern.is_match(&String::from_utf8_lossy(&res.stdout))); diff --git a/src/crate_metadata.rs b/src/crate_metadata.rs index a4503449941f2f171b1aa5a49b4750b698c3aee5..ee13412d05987a566e531cea5db30ca3df03f134 100644 --- a/src/crate_metadata.rs +++ b/src/crate_metadata.rs @@ -15,11 +15,24 @@ // along with cargo-contract. If not, see . 
use crate::ManifestPath; -use anyhow::{Context, Result}; -use cargo_metadata::{Metadata as CargoMetadata, MetadataCommand, Package}; +use anyhow::{ + Context, + Result, +}; +use cargo_metadata::{ + Metadata as CargoMetadata, + MetadataCommand, + Package, +}; use semver::Version; -use serde_json::{Map, Value}; -use std::{fs, path::PathBuf}; +use serde_json::{ + Map, + Value, +}; +use std::{ + fs, + path::PathBuf, +}; use toml::value; use url::Url; diff --git a/src/main.rs b/src/main.rs index 5711fce6a66026e104b5de88e4f38c316dae4a09..92f4ec46e190ab77fb9f4d21916a8f550d548e0d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -22,8 +22,14 @@ mod workspace; use self::{ cmd::{ - metadata::MetadataResult, BuildCommand, CallCommand, CheckCommand, DecodeCommand, - InstantiateCommand, TestCommand, UploadCommand, + metadata::MetadataResult, + BuildCommand, + CallCommand, + CheckCommand, + DecodeCommand, + InstantiateCommand, + TestCommand, + UploadCommand, }, util::DEFAULT_KEY_COL_WIDTH, workspace::ManifestPath, @@ -31,13 +37,25 @@ use self::{ use std::{ convert::TryFrom, - fmt::{Display, Formatter, Result as DisplayResult}, + fmt::{ + Display, + Formatter, + Result as DisplayResult, + }, path::PathBuf, str::FromStr, }; -use anyhow::{Error, Result}; -use clap::{AppSettings, Args, Parser, Subcommand}; +use anyhow::{ + Error, + Result, +}; +use clap::{ + AppSettings, + Args, + Parser, + Subcommand, +}; use colored::Colorize; #[derive(Debug, Parser)] @@ -368,7 +386,7 @@ impl BuildResult { .to_string() .bold() ); - return out; + return out }; let mut out = format!( diff --git a/src/util.rs b/src/util.rs index 59acf86e07f529c0b06bbb7a5a16acff30468c6a..676ae2030ac4f608ae1bdbb3f0df94c1dd32c097 100644 --- a/src/util.rs +++ b/src/util.rs @@ -15,14 +15,26 @@ // along with cargo-contract. If not, see . use crate::Verbosity; -use anyhow::{Context, Result}; +use anyhow::{ + Context, + Result, +}; use heck::ToUpperCamelCase as _; use rustc_version::Channel; use std::{ ffi::OsStr, fs, - io::{Cursor, Read, Seek, SeekFrom, Write}, - path::{Path, PathBuf}, + io::{ + Cursor, + Read, + Seek, + SeekFrom, + Write, + }, + path::{ + Path, + PathBuf, + }, process::Command, }; @@ -164,8 +176,13 @@ macro_rules! name_value_println { #[cfg(test)] pub mod tests { use crate::ManifestPath; - use std::path::Path; - use std::sync::atomic::{AtomicU32, Ordering}; + use std::{ + path::Path, + sync::atomic::{ + AtomicU32, + Ordering, + }, + }; /// Creates a temporary directory and passes the `tmp_dir` path to `f`. /// Panics if `f` returns an `Err`. @@ -179,7 +196,8 @@ pub mod tests { .expect("temporary directory creation failed"); // catch test panics in order to clean up temp dir which will be very large - f(&tmp_dir.path().canonicalize().unwrap()).expect("Error executing test with tmp dir") + f(&tmp_dir.path().canonicalize().unwrap()) + .expect("Error executing test with tmp dir") } /// Global counter to generate unique contract names in `with_new_contract_project`. 
@@ -209,7 +227,8 @@ pub mod tests { F: FnOnce(ManifestPath) -> anyhow::Result<()>, { with_tmp_dir(|tmp_dir| { - let unique_name = format!("new_project_{}", COUNTER.fetch_add(1, Ordering::SeqCst)); + let unique_name = + format!("new_project_{}", COUNTER.fetch_add(1, Ordering::SeqCst)); crate::cmd::new::execute(&unique_name, Some(tmp_dir)) .expect("new project creation failed"); @@ -261,7 +280,8 @@ pub fn unzip(template: &[u8], out_dir: PathBuf, name: Option<&str>) -> Result<() let mut contents = String::new(); file.read_to_string(&mut contents)?; let contents = contents.replace("{{name}}", name); - let contents = contents.replace("{{camel_name}}", &name.to_upper_camel_case()); + let contents = + contents.replace("{{camel_name}}", &name.to_upper_camel_case()); outfile.write_all(contents.as_bytes())?; } else { let mut v = Vec::new(); diff --git a/src/validate_wasm.rs b/src/validate_wasm.rs index 250b31eb94ff358acf0a70ba8178274495f65ac7..c80ed4abe62e27ea5d3d42600f25bbc229eff6d4 100644 --- a/src/validate_wasm.rs +++ b/src/validate_wasm.rs @@ -80,7 +80,7 @@ pub fn validate_import_section(module: &Module) -> Result<()> { None => { // the module does not contain any imports, // hence no further validation is necessary. - return Ok(()); + return Ok(()) } }; let original_imports_len = imports.len(); @@ -218,10 +218,9 @@ mod tests { // then assert!(res.is_err()); - assert!(res - .unwrap_err() - .to_string() - .contains("An unexpected panic function import was found in the contract Wasm.")); + assert!(res.unwrap_err().to_string().contains( + "An unexpected panic function import was found in the contract Wasm." + )); } #[test] @@ -233,7 +232,8 @@ mod tests { (import "env" "__ink_enforce_error_0x0110466c697010666c6970aa97cade01" (func $__ink_enforce_error_0x0110466c697010666c6970aa97cade01 (type 0))) )"#; let wasm = wabt::wat2wasm(contract).expect("invalid wabt"); - let module = parity_wasm::deserialize_buffer(&wasm).expect("deserializing must work"); + let module = + parity_wasm::deserialize_buffer(&wasm).expect("deserializing must work"); // when let res = validate_import_section(&module); @@ -283,10 +283,9 @@ mod tests { // then assert!(res.is_err()); - assert!(res - .unwrap_err() - .to_string() - .contains("An unexpected import function was found in the contract Wasm: some_fn.")); + assert!(res.unwrap_err().to_string().contains( + "An unexpected import function was found in the contract Wasm: some_fn." + )); } #[test] diff --git a/src/workspace/manifest.rs b/src/workspace/manifest.rs index e9f5bf553e4313c66fb875411071d442b2882162..f5c57b9a74de6bfbdd14ba59f3a71baac2d2b4b3 100644 --- a/src/workspace/manifest.rs +++ b/src/workspace/manifest.rs @@ -14,16 +14,25 @@ // You should have received a copy of the GNU General Public License // along with cargo-contract. If not, see . 
-use anyhow::{Context, Result}; +use anyhow::{ + Context, + Result, +}; -use super::{metadata, Profile}; +use super::{ + metadata, + Profile, +}; use crate::OptimizationPasses; use std::{ collections::HashSet, convert::TryFrom, fs, - path::{Path, PathBuf}, + path::{ + Path, + PathBuf, + }, }; use toml::value; @@ -53,10 +62,9 @@ impl ManifestPath { /// Create an arg `--manifest-path=` for `cargo` command pub fn cargo_arg(&self) -> Result { - let path = self - .path - .canonicalize() - .map_err(|err| anyhow::anyhow!("Failed to canonicalize {:?}: {:?}", self.path, err))?; + let path = self.path.canonicalize().map_err(|err| { + anyhow::anyhow!("Failed to canonicalize {:?}: {:?}", self.path, err) + })?; Ok(format!("--manifest-path={}", path.to_string_lossy())) } @@ -222,7 +230,9 @@ impl Manifest { .get_mut(dependency) .ok_or_else(|| anyhow::anyhow!("{} dependency not found", dependency))? .as_table_mut() - .ok_or_else(|| anyhow::anyhow!("{} dependency should be a table", dependency))? + .ok_or_else(|| { + anyhow::anyhow!("{} dependency should be a table", dependency) + })? .insert("version".into(), value::Value::String(version.into()))) } @@ -281,7 +291,10 @@ impl Manifest { /// /// Existing user defined settings for this section are preserved. Only if a setting is not /// defined is the preferred default set. - pub fn with_profile_release_defaults(&mut self, defaults: Profile) -> Result<&mut Self> { + pub fn with_profile_release_defaults( + &mut self, + defaults: Profile, + ) -> Result<&mut Self> { let profile_release = self.get_profile_release_table_mut()?; defaults.merge(profile_release); Ok(self) @@ -369,7 +382,10 @@ impl Manifest { /// - `[dependencies]` /// /// Dependencies with package names specified in `exclude_deps` will not be rewritten. - pub(super) fn rewrite_relative_paths(&mut self, exclude_deps: I) -> Result<&mut Self> + pub(super) fn rewrite_relative_paths( + &mut self, + exclude_deps: I, + ) -> Result<&mut Self> where I: IntoIterator, S: AsRef, @@ -379,7 +395,9 @@ impl Manifest { .parent() .expect("The manifest path is a file path so has a parent; qed"); - let to_absolute = |value_id: String, existing_path: &mut value::Value| -> Result<()> { + let to_absolute = |value_id: String, + existing_path: &mut value::Value| + -> Result<()> { let path_str = existing_path .as_str() .ok_or_else(|| anyhow::anyhow!("{} should be a string", value_id))?; @@ -396,33 +414,34 @@ impl Manifest { Ok(()) }; - let rewrite_path = |table_value: &mut value::Value, table_section: &str, default: &str| { - let table = table_value.as_table_mut().ok_or_else(|| { - anyhow::anyhow!("'[{}]' section should be a table", table_section) - })?; + let rewrite_path = + |table_value: &mut value::Value, table_section: &str, default: &str| { + let table = table_value.as_table_mut().ok_or_else(|| { + anyhow::anyhow!("'[{}]' section should be a table", table_section) + })?; - match table.get_mut("path") { - Some(existing_path) => { - to_absolute(format!("[{}]/path", table_section), existing_path) - } - None => { - let default_path = PathBuf::from(default); - if !default_path.exists() { - anyhow::bail!( - "No path specified, and the default `{}` was not found", - default - ) + match table.get_mut("path") { + Some(existing_path) => { + to_absolute(format!("[{}]/path", table_section), existing_path) + } + None => { + let default_path = PathBuf::from(default); + if !default_path.exists() { + anyhow::bail!( + "No path specified, and the default `{}` was not found", + default + ) + } + let path = abs_dir.join(default_path); + 
log::debug!("Adding default path '{}'", path.display()); + table.insert( + "path".into(), + value::Value::String(path.to_string_lossy().into()), + ); + Ok(()) } - let path = abs_dir.join(default_path); - log::debug!("Adding default path '{}'", path.display()); - table.insert( - "path".into(), - value::Value::String(path.to_string_lossy().into()), - ); - Ok(()) } - } - }; + }; // Rewrite `[lib] path = /path/to/lib.rs` if let Some(lib) = self.toml.get_mut("lib") { @@ -431,9 +450,9 @@ impl Manifest { // Rewrite `[[bin]] path = /path/to/main.rs` if let Some(bin) = self.toml.get_mut("bin") { - let bins = bin - .as_array_mut() - .ok_or_else(|| anyhow::anyhow!("'[[bin]]' section should be a table array"))?; + let bins = bin.as_array_mut().ok_or_else(|| { + anyhow::anyhow!("'[[bin]]' section should be a table array") + })?; // Rewrite `[[bin]] path =` value to an absolute path. for bin in bins { @@ -460,7 +479,10 @@ impl Manifest { if !exclude.contains(&package_name) { if let Some(dependency) = value.as_table_mut() { if let Some(dep_path) = dependency.get_mut("path") { - to_absolute(format!("dependency {}", package_name), dep_path)?; + to_absolute( + format!("dependency {}", package_name), + dep_path, + )?; } } } @@ -473,7 +495,8 @@ impl Manifest { /// Writes the amended manifest to the given path. pub fn write(&self, manifest_path: &ManifestPath) -> Result<()> { if let Some(dir) = manifest_path.directory() { - fs::create_dir_all(dir).context(format!("Creating directory '{}'", dir.display()))?; + fs::create_dir_all(dir) + .context(format!("Creating directory '{}'", dir.display()))?; } if self.metadata_package { @@ -483,7 +506,8 @@ impl Manifest { METADATA_PACKAGE_PATH.into() }; - fs::create_dir_all(&dir).context(format!("Creating directory '{}'", dir.display()))?; + fs::create_dir_all(&dir) + .context(format!("Creating directory '{}'", dir.display()))?; let contract_package_name = self .toml @@ -501,7 +525,9 @@ impl Manifest { .get("ink_metadata") .ok_or_else(|| anyhow::anyhow!("ink_metadata dependency not found"))? .as_table() - .ok_or_else(|| anyhow::anyhow!("ink_metadata dependency should be a table"))?; + .ok_or_else(|| { + anyhow::anyhow!("ink_metadata dependency should be a table") + })?; metadata::generate_package(dir, contract_package_name, ink_metadata.clone())?; } @@ -534,8 +560,8 @@ mod test { // given let cargo_toml_path = path.join("Cargo.toml"); let _ = fs::File::create(&cargo_toml_path).expect("file creation failed"); - let manifest_path = - ManifestPath::new(cargo_toml_path).expect("manifest path creation failed"); + let manifest_path = ManifestPath::new(cargo_toml_path) + .expect("manifest path creation failed"); // when let absolute_path = manifest_path diff --git a/src/workspace/metadata.rs b/src/workspace/metadata.rs index 7d8715bcd882f93b08cf511a5c0dac01fd874637..6d953254d573b6533ea4c301f06e2e4ccdf5b395 100644 --- a/src/workspace/metadata.rs +++ b/src/workspace/metadata.rs @@ -15,7 +15,10 @@ // along with cargo-contract. If not, see . 
use anyhow::Result; -use std::{fs, path::Path}; +use std::{ + fs, + path::Path, +}; use toml::value; /// Generates a cargo workspace package `metadata-gen` which will be invoked via `cargo run` to diff --git a/src/workspace/mod.rs b/src/workspace/mod.rs index 5b9ccbcdae77df26ddc7b36193f4093fda9ef378..4808b942fa49ef8a0fb5972504a5d932ae5f104c 100644 --- a/src/workspace/mod.rs +++ b/src/workspace/mod.rs @@ -20,16 +20,26 @@ mod profile; #[doc(inline)] pub use self::{ - manifest::{Manifest, ManifestPath}, + manifest::{ + Manifest, + ManifestPath, + }, profile::Profile, }; use anyhow::Result; -use cargo_metadata::{Metadata as CargoMetadata, Package, PackageId}; +use cargo_metadata::{ + Metadata as CargoMetadata, + Package, + PackageId, +}; use std::{ collections::HashMap, - path::{Path, PathBuf}, + path::{ + Path, + PathBuf, + }, }; /// Make a copy of a cargo workspace, maintaining only the directory structure and manifest @@ -47,21 +57,22 @@ pub struct Workspace { impl Workspace { /// Create a new Workspace from the supplied cargo metadata. pub fn new(metadata: &CargoMetadata, root_package: &PackageId) -> Result { - let member_manifest = |package_id: &PackageId| -> Result<(PackageId, (Package, Manifest))> { - let package = metadata - .packages - .iter() - .find(|p| p.id == *package_id) - .unwrap_or_else(|| { - panic!( - "Package '{}' is a member and should be in the packages list", - package_id - ) - }); - let manifest_path = ManifestPath::new(&package.manifest_path)?; - let manifest = Manifest::new(manifest_path)?; - Ok((package_id.clone(), (package.clone(), manifest))) - }; + let member_manifest = + |package_id: &PackageId| -> Result<(PackageId, (Package, Manifest))> { + let package = metadata + .packages + .iter() + .find(|p| p.id == *package_id) + .unwrap_or_else(|| { + panic!( + "Package '{}' is a member and should be in the packages list", + package_id + ) + }); + let manifest_path = ManifestPath::new(&package.manifest_path)?; + let manifest = Manifest::new(manifest_path)?; + Ok((package_id.clone(), (package.clone(), manifest))) + }; let members = metadata .workspace_members @@ -100,7 +111,11 @@ impl Workspace { } /// Amend the manifest of the package at `package_path` using the supplied function. - pub fn with_contract_manifest(&mut self, package_path: &Path, f: F) -> Result<&mut Self> + pub fn with_contract_manifest( + &mut self, + package_path: &Path, + f: F, + ) -> Result<&mut Self> where F: FnOnce(&mut Manifest) -> Result<()>, { @@ -112,9 +127,9 @@ impl Workspace { // canonicalize the manifest's directory path as well in order to compare // both of them. let manifest_path = manifest.path().directory()?; - let manifest_path = manifest_path - .canonicalize() - .unwrap_or_else(|_| panic!("Cannot canonicalize {}", manifest_path.display())); + let manifest_path = manifest_path.canonicalize().unwrap_or_else(|_| { + panic!("Cannot canonicalize {}", manifest_path.display()) + }); if manifest_path == package_path { Some(manifest) } else { @@ -134,7 +149,10 @@ impl Workspace { /// Generates a package to invoke for generating contract metadata. /// /// The contract metadata will be generated for the package found at `package_path`. 
- pub(super) fn with_metadata_gen_package(&mut self, package_path: PathBuf) -> Result<&mut Self> { + pub(super) fn with_metadata_gen_package( + &mut self, + package_path: PathBuf, + ) -> Result<&mut Self> { self.with_contract_manifest(&package_path, |manifest| { manifest.with_metadata_package()?; Ok(()) @@ -148,7 +166,10 @@ /// intra-workspace relative dependency paths which will be preserved. /// /// Returns the paths of the new manifests. - pub fn write<P: AsRef<Path>>(&mut self, target: P) -> Result<Vec<ManifestPath>> { + pub fn write<P: AsRef<Path>>( + &mut self, + target: P, + ) -> Result<Vec<ManifestPath>> { let exclude_member_package_names = self .members .iter()