diff --git a/Cargo.lock b/Cargo.lock index 93ec26e3f77c4e5b6e28b9e71ef97f7f5d0a729d..2fb923e487611517d45a1d5401303124bf39a4a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1215,6 +1215,7 @@ dependencies = [ "display_derive 0.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "network 0.1.0", "parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "primitives 0.1.0", "serialization 0.1.0", diff --git a/miner/Cargo.toml b/miner/Cargo.toml index aa6bc5b1324bf7b499a9cf61fba51573d9f9131b..78f2efc8018fe32ad3c28cf445948959b211b254 100644 --- a/miner/Cargo.toml +++ b/miner/Cargo.toml @@ -19,3 +19,6 @@ script = { path = "../script" } [dev-dependencies] test-data = { path = "../test-data" } + +[features] +test-helpers = [] \ No newline at end of file diff --git a/miner/src/block_assembler.rs b/miner/src/block_assembler.rs index bd8d4bb4d9b302037ff622f138de242b7a5d9da6..69d9df4c819cb9a4416c0fab8f4cbbf3817c72af 100644 --- a/miner/src/block_assembler.rs +++ b/miner/src/block_assembler.rs @@ -311,8 +311,11 @@ mod tests { use db::BlockChainDatabase; use primitives::hash::H256; use storage::SharedStore; + use chain::IndexedTransaction; use network::{ConsensusParams, ConsensusFork, Network, BitcoinCashConsensusParams}; use memory_pool::MemoryPool; + use verification::block_reward_satoshi; + use fee::{FeeCalculator, NonZeroFeeCalculator}; use self::test_data::{ChainBuilder, TransactionBuilder}; use super::{BlockAssembler, SizePolicy, NextStep, BlockTemplate}; @@ -372,8 +375,8 @@ mod tests { let mut pool = MemoryPool::new(); let storage: SharedStore = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); - pool.insert_verified(chain.at(0).into()); - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); (BlockAssembler { max_block_size: 0xffffffff, @@ -397,4 +400,24 @@ mod tests { assert_eq!(block.transactions[0].hash, hash1); assert_eq!(block.transactions[1].hash, hash0); } + + #[test] + fn block_assembler_miner_fee() { + let input_tx = test_data::genesis().transactions[0].clone(); + let tx0: IndexedTransaction = TransactionBuilder::with_input(&input_tx, 0).set_output(100_000).into(); + let expected_tx0_fee = input_tx.total_spends() - tx0.raw.total_spends(); + + let storage: SharedStore = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); + let mut pool = MemoryPool::new(); + pool.insert_verified(tx0, &FeeCalculator(storage.as_transaction_output_provider())); + + let consensus = ConsensusParams::new(Network::Mainnet, ConsensusFork::BitcoinCore); + let block = BlockAssembler { + max_block_size: 0xffffffff, + max_block_sigops: 0xffffffff, + }.create_new_block(&storage, &pool, 0, 0, &consensus); + + let expected_coinbase_value = block_reward_satoshi(1) + expected_tx0_fee; + assert_eq!(block.coinbase_value, expected_coinbase_value); + } } diff --git a/miner/src/fee.rs b/miner/src/fee.rs index 783d48f333990a374fb951b9e6e2524c4530b4af..5aa042358c7b5167ef626c38fa370fe28c5a8985 100644 --- a/miner/src/fee.rs +++ b/miner/src/fee.rs @@ -1,19 +1,49 @@ use chain::Transaction; use ser::Serializable; -use storage::TransactionProvider; - -pub fn transaction_fee(store: &TransactionProvider, transaction: &Transaction) -> u64 { - let inputs_sum = 
transaction.inputs.iter().map(|input| { - let input_transaction = store.transaction(&input.previous_output.hash) - .expect("transaction must be verified by caller"); - input_transaction.outputs[input.previous_output.index as usize].value - }).sum::<u64>(); - let outputs_sum = transaction.outputs.iter().map(|output| output.value).sum(); - inputs_sum.saturating_sub(outputs_sum) +use storage::{TransactionOutputProvider, DuplexTransactionOutputProvider}; +use MemoryPool; + +/// Transaction fee calculator for memory pool +pub trait MemoryPoolFeeCalculator { + /// Compute transaction fee + fn calculate(&self, memory_pool: &MemoryPool, tx: &Transaction) -> u64; +} + +/// Fee calculator that computes sum of real transparent fee + real shielded fee. +pub struct FeeCalculator<'a>(pub &'a TransactionOutputProvider); + +impl<'a> MemoryPoolFeeCalculator for FeeCalculator<'a> { + fn calculate(&self, memory_pool: &MemoryPool, tx: &Transaction) -> u64 { + let tx_out_provider = DuplexTransactionOutputProvider::new(self.0, memory_pool); + transaction_fee(&tx_out_provider, tx) + } +} + +/// Used in tests in this && external crates +#[cfg(any(test, feature = "test-helpers"))] +pub struct NonZeroFeeCalculator; + +#[cfg(any(test, feature = "test-helpers"))] +impl MemoryPoolFeeCalculator for NonZeroFeeCalculator { + fn calculate(&self, _: &MemoryPool, tx: &Transaction) -> u64 { + // add 100_000_000 to make sure tx won't be rejected by txpool because of fee + // + but keep ordering by outputs sum + 100_000_000 + tx.outputs.iter().fold(0, |acc, output| acc + output.value) + } +} + +pub fn transaction_fee(store: &TransactionOutputProvider, tx: &Transaction) -> u64 { + let input_value = tx.inputs.iter().fold(0, |acc, input| acc + store + .transaction_output(&input.previous_output, ::std::usize::MAX) + .map(|output| output.value) + .unwrap_or(0)); + let output_value = tx.outputs.iter().fold(0, |acc, output| acc + output.value); + + input_value.saturating_sub(output_value) } -pub fn transaction_fee_rate(store: &TransactionProvider, transaction: &Transaction) -> u64 { - transaction_fee(store, transaction) / transaction.serialized_size() as u64 +pub fn transaction_fee_rate(store: &TransactionOutputProvider, tx: &Transaction) -> u64 { + transaction_fee(store, tx) / tx.serialized_size() as u64 } #[cfg(test)] @@ -46,10 +76,10 @@ mod tests { let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into(), b1.into()])); - assert_eq!(transaction_fee(db.as_transaction_provider(), &tx0), 0); - assert_eq!(transaction_fee(db.as_transaction_provider(), &tx2), 500_000); + assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx0), 0); + assert_eq!(transaction_fee(db.as_transaction_output_provider(), &tx2), 500_000); - assert_eq!(transaction_fee_rate(db.as_transaction_provider(), &tx0), 0); - assert_eq!(transaction_fee_rate(db.as_transaction_provider(), &tx2), 4_901); + assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx0), 0); + assert_eq!(transaction_fee_rate(db.as_transaction_output_provider(), &tx2), 4_901); } } diff --git a/miner/src/lib.rs b/miner/src/lib.rs index bc7e01ee5c4992241a95505b9b2967aa11fca465..675c248e27b6087607e92d558bd8c7ec78ef0e0b 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -21,4 +21,7 @@ pub use block_assembler::{BlockAssembler, BlockTemplate}; pub use cpu_miner::find_solution; pub use memory_pool::{MemoryPool, HashedOutPoint, Information as MemoryPoolInformation, OrderingStrategy as MemoryPoolOrderingStrategy, DoubleSpendCheckResult, NonFinalDoubleSpendSet}; -pub 
use fee::{transaction_fee, transaction_fee_rate}; +pub use fee::{FeeCalculator, transaction_fee, transaction_fee_rate}; + +#[cfg(feature = "test-helpers")] +pub use fee::NonZeroFeeCalculator; \ No newline at end of file diff --git a/miner/src/memory_pool.rs b/miner/src/memory_pool.rs index 9f7b9feee1b1cb8fb1e5ca5a4836b0ef674e9413..e58d25c03af5ec6f1348b43131318f5de22b8770 100644 --- a/miner/src/memory_pool.rs +++ b/miner/src/memory_pool.rs @@ -17,6 +17,7 @@ use std::collections::VecDeque; use std::hash::{Hash, Hasher}; use ser::{Serializable, serialize}; use heapsize::HeapSizeOf; +use fee::MemoryPoolFeeCalculator; /// Transactions ordering strategy #[cfg_attr(feature="cargo-clippy", allow(enum_variant_names))] @@ -60,13 +61,13 @@ pub struct Entry { /// Throughout index of this transaction in memory pool (non persistent) pub storage_index: u64, /// Transaction fee (stored for efficiency) - pub miner_fee: i64, + pub miner_fee: u64, /// Virtual transaction fee (a way to prioritize/penalize transaction) pub miner_virtual_fee: i64, /// size + Sum(size) for all in-pool descendants pub package_size: usize, /// miner_fee + Sum(miner_fee) for all in-pool descendants - pub package_miner_fee: i64, + pub package_miner_fee: u64, /// miner_virtual_fee + Sum(miner_virtual_fee) for all in-pool descendants pub package_miner_virtual_fee: i64, } @@ -123,7 +124,7 @@ struct ByTransactionScoreOrderedEntry { /// Transaction size size: usize, /// Transaction fee - miner_fee: i64, + miner_fee: u64, /// Virtual transaction fee miner_virtual_fee: i64, } @@ -135,7 +136,7 @@ struct ByPackageScoreOrderedEntry { /// size + Sum(size) for all in-pool descendants package_size: usize, /// miner_fee + Sum(miner_fee) for all in-pool descendants - package_miner_fee: i64, + package_miner_fee: u64, /// miner_virtual_fee + Sum(miner_virtual_fee) for all in-pool descendants package_miner_virtual_fee: i64, } @@ -240,8 +241,8 @@ impl PartialOrd for ByTransactionScoreOrderedEntry { impl Ord for ByTransactionScoreOrderedEntry { fn cmp(&self, other: &Self) -> Ordering { // lesser miner score means later removal - let left = (self.miner_fee + self.miner_virtual_fee) * (other.size as i64); - let right = (other.miner_fee + other.miner_virtual_fee) * (self.size as i64); + let left = (self.miner_fee as i64 + self.miner_virtual_fee) * (other.size as i64); + let right = (other.miner_fee as i64 + other.miner_virtual_fee) * (self.size as i64); let order = right.cmp(&left); if order != Ordering::Equal { return order @@ -260,8 +261,8 @@ impl PartialOrd for ByPackageScoreOrderedEntry { impl Ord for ByPackageScoreOrderedEntry { fn cmp(&self, other: &Self) -> Ordering { // lesser miner score means later removal - let left = (self.package_miner_fee + self.package_miner_virtual_fee) * (other.package_size as i64); - let right = (other.package_miner_fee + other.package_miner_virtual_fee) * (self.package_size as i64); + let left = (self.package_miner_fee as i64 + self.package_miner_virtual_fee) * (other.package_size as i64); + let right = (other.package_miner_fee as i64 + other.package_miner_virtual_fee) * (self.package_size as i64); let order = right.cmp(&left); if order != Ordering::Equal { return order @@ -649,14 +650,16 @@ impl MemoryPool { } /// Insert verified transaction to the `MemoryPool` - pub fn insert_verified(&mut self, t: IndexedTransaction) { - let entry = self.make_entry(t); - let descendants = self.storage.remove_by_parent_hash(&entry.hash); - self.storage.insert(entry); - if let Some(descendants_iter) = descendants.map(|d| d.into_iter()) 
{ - for descendant in descendants_iter { - let descendant_entry = self.make_entry(descendant); - self.storage.insert(descendant_entry); + pub fn insert_verified<FC: MemoryPoolFeeCalculator>(&mut self, t: IndexedTransaction, fc: &FC) { + if let Some(entry) = self.make_entry(t, fc) { + let descendants = self.storage.remove_by_parent_hash(&entry.hash); + self.storage.insert(entry); + if let Some(descendants_iter) = descendants.map(|d| d.into_iter()) { + for descendant in descendants_iter { + if let Some(descendant_entry) = self.make_entry(descendant, fc) { + self.storage.insert(descendant_entry); + } + } } } } @@ -747,12 +750,18 @@ impl MemoryPool { self.storage.is_output_spent(prevout) } - fn make_entry(&mut self, t: IndexedTransaction) -> Entry { + fn make_entry<FC: MemoryPoolFeeCalculator>(&mut self, t: IndexedTransaction, fc: &FC) -> Option<Entry> { let ancestors = self.get_ancestors(&t.raw); let size = self.get_transaction_size(&t.raw); let storage_index = self.get_storage_index(); - let miner_fee = self.get_transaction_miner_fee(&t.raw); - Entry { + let miner_fee = fc.calculate(self, &t.raw); + + // do not accept any transactions that have negative OR zero fee + if miner_fee == 0 { + return None; + } + + Some(Entry { transaction: t.raw, hash: t.hash, ancestors: ancestors, @@ -764,7 +773,7 @@ package_size: size, package_miner_fee: miner_fee, package_miner_virtual_fee: 0, - } + }) } fn get_ancestors(&self, t: &Transaction) -> HashSet<H256> { @@ -784,12 +793,6 @@ t.serialized_size() } - fn get_transaction_miner_fee(&self, t: &Transaction) -> i64 { - let input_value = 0; // TODO: sum all inputs of transaction - let output_value = t.outputs.iter().fold(0, |acc, output| acc + output.value); - (output_value - input_value) as i64 - } - #[cfg(not(test))] fn get_storage_index(&mut self) -> u64 { self.storage.counter += 1; @@ -868,33 +871,38 @@ impl<'a> Iterator for MemoryPoolIterator<'a> { } #[cfg(test)] -mod tests { +pub mod tests { extern crate test_data; use chain::{Transaction, OutPoint}; use heapsize::HeapSizeOf; + use fee::NonZeroFeeCalculator; use super::{MemoryPool, OrderingStrategy, DoubleSpendCheckResult}; use self::test_data::{ChainBuilder, TransactionBuilder}; fn to_memory_pool(chain: &mut ChainBuilder) -> MemoryPool { let mut pool = MemoryPool::new(); for transaction in chain.transactions.iter().cloned() { - pool.insert_verified(transaction.into()); + pool.insert_verified(transaction.into(), &NonZeroFeeCalculator); } pool } + fn default_tx() -> Transaction { + TransactionBuilder::with_output(1).into() + } + #[test] fn test_memory_pool_heap_size() { let mut pool = MemoryPool::new(); let size1 = pool.heap_size_of_children(); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); let size2 = pool.heap_size_of_children(); assert!(size2 > size1); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); let size3 = pool.heap_size_of_children(); assert!(size3 > size2); } @@ -902,11 +910,11 @@ #[test] fn test_memory_pool_insert_same_transaction() { let mut pool = MemoryPool::new(); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); assert_eq!(pool.get_transactions_ids().len(), 1); // insert the same transaction again - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); assert_eq!(pool.get_transactions_ids().len(), 1); } @@ -916,11 +924,11 @@ 
mod tests { assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), None); assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]); - pool.insert_verified(Transaction::default().into()); - assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(Transaction::default().hash())); - assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![Transaction::default().hash()]); - assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(Transaction::default().hash())); - assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![Transaction::default().hash()]); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); + assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(default_tx().hash())); + assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![default_tx().hash()]); + assert_eq!(pool.read_with_strategy(OrderingStrategy::ByTimestamp), Some(default_tx().hash())); + assert_eq!(pool.read_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![default_tx().hash()]); } #[test] @@ -929,15 +937,15 @@ mod tests { assert_eq!(pool.remove_with_strategy(OrderingStrategy::ByTimestamp), None); assert_eq!(pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); let removed = pool.remove_with_strategy(OrderingStrategy::ByTimestamp); assert!(removed.is_some()); - assert_eq!(removed.unwrap(), Transaction::default().into()); + assert_eq!(removed.unwrap(), default_tx().into()); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); let removed = pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp); assert_eq!(removed.len(), 1); - assert_eq!(removed[0], Transaction::default().into()); + assert_eq!(removed[0], default_tx().into()); assert_eq!(pool.remove_with_strategy(OrderingStrategy::ByTimestamp), None); assert_eq!(pool.remove_n_with_strategy(100, OrderingStrategy::ByTimestamp), vec![]); @@ -947,13 +955,13 @@ mod tests { fn test_memory_pool_remove_by_hash() { let mut pool = MemoryPool::new(); - pool.insert_verified(Transaction::default().into()); + pool.insert_verified(default_tx().into(), &NonZeroFeeCalculator); assert_eq!(pool.get_transactions_ids().len(), 1); // remove and check remaining transactions - let removed = pool.remove_by_hash(&Transaction::default().hash()); + let removed = pool.remove_by_hash(&default_tx().hash()); assert!(removed.is_some()); - assert_eq!(removed.unwrap(), Transaction::default()); + assert_eq!(removed.unwrap(), default_tx()); assert_eq!(pool.get_transactions_ids().len(), 0); // remove non-existant transaction @@ -970,9 +978,9 @@ mod tests { // insert child, then parent let mut pool = MemoryPool::new(); - pool.insert_verified(chain.at(2).into()); // timestamp 0 - pool.insert_verified(chain.at(1).into()); // timestamp 1 - pool.insert_verified(chain.at(0).into()); // timestamp 2 + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); // timestamp 0 + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); // timestamp 1 + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); // timestamp 2 // check that parent transaction was removed before child trnasaction let transactions = pool.remove_n_with_strategy(3, OrderingStrategy::ByTimestamp); @@ -1015,7 +1023,7 @@ mod tests { 
assert_eq!(pool.get_transactions_ids().len(), 2); // insert child transaction back to the pool & assert transactions are removed in correct order - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); let transactions = pool.remove_n_with_strategy(3, OrderingStrategy::ByTransactionScore); assert_eq!(transactions.len(), 3); assert_eq!(transactions[0], chain.at(0).into()); @@ -1034,7 +1042,7 @@ mod tests { let mut transactions_size = 0; for transaction_index in 0..4 { - pool.insert_verified(chain.at(transaction_index).into()); + pool.insert_verified(chain.at(transaction_index).into(), &NonZeroFeeCalculator); transactions_size += chain.size(transaction_index); let info = pool.information(); @@ -1113,7 +1121,7 @@ mod tests { .into_input(0).set_output(50).store(chain) // transaction0 -> transaction1 .set_default_input(1).set_output(35).store(chain) // transaction2 .into_input(0).set_output(10).store(chain) // transaction2 -> transaction3 - .into_input(0).set_output(100).store(chain); // transaction2 -> transaction3 -> transaction4 + .into_input(0).set_output(100).store(chain); // transaction2 -> transaction3 -> transaction4 let mut pool = MemoryPool::new(); @@ -1122,8 +1130,8 @@ mod tests { // < // score({ transaction2 }) = 35/60 let expected = vec![chain.hash(2), chain.hash(0)]; - pool.insert_verified(chain.at(0).into()); - pool.insert_verified(chain.at(2).into()); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); assert_eq!(pool.read_n_with_strategy(2, OrderingStrategy::ByPackageScore), expected); // { transaction0, transaction1 } now have bigger score than { transaction2 }: @@ -1132,7 +1140,7 @@ mod tests { // score({ transaction2 }) = 35/60 ~ 0.583 // => chain1 is boosted // => so transaction with lesser individual score (but with bigger package score) is mined first - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(0), chain.hash(1), chain.hash(2)]; assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected); @@ -1141,7 +1149,7 @@ mod tests { // > // score({ transaction2, transaction3 }) = (35 + 10) / 120 ~ 0.375 // => chain2 is not boosted - pool.insert_verified(chain.at(3).into()); + pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(0), chain.hash(1), chain.hash(2), chain.hash(3)]; assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected); @@ -1150,7 +1158,7 @@ mod tests { // < // score({ transaction2, transaction3, transaction4 }) = (35 + 10 + 100) / 180 ~ 0.806 // => chain2 is boosted - pool.insert_verified(chain.at(4).into()); + pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(2), chain.hash(3), chain.hash(4), chain.hash(0), chain.hash(1)]; assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByPackageScore), expected); @@ -1171,18 +1179,18 @@ mod tests { let mut pool = MemoryPool::new(); - // chain1_parent is not linked to the chain1_grandchild + // transaction0 is not linked to the transaction2 // => they are in separate chains now - // => chain2 has greater score than both of these chains - pool.insert_verified(chain.at(3).into()); - pool.insert_verified(chain.at(0).into()); - pool.insert_verified(chain.at(2).into()); + // => transaction3 has greater score than both of these chains + 
pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(3), chain.hash(0), chain.hash(2)]; assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected); // insert the missing transaction to link together chain1 // => it now will have better score than chain2 - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(0), chain.hash(1), chain.hash(3), chain.hash(2)]; assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected); } @@ -1206,9 +1214,9 @@ mod tests { // insert level1 + level2. There are two chains: // score({ transaction3, transaction5 }) = 40 + 60 // score({ transaction4, transaction5 }) = 50 + 60 - pool.insert_verified(chain.at(5).into()); - pool.insert_verified(chain.at(3).into()); - pool.insert_verified(chain.at(4).into()); + pool.insert_verified(chain.at(5).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(4), chain.hash(3), chain.hash(5)]; assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByTransactionScore), expected); assert_eq!(pool.read_n_with_strategy(3, OrderingStrategy::ByPackageScore), expected); @@ -1217,7 +1225,7 @@ mod tests { // score({ transaction3, transaction5 }) = 40 + 60 // score({ transaction4, transaction5 }) = 50 + 60 // score({ transaction2, transaction5 }) = 30 + 60 - pool.insert_verified(chain.at(2).into()); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(4), chain.hash(3), chain.hash(2), chain.hash(5)]; assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByTransactionScore), expected); assert_eq!(pool.read_n_with_strategy(4, OrderingStrategy::ByPackageScore), expected); @@ -1227,7 +1235,7 @@ mod tests { // score({ transaction1, transaction4, transaction5 }) = 20 + 50 + 60 / 3 ~ 0.333 // score({ transaction2, transaction5 }) = 30 + 60 / 2 = 0.45 // but second chain will be removed first anyway because previous #1 ({ transaction4, transaction5}) now depends on level 01 - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(3), chain.hash(2), chain.hash(1), chain.hash(4), chain.hash(5)]; assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByTransactionScore), expected); assert_eq!(pool.read_n_with_strategy(5, OrderingStrategy::ByPackageScore), expected); @@ -1237,7 +1245,7 @@ mod tests { // score({ transaction0, transaction4, transaction5 }) = (10 + 50 + 60) / (60 + 60 + 142) ~ 0.458 // score({ transaction1, transaction3, transaction5 }) = (20 + 50 + 60) / (60 + 60 + 142) ~ 0.496 // score({ transaction2, transaction5 }) = (30 + 60) / (60 + 142) ~ 0.445 - pool.insert_verified(chain.at(0).into()); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); let expected = vec![chain.hash(2), chain.hash(1), chain.hash(0), chain.hash(4), chain.hash(3), chain.hash(5)]; assert_eq!(pool.read_n_with_strategy(6, OrderingStrategy::ByTransactionScore), expected); assert_eq!(pool.read_n_with_strategy(6, OrderingStrategy::ByPackageScore), expected); @@ -1258,17 +1266,17 @@ mod tests { assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, })); 
assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, })); - pool.insert_verified(chain.at(0).into()); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, })); - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, })); - pool.insert_verified(chain.at(2).into()); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); assert!(pool.is_spent(&OutPoint { hash: chain.hash(0), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(1), index: 0, })); assert!(!pool.is_spent(&OutPoint { hash: chain.hash(2), index: 0, })); @@ -1291,10 +1299,10 @@ mod tests { .reset().add_output(40).store(chain); // transaction3 let mut pool = MemoryPool::new(); - pool.insert_verified(chain.at(0).into()); - pool.insert_verified(chain.at(1).into()); - pool.insert_verified(chain.at(2).into()); - pool.insert_verified(chain.at(3).into()); + pool.insert_verified(chain.at(0).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(3).into(), &NonZeroFeeCalculator); assert_eq!(pool.information().transactions_count, 4); assert_eq!(pool.remove_by_prevout(&OutPoint { hash: chain.hash(0), index: 0 }), Some(vec![chain.at(1).into(), chain.at(2).into()])); @@ -1314,9 +1322,9 @@ mod tests { .reset().set_input(&chain.at(0), 2).add_output(70).store(chain); // no double spend: t0[2] -> t6 let mut pool = MemoryPool::new(); - pool.insert_verified(chain.at(1).into()); - pool.insert_verified(chain.at(2).into()); - pool.insert_verified(chain.at(4).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(2).into(), &NonZeroFeeCalculator); + pool.insert_verified(chain.at(4).into(), &NonZeroFeeCalculator); // when output is spent by nonfinal transaction match pool.check_double_spend(&chain.at(3)) { DoubleSpendCheckResult::NonFinalDoubleSpend(set) => { @@ -1359,7 +1367,7 @@ mod tests { .reset().set_input(&chain.at(0), 0).add_output(40).store(chain); // good replacement: t0[0] -> t2 let mut pool = MemoryPool::new(); - pool.insert_verified(chain.at(1).into()); + pool.insert_verified(chain.at(1).into(), &NonZeroFeeCalculator); // when output is spent by nonfinal transaction match pool.check_double_spend(&chain.at(2)) { @@ -1386,13 +1394,13 @@ mod tests { } #[test] - fn test_memory_poolis_spent() { - let tx1: Transaction = TransactionBuilder::with_default_input(0).into(); - let tx2: Transaction = TransactionBuilder::with_default_input(1).into(); + fn test_memory_pool_is_spent() { + let tx1: Transaction = TransactionBuilder::with_default_input(0).set_output(1).into(); + let tx2: Transaction = TransactionBuilder::with_default_input(1).set_output(1).into(); let out1 = tx1.inputs[0].previous_output.clone(); let out2 = tx2.inputs[0].previous_output.clone(); let mut memory_pool = MemoryPool::new(); - memory_pool.insert_verified(tx1.into()); + memory_pool.insert_verified(tx1.into(), &NonZeroFeeCalculator); assert!(memory_pool.is_spent(&out1)); 
assert!(!memory_pool.is_spent(&out2)); } diff --git a/storage/Cargo.toml b/storage/Cargo.toml index 650df00e70501dae23a44e73cfdf464bd3fe8712..6c0d4692f229c969e22f280f726bce5d9343e972 100644 --- a/storage/Cargo.toml +++ b/storage/Cargo.toml @@ -12,3 +12,4 @@ primitives = { path = "../primitives" } serialization = { path = "../serialization" } chain = { path = "../chain" } display_derive = "0.0.0" +network = { path = "../network" } diff --git a/verification/src/duplex_store.rs b/storage/src/duplex_store.rs similarity index 98% rename from verification/src/duplex_store.rs rename to storage/src/duplex_store.rs index a9ed797b0aa3ad91ec417e8f69768421c8a59e63..2b02a0913396180f3a62f83f843cfd9f1e631cf6 100644 --- a/verification/src/duplex_store.rs +++ b/storage/src/duplex_store.rs @@ -3,7 +3,7 @@ use chain::{OutPoint, TransactionOutput}; use network::TransactionOrdering; -use storage::TransactionOutputProvider; +use TransactionOutputProvider; #[derive(Clone, Copy)] pub struct DuplexTransactionOutputProvider<'a> { @@ -57,4 +57,4 @@ pub fn transaction_index_for_output_check(ordering: TransactionOrdering, tx_idx: TransactionOrdering::Topological => tx_idx, TransactionOrdering::Canonical => ::std::usize::MAX, } -} \ No newline at end of file +} diff --git a/storage/src/lib.rs b/storage/src/lib.rs index a58d11737b20cc291e73c0e1928555983f262da7..1efb206131222b06763da42ff85ae77226e58eef 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -8,6 +8,7 @@ extern crate display_derive; extern crate primitives; extern crate serialization as ser; extern crate chain; +extern crate network; mod best_block; mod block_ancestors; @@ -17,6 +18,7 @@ mod block_iterator; mod block_origin; mod block_provider; mod block_ref; +mod duplex_store; mod error; mod store; mod transaction_meta; @@ -31,6 +33,7 @@ pub use block_iterator::BlockIterator; pub use block_origin::{BlockOrigin, SideChainOrigin}; pub use block_provider::{BlockHeaderProvider, BlockProvider, IndexedBlockProvider}; pub use block_ref::BlockRef; +pub use duplex_store::{DuplexTransactionOutputProvider, NoopStore, transaction_index_for_output_check}; pub use error::Error; pub use store::{AsSubstore, Store, SharedStore, CanonStore, ConfigStore}; pub use transaction_meta::TransactionMeta; diff --git a/sync/Cargo.toml b/sync/Cargo.toml index 7ed704747e57649d6267e59f244c1951c8c4a87b..3b222904c6d2e12e6c650bcaa432873782e409fc 100644 --- a/sync/Cargo.toml +++ b/sync/Cargo.toml @@ -29,6 +29,4 @@ network = { path = "../network" } [dev-dependencies] test-data = { path = "../test-data" } - -[features] -dev = [] +miner = { path = "../miner", features = ["test-helpers"] } diff --git a/sync/src/synchronization_chain.rs b/sync/src/synchronization_chain.rs index 18d5c12c78e0f8d9265c1acc45c79d24a2ea3174..d7d574688b5dab9e6645fe212d520564af96916f 100644 --- a/sync/src/synchronization_chain.rs +++ b/sync/src/synchronization_chain.rs @@ -1,9 +1,9 @@ use std::collections::{VecDeque, HashSet}; use std::fmt; use linked_hash_map::LinkedHashMap; -use chain::{BlockHeader, Transaction, IndexedBlockHeader, IndexedBlock, IndexedTransaction}; +use chain::{BlockHeader, Transaction, IndexedBlockHeader, IndexedBlock, IndexedTransaction, OutPoint, TransactionOutput}; use storage; -use miner::{MemoryPoolOrderingStrategy, MemoryPoolInformation}; +use miner::{MemoryPoolOrderingStrategy, MemoryPoolInformation, FeeCalculator}; use network::ConsensusParams; use primitives::bytes::Bytes; use primitives::hash::H256; @@ -615,7 +615,7 @@ impl Chain { 
memory_pool.remove_by_prevout(&input.previous_output); } // now insert transaction itself - memory_pool.insert_verified(transaction); + memory_pool.insert_verified(transaction, &FeeCalculator(self.storage.as_transaction_output_provider())); } /// Calculate block locator hashes for hash queue @@ -676,6 +676,18 @@ impl storage::TransactionProvider for Chain { } } +impl storage::TransactionOutputProvider for Chain { + fn transaction_output(&self, outpoint: &OutPoint, transaction_index: usize) -> Option<TransactionOutput> { + self.memory_pool.read().transaction_output(outpoint, transaction_index) + .or_else(|| self.storage.transaction_output(outpoint, transaction_index)) + } + + fn is_spent(&self, outpoint: &OutPoint) -> bool { + self.memory_pool.read().is_spent(outpoint) + || self.storage.is_spent(outpoint) + } +} + impl storage::BlockHeaderProvider for Chain { fn block_header_bytes(&self, block_ref: storage::BlockRef) -> Option<Bytes> { use ser::serialize; @@ -896,19 +908,19 @@ mod tests { #[test] fn chain_transaction_state() { - let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); + let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), test_data::block_h1().into()])); let mut chain = Chain::new(db, ConsensusParams::new(Network::Unitest, ConsensusFork::BitcoinCore), Arc::new(RwLock::new(MemoryPool::new()))); let genesis_block = test_data::genesis(); - let block1 = test_data::block_h1(); + let block2 = test_data::block_h2(); let tx1: Transaction = test_data::TransactionBuilder::with_version(1).into(); - let tx2: Transaction = test_data::TransactionBuilder::with_version(2).into(); + let tx2: Transaction = test_data::TransactionBuilder::with_input(&test_data::genesis().transactions[0], 0).into(); let tx1_hash = tx1.hash(); let tx2_hash = tx2.hash(); chain.verify_transaction(tx1.into()); chain.insert_verified_transaction(tx2.into()); assert_eq!(chain.transaction_state(&genesis_block.transactions[0].hash()), TransactionState::Stored); - assert_eq!(chain.transaction_state(&block1.transactions[0].hash()), TransactionState::Unknown); + assert_eq!(chain.transaction_state(&block2.transactions[0].hash()), TransactionState::Unknown); assert_eq!(chain.transaction_state(&tx1_hash), TransactionState::Verifying); assert_eq!(chain.transaction_state(&tx2_hash), TransactionState::InMemory); } @@ -973,13 +985,16 @@ mod tests { #[test] fn chain_transactions_hashes_with_state() { + let input_tx1 = test_data::genesis().transactions[0].clone(); + let input_tx2 = test_data::block_h1().transactions[0].clone(); let test_chain = &mut test_data::ChainBuilder::new(); - test_data::TransactionBuilder::with_output(100).store(test_chain) // t1 - .into_input(0).add_output(200).store(test_chain) // t1 -> t2 + test_data::TransactionBuilder::with_input(&input_tx1, 0) + .add_output(1_000).store(test_chain) // t1 + .into_input(0).add_output(400).store(test_chain) // t1 -> t2 .into_input(0).add_output(300).store(test_chain) // t1 -> t2 -> t3 - .set_default_input(0).set_output(400).store(test_chain); // t4 + .set_input(&input_tx2, 0).set_output(400).store(test_chain); // t4 - let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); + let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into(), test_data::block_h1().into()])); let mut chain = Chain::new(db, ConsensusParams::new(Network::Unitest, ConsensusFork::BitcoinCore), Arc::new(RwLock::new(MemoryPool::new()))); chain.insert_verified_transaction(test_chain.at(0).into()); 
chain.insert_verified_transaction(test_chain.at(1).into()); @@ -995,14 +1010,18 @@ mod tests { #[test] fn memory_pool_transactions_are_reverified_after_reorganization() { - let b0 = test_data::block_builder().header().build().build(); + let b0 = test_data::block_builder() + .header().build() + .transaction().coinbase().output().value(100_000).build().build() + .build(); let b1 = test_data::block_builder().header().nonce(1).parent(b0.hash()).build().build(); let b2 = test_data::block_builder().header().nonce(2).parent(b0.hash()).build().build(); let b3 = test_data::block_builder().header().parent(b2.hash()).build().build(); - let tx1: Transaction = test_data::TransactionBuilder::with_version(1).into(); + let input_tx = b0.transactions[0].clone(); + let tx1: Transaction = test_data::TransactionBuilder::with_version(1).set_input(&input_tx, 0).into(); let tx1_hash = tx1.hash(); - let tx2: Transaction = test_data::TransactionBuilder::with_version(2).into(); + let tx2: Transaction = test_data::TransactionBuilder::with_input(&input_tx, 0).into(); let tx2_hash = tx2.hash(); let db = Arc::new(BlockChainDatabase::init_test_chain(vec![b0.into()])); @@ -1028,6 +1047,7 @@ mod tests { #[test] fn fork_chain_block_transaction_is_removed_from_on_block_insert() { let genesis = test_data::genesis(); + let input_tx = genesis.transactions[0].clone(); let b0 = test_data::block_builder().header().parent(genesis.hash()).build().build(); // genesis -> b0 let b1 = test_data::block_builder().header().nonce(1).parent(b0.hash()).build() .transaction().output().value(10).build().build() @@ -1036,13 +1056,16 @@ mod tests { .transaction().output().value(20).build().build() .build(); // genesis -> b0 -> b1[tx1] -> b2[tx2] let b3 = test_data::block_builder().header().nonce(2).parent(b0.hash()).build() - .transaction().output().value(30).build().build() + .transaction().input().hash(input_tx.hash()).index(0).build() + .output().value(50).build().build() .build(); // genesis -> b0 -> b3[tx3] let b4 = test_data::block_builder().header().parent(b3.hash()).build() - .transaction().output().value(40).build().build() + .transaction().input().hash(b3.transactions[0].hash()).index(0).build() + .output().value(40).build().build() .build(); // genesis -> b0 -> b3[tx3] -> b4[tx4] let b5 = test_data::block_builder().header().parent(b4.hash()).build() - .transaction().output().value(50).build().build() + .transaction().input().hash(b4.transactions[0].hash()).index(0).build() + .output().value(30).build().build() .build(); // genesis -> b0 -> b3[tx3] -> b4[tx4] -> b5[tx5] let tx1 = b1.transactions[0].clone(); @@ -1092,35 +1115,35 @@ mod tests { .input().hash(tx0.hash()).index(0).build() .build() .build(); // genesis -> b0[tx1] - // tx1 && tx2 are spending same output + // tx from b0 && tx2 are spending same output let tx2: Transaction = test_data::TransactionBuilder::with_output(20).add_input(&tx0, 0).into(); - let tx3: Transaction = test_data::TransactionBuilder::with_output(20).add_input(&tx0, 1).into(); // insert tx2 to memory pool let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); let mut chain = Chain::new(db, ConsensusParams::new(Network::Unitest, ConsensusFork::BitcoinCore), Arc::new(RwLock::new(MemoryPool::new()))); chain.insert_verified_transaction(tx2.clone().into()); - chain.insert_verified_transaction(tx3.clone().into()); // insert verified block with tx1 chain.insert_best_block(b0.into()).expect("no error"); // => tx2 is removed from memory pool, but tx3 remains - 
assert_eq!(chain.information().transactions.transactions_count, 1); + assert_eq!(chain.information().transactions.transactions_count, 0); } #[test] fn update_memory_pool_transaction() { use self::test_data::{ChainBuilder, TransactionBuilder}; + let input_tx = test_data::genesis().transactions[0].clone(); let data_chain = &mut ChainBuilder::new(); - TransactionBuilder::with_output(10).add_output(10).add_output(10).store(data_chain) // transaction0 + TransactionBuilder::with_input(&input_tx, 0).set_output(100).store(data_chain) // transaction0 .reset().set_input(&data_chain.at(0), 0).add_output(20).lock().store(data_chain) // transaction0 -> transaction1 .reset().set_input(&data_chain.at(0), 0).add_output(30).store(data_chain); // transaction0 -> transaction2 let db = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); let mut chain = Chain::new(db, ConsensusParams::new(Network::Unitest, ConsensusFork::BitcoinCore), Arc::new(RwLock::new(MemoryPool::new()))); + chain.insert_verified_transaction(data_chain.at(0).into()); chain.insert_verified_transaction(data_chain.at(1).into()); - assert_eq!(chain.information().transactions.transactions_count, 1); + assert_eq!(chain.information().transactions.transactions_count, 2); chain.insert_verified_transaction(data_chain.at(2).into()); - assert_eq!(chain.information().transactions.transactions_count, 1); // tx was replaces + assert_eq!(chain.information().transactions.transactions_count, 2); // tx was replaced } } diff --git a/sync/src/synchronization_client_core.rs b/sync/src/synchronization_client_core.rs index 453b619f60b98ca035f8b8ff4783c35bfb12d961..a91607b4ca62a0aef7184b5374487c2e99ec333f 100644 --- a/sync/src/synchronization_client_core.rs +++ b/sync/src/synchronization_client_core.rs @@ -1942,8 +1942,10 @@ pub mod tests { #[test] fn transaction_is_accepted_when_not_synchronizing() { let (_, core, sync) = create_sync(None, None); + let input_tx = test_data::genesis().transactions[0].clone(); - sync.on_transaction(1, test_data::TransactionBuilder::with_version(1).into()); + let tx1: Transaction = test_data::TransactionBuilder::with_input(&input_tx, 0).set_output(100).into(); + sync.on_transaction(1, tx1.clone().into()); assert_eq!(core.lock().information().chain.transactions.transactions_count, 1); let b1 = test_data::block_h1(); @@ -1951,7 +1953,7 @@ pub mod tests { assert!(core.lock().information().state.is_nearly_saturated()); - sync.on_transaction(1, test_data::TransactionBuilder::with_version(2).into()); + sync.on_transaction(1, test_data::TransactionBuilder::with_input(&tx1, 0).into()); assert_eq!(core.lock().information().chain.transactions.transactions_count, 2); } @@ -1966,9 +1968,10 @@ pub mod tests { #[test] fn orphaned_transaction_is_verified_when_input_is_received() { + let input_tx = test_data::genesis().transactions[0].clone(); let chain = &mut test_data::ChainBuilder::new(); - test_data::TransactionBuilder::with_output(10).store(chain) // t0 - .set_input(&chain.at(0), 0).set_output(20).store(chain); // t0 -> t1 + test_data::TransactionBuilder::with_input(&input_tx, 0).set_output(100).store(chain) // t0 + .set_input(&chain.at(0), 0).set_output(20).store(chain); // t0 -> t1 let (_, core, sync) = create_sync(None, None); diff --git a/sync/src/synchronization_server.rs b/sync/src/synchronization_server.rs index f1fd96b9ab6ef68253011af7b5f66590e8514e10..d50c678d0ba2c176e5f089080949c579f9375c3a 100644 --- a/sync/src/synchronization_server.rs +++ b/sync/src/synchronization_server.rs @@ -486,7 +486,7 @@ pub 
mod tests { use primitives::hash::H256; use chain::Transaction; use inbound_connection::tests::DummyOutboundSyncConnection; - use miner::MemoryPool; + use miner::{NonZeroFeeCalculator, MemoryPool}; use local_node::tests::{default_filterload, make_filteradd}; use synchronization_executor::Task; use synchronization_executor::tests::DummyTaskExecutor; @@ -648,7 +648,7 @@ pub mod tests { // when memory pool is non-empty let transaction = Transaction::default(); let transaction_hash = transaction.hash(); - memory_pool.write().insert_verified(transaction.into()); + memory_pool.write().insert_verified(transaction.into(), &NonZeroFeeCalculator); // when asking for memory pool transactions ids server.execute(ServerTask::Mempool(0)); // => respond with inventory @@ -734,7 +734,7 @@ pub mod tests { let tx_verified_hash = tx_verified.hash(); // given in-memory transaction { - memory_pool.write().insert_verified(tx_verified.clone().into()); + memory_pool.write().insert_verified(tx_verified.clone().into(), &NonZeroFeeCalculator); } // when asking for known in-memory transaction let inventory = vec![ diff --git a/sync/src/utils/memory_pool_transaction_provider.rs b/sync/src/utils/memory_pool_transaction_provider.rs index 33d6252116610e007533e47a090c9ca231c9870f..2cd310aa82c1467b8d87b8fbf58ec43580c39d21 100644 --- a/sync/src/utils/memory_pool_transaction_provider.rs +++ b/sync/src/utils/memory_pool_transaction_provider.rs @@ -96,9 +96,9 @@ mod tests { use std::sync::Arc; use parking_lot::RwLock; use chain::OutPoint; - use storage::{TransactionOutputProvider}; + use storage::TransactionOutputProvider; use db::BlockChainDatabase; - use miner::MemoryPool; + use miner::{MemoryPool, NonZeroFeeCalculator}; use super::MemoryPoolTransactionOutputProvider; #[test] @@ -113,9 +113,9 @@ mod tests { let storage = Arc::new(BlockChainDatabase::init_test_chain(vec![test_data::genesis().into()])); let memory_pool = Arc::new(RwLock::new(MemoryPool::new())); { - memory_pool.write().insert_verified(dchain.at(0).into()); - memory_pool.write().insert_verified(dchain.at(1).into()); - memory_pool.write().insert_verified(dchain.at(2).into()); + memory_pool.write().insert_verified(dchain.at(0).into(), &NonZeroFeeCalculator); + memory_pool.write().insert_verified(dchain.at(1).into(), &NonZeroFeeCalculator); + memory_pool.write().insert_verified(dchain.at(2).into(), &NonZeroFeeCalculator); } // when inserting t3: diff --git a/test-data/src/chain_builder.rs b/test-data/src/chain_builder.rs index baf95a80bc23544f57643ba40334d33630985c0c..7582379396a61c8b666183987311546fd55b1769 100644 --- a/test-data/src/chain_builder.rs +++ b/test-data/src/chain_builder.rs @@ -64,6 +64,11 @@ impl TransactionBuilder { builder.add_input(&Transaction::default(), output_index) } + pub fn with_input(transaction: &Transaction, output_index: u32) -> TransactionBuilder { + let builder = TransactionBuilder::default(); + builder.add_input(transaction, output_index) + } + pub fn reset(self) -> TransactionBuilder { TransactionBuilder::default() } diff --git a/verification/src/accept_block.rs b/verification/src/accept_block.rs index 18758fdcc7520c9aa966bedbf958b16878ec2327..e62ef6fce6beebfd291d056df53ea186703fc033 100644 --- a/verification/src/accept_block.rs +++ b/verification/src/accept_block.rs @@ -1,11 +1,11 @@ use network::{ConsensusParams, ConsensusFork, TransactionOrdering}; use crypto::dhash256; -use storage::{TransactionOutputProvider, BlockHeaderProvider}; +use storage::{transaction_index_for_output_check, DuplexTransactionOutputProvider, + 
TransactionOutputProvider, BlockHeaderProvider}; use script; use ser::Stream; use sigops::{transaction_sigops, transaction_sigops_cost} ; use work::block_reward_satoshi; -use duplex_store::{transaction_index_for_output_check, DuplexTransactionOutputProvider}; use deployments::BlockDeployments; use canon::CanonBlock; use error::{Error, TransactionError}; diff --git a/verification/src/accept_chain.rs b/verification/src/accept_chain.rs index 5b87d071c24b79e68bb2d87cb8d4c4460d14405a..a366deb57c69efa86f3bcf2113193867618734d7 100644 --- a/verification/src/accept_chain.rs +++ b/verification/src/accept_chain.rs @@ -1,5 +1,5 @@ use rayon::prelude::{IntoParallelRefIterator, IndexedParallelIterator, ParallelIterator}; -use storage::Store; +use storage::{DuplexTransactionOutputProvider, Store}; use network::ConsensusParams; use error::Error; use canon::CanonBlock; @@ -7,7 +7,6 @@ use accept_block::BlockAcceptor; use accept_header::HeaderAcceptor; use accept_transaction::TransactionAcceptor; use deployments::BlockDeployments; -use duplex_store::DuplexTransactionOutputProvider; use VerificationLevel; pub struct ChainAcceptor<'a> { diff --git a/verification/src/accept_transaction.rs b/verification/src/accept_transaction.rs index 8403417e9238bd1649c5d85b8fe7edc30c36c41a..bcda742c8847695b2f7ff00cf2b97948c145e66a 100644 --- a/verification/src/accept_transaction.rs +++ b/verification/src/accept_transaction.rs @@ -1,10 +1,10 @@ use primitives::hash::H256; use primitives::bytes::Bytes; use ser::Serializable; -use storage::{TransactionMetaProvider, TransactionOutputProvider}; +use storage::{TransactionMetaProvider, TransactionOutputProvider, DuplexTransactionOutputProvider, + transaction_index_for_output_check}; use network::{ConsensusParams, ConsensusFork}; use script::{Script, verify_script, VerificationFlags, TransactionSignatureChecker, TransactionInputSigner, SignatureVersion}; -use duplex_store::{DuplexTransactionOutputProvider, transaction_index_for_output_check}; use deployments::BlockDeployments; use script::Builder; use sigops::transaction_sigops; diff --git a/verification/src/chain_verifier.rs b/verification/src/chain_verifier.rs index 21ba951d1290e19afdb2835d1981bfa73a022fb5..2a9a5bfd79b2b43133a0fd306bde1108691eb41d 100644 --- a/verification/src/chain_verifier.rs +++ b/verification/src/chain_verifier.rs @@ -2,11 +2,11 @@ use hash::H256; use chain::{IndexedBlock, IndexedBlockHeader, BlockHeader, Transaction}; -use storage::{SharedStore, TransactionOutputProvider, BlockHeaderProvider, BlockOrigin}; +use storage::{SharedStore, TransactionOutputProvider, BlockHeaderProvider, BlockOrigin, + DuplexTransactionOutputProvider, NoopStore}; use network::ConsensusParams; use error::{Error, TransactionError}; use canon::{CanonBlock, CanonTransaction}; -use duplex_store::{DuplexTransactionOutputProvider, NoopStore}; use verify_chain::ChainVerifier; use verify_header::HeaderVerifier; use verify_transaction::MemoryPoolTransactionVerifier; diff --git a/verification/src/lib.rs b/verification/src/lib.rs index d1f82854c0604f9d6ba9fa05e93ef63457dd4a3c..732a74c964d4984be0f5f49337e3f6d7f16a1cb0 100644 --- a/verification/src/lib.rs +++ b/verification/src/lib.rs @@ -72,7 +72,6 @@ extern crate db; pub mod constants; mod canon; mod deployments; -mod duplex_store; mod error; mod sigops; mod timestamp; diff --git a/verification/src/verify_block.rs b/verification/src/verify_block.rs index b15b0b06329ddb7c0f774064a2d7986002c3c2e4..938d529542bcf99dd192f336186e1fb5ae7e5541 100644 --- a/verification/src/verify_block.rs +++ 
b/verification/src/verify_block.rs @@ -2,7 +2,7 @@ use std::collections::HashSet; use chain::IndexedBlock; use network::ConsensusFork; use sigops::transaction_sigops; -use duplex_store::NoopStore; +use storage::NoopStore; use error::{Error, TransactionError}; pub struct BlockVerifier<'a> { diff --git a/verification/src/verify_transaction.rs b/verification/src/verify_transaction.rs index 96564afbcc122f1357ce27d1fbbaa79817e54355..dcc16ab50fa921368f2f9557090a0d61fd6cd6f0 100644 --- a/verification/src/verify_transaction.rs +++ b/verification/src/verify_transaction.rs @@ -3,7 +3,7 @@ use ser::Serializable; use chain::IndexedTransaction; use network::{ConsensusParams, ConsensusFork}; use deployments::BlockDeployments; -use duplex_store::NoopStore; +use storage::NoopStore; use sigops::transaction_sigops; use error::TransactionError; use constants::{MIN_COINBASE_SIZE, MAX_COINBASE_SIZE};
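
The core of this change is that `MemoryPool::insert_verified` now takes a `MemoryPoolFeeCalculator`, so the pool computes a real fee (storage plus in-pool outputs via `DuplexTransactionOutputProvider`) at insertion time and silently drops zero-fee entries in `make_entry`. The sketch below is a minimal, standalone illustration of that callback pattern only; `Tx`, `Pool`, `FeeCalc`, and `RealFee` are simplified stand-ins invented for this example, not types from the `miner`/`chain` crates.

```rust
/// Stand-in transaction: just the two sums needed to price it.
struct Tx {
    input_value: u64,
    output_value: u64,
}

/// Mirrors the shape of `MemoryPoolFeeCalculator`: the pool asks the
/// calculator for a fee instead of deriving it from outputs alone.
trait FeeCalc {
    fn calculate(&self, tx: &Tx) -> u64;
}

/// "Real" fee: inputs minus outputs, saturating like `transaction_fee`.
struct RealFee;

impl FeeCalc for RealFee {
    fn calculate(&self, tx: &Tx) -> u64 {
        tx.input_value.saturating_sub(tx.output_value)
    }
}

/// Simplified pool that, like the patched `insert_verified`/`make_entry`,
/// refuses to store zero-fee transactions.
#[derive(Default)]
struct Pool {
    entries: Vec<(Tx, u64)>,
}

impl Pool {
    fn insert_verified<FC: FeeCalc>(&mut self, tx: Tx, fc: &FC) {
        let fee = fc.calculate(&tx);
        if fee == 0 {
            return; // counterpart of the `miner_fee == 0 => None` branch
        }
        self.entries.push((tx, fee));
    }
}

fn main() {
    let mut pool = Pool::default();
    pool.insert_verified(Tx { input_value: 150_000, output_value: 100_000 }, &RealFee);
    pool.insert_verified(Tx { input_value: 100_000, output_value: 100_000 }, &RealFee); // fee 0, rejected
    assert_eq!(pool.entries.len(), 1);
    assert_eq!(pool.entries[0].1, 50_000);
}
```

In the diff itself the same shape shows up as `pool.insert_verified(tx.into(), &FeeCalculator(storage.as_transaction_output_provider()))` on the sync path and `&NonZeroFeeCalculator` in tests that only care about ordering.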