[dependencies]: remove util/macros (#11501)

* [dependencies]: remove `util/macros` in favour of `maplit` (a sketch of the migration follows this list)

* fix typo: `flish` -> `flush`

* [json tests]: add log if `write` or `flush` fails

`write` output goes to `stdout`
logging goes to `stderr`

* [rocksdb-migration]: remove unused `Progress`

* [rpc test]: BTreeMap -> `btreemap!`
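
The bulk of the diff below swaps the in-tree `map!` macro for `maplit::btreemap!` at the call sites. A minimal sketch of the new style, with placeholder string values instead of the real `Pricing` variants:

```rust
use maplit::btreemap;
use std::collections::BTreeMap;

fn main() {
    // Old call sites looked like `map![0 => Pricing::Blake2F(123)]`; maplit's
    // btreemap! keeps the `key => value` entries and builds a std BTreeMap directly.
    let pricer: BTreeMap<u64, &str> = btreemap![
        0 => "blake2f",
        10 => "linear"
    ];
    assert_eq!(pricer[&0], "blake2f");
    assert_eq!(pricer.len(), 2);
}
```

Because the `key => value` syntax is unchanged, most call sites only need the import swapped.
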
Niklas Adolfsson 2020-02-19 13:07:33 +01:00 committed by GitHub
parent 70f08e1549
commit a49950e9c0
41 changed files with 296 additions and 434 deletions

Cargo.lock (generated)

@ -212,7 +212,6 @@ dependencies = [
"log", "log",
"lru-cache", "lru-cache",
"machine", "machine",
"macros",
"parity-bytes", "parity-bytes",
"parity-crypto", "parity-crypto",
"parking_lot 0.10.0", "parking_lot 0.10.0",
@ -665,7 +664,6 @@ dependencies = [
"log", "log",
"lru-cache", "lru-cache",
"machine", "machine",
"macros",
"parity-crypto", "parity-crypto",
"parking_lot 0.10.0", "parking_lot 0.10.0",
"rand 0.7.2", "rand 0.7.2",
@ -1175,7 +1173,6 @@ dependencies = [
"keccak-hash", "keccak-hash",
"log", "log",
"machine", "machine",
"macros",
"rlp", "rlp",
"spec", "spec",
"tempdir", "tempdir",
@ -1234,7 +1231,6 @@ dependencies = [
"lazy_static", "lazy_static",
"log", "log",
"machine", "machine",
"macros",
"memory-cache", "memory-cache",
"parity-bytes", "parity-bytes",
"parity-crypto", "parity-crypto",
@ -1334,7 +1330,7 @@ dependencies = [
"hex-literal", "hex-literal",
"keccak-hash", "keccak-hash",
"log", "log",
"macros", "maplit",
"num", "num",
"parity-bytes", "parity-bytes",
"parity-crypto", "parity-crypto",
@ -1643,7 +1639,6 @@ dependencies = [
"kvdb-memorydb", "kvdb-memorydb",
"log", "log",
"machine", "machine",
"macros",
"parity-bytes", "parity-bytes",
"parity-crypto", "parity-crypto",
"parity-runtime", "parity-runtime",
@ -1678,7 +1673,7 @@ name = "ethjson"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"ethereum-types", "ethereum-types",
"macros", "maplit",
"rustc-hex 1.0.0", "rustc-hex 1.0.0",
"serde", "serde",
"serde_json", "serde_json",
@ -2775,7 +2770,6 @@ dependencies = [
"keccak-hash", "keccak-hash",
"log", "log",
"lru-cache", "lru-cache",
"macros",
"parity-bytes", "parity-bytes",
"parity-crypto", "parity-crypto",
"parking_lot 0.10.0", "parking_lot 0.10.0",
@ -2789,10 +2783,6 @@ dependencies = [
"vm", "vm",
] ]
[[package]]
name = "macros"
version = "0.1.0"
[[package]] [[package]]
name = "maplit" name = "maplit"
version = "1.0.2" version = "1.0.2"
@ -2866,7 +2856,7 @@ dependencies = [
"kvdb", "kvdb",
"kvdb-rocksdb", "kvdb-rocksdb",
"log", "log",
"macros", "maplit",
"tempdir", "tempdir",
] ]
@ -3425,7 +3415,7 @@ dependencies = [
"keccak-hash", "keccak-hash",
"log", "log",
"machine", "machine",
"macros", "maplit",
"multihash", "multihash",
"order-stat", "order-stat",
"parity-bytes", "parity-bytes",
@ -3790,7 +3780,7 @@ dependencies = [
"keccak-hasher 0.1.1", "keccak-hasher 0.1.1",
"kvdb", "kvdb",
"log", "log",
"macros", "maplit",
"parity-bytes", "parity-bytes",
"patricia-trie-ethereum", "patricia-trie-ethereum",
"rlp", "rlp",


@ -292,10 +292,6 @@ Caching, Importing Blocks, and Block Information
```bash ```bash
parity-rpc parity-rpc
``` ```
* Parity Ethereum (EthCore) Secret Store
```bash
ethcore-secretstore
```
* Parity Updater Service * Parity Updater Service
```bash ```bash
parity-updater parity-hash-fetch parity-updater parity-hash-fetch
@ -303,9 +299,9 @@ Caching, Importing Blocks, and Block Information
* Parity Core Libraries (Parity Util) * Parity Core Libraries (Parity Util)
```bash ```bash
ethcore-bloom-journal blooms-db dir eip-712 fake-fetch fastmap fetch ethcore-io ethcore-bloom-journal blooms-db dir eip-712 fake-fetch fastmap fetch ethcore-io
journaldb keccak-hasher len-caching-lock macros memory-cache memzero journaldb keccak-hasher len-caching-lock memory-cache memzero
migration-rocksdb ethcore-network ethcore-network-devp2p panic_hook migration-rocksdb ethcore-network ethcore-network-devp2p panic_hook
patricia-trie-ethereum registrar rlp_compress rlp_derive parity-runtime stats patricia-trie-ethereum registrar rlp_compress parity-runtime stats
time-utils triehash-ethereum unexpected parity-version time-utils triehash-ethereum unexpected parity-version
``` ```


@ -36,7 +36,6 @@ kvdb-memorydb = { version = "0.4.0", optional = true }
kvdb-rocksdb = { version = "0.5.0", optional = true } kvdb-rocksdb = { version = "0.5.0", optional = true }
lazy_static = { version = "1.3", optional = true } lazy_static = { version = "1.3", optional = true }
log = "0.4" log = "0.4"
macros = { path = "../util/macros", optional = true }
machine = { path = "./machine" } machine = { path = "./machine" }
memory-cache = { path = "../util/memory-cache" } memory-cache = { path = "../util/memory-cache" }
parity-bytes = "0.1" parity-bytes = "0.1"
@ -82,7 +81,6 @@ kvdb-memorydb = "0.4.0"
kvdb-rocksdb = "0.5.0" kvdb-rocksdb = "0.5.0"
lazy_static = "1.3" lazy_static = "1.3"
machine = { path = "./machine", features = ["test-helpers"] } machine = { path = "./machine", features = ["test-helpers"] }
macros = { path = "../util/macros" }
parity-runtime = "0.1.1" parity-runtime = "0.1.1"
serde_json = "1.0" serde_json = "1.0"
stats = { path = "../util/stats" } stats = { path = "../util/stats" }
@ -91,7 +89,7 @@ tempdir = "0.3"
trie-standardmap = "0.15.0" trie-standardmap = "0.15.0"
[features] [features]
parity = ["work-notify", "price-info", "stratum", "macros"] parity = ["work-notify", "price-info", "stratum"]
# Large optional features that are enabled by default for Parity, # Large optional features that are enabled by default for Parity,
# but might be omitted for other dependent crates. # but might be omitted for other dependent crates.
work-notify = ["ethcore-miner/work-notify"] work-notify = ["ethcore-miner/work-notify"]
@ -127,7 +125,6 @@ test-helpers = [
"parity-crypto", "parity-crypto",
"kvdb-memorydb", "kvdb-memorydb",
"kvdb-rocksdb", "kvdb-rocksdb",
"macros",
"pod", "pod",
"tempdir", "tempdir",
"basic-authority/test-helpers" "basic-authority/test-helpers"


@ -20,4 +20,4 @@ parity-crypto = { version = "0.5.0", features = ["publickey"] }
[dev-dependencies] [dev-dependencies]
hex-literal = "0.2.1" hex-literal = "0.2.1"
macros = { path = "../../util/macros" } maplit = "1.0.2"


@ -759,18 +759,18 @@ mod tests {
PricingAt, AltBn128Pairing as JsonAltBn128PairingPricing, Pricing as JsonPricing, PricingAt, AltBn128Pairing as JsonAltBn128PairingPricing, Pricing as JsonPricing,
}; };
use hex_literal::hex; use hex_literal::hex;
use macros::map; use maplit::btreemap;
use num::{BigUint, Zero, One}; use num::{BigUint, Zero, One};
use parity_bytes::BytesRef; use parity_bytes::BytesRef;
use super::{ use super::{
BTreeMap, Builtin, EthereumBuiltin, FromStr, Implementation, Linear, Builtin, EthereumBuiltin, FromStr, Implementation, Linear,
ModexpPricer, modexp as me, Pricing ModexpPricer, modexp as me, Pricing
}; };
#[test] #[test]
fn blake2f_cost() { fn blake2f_cost() {
let f = Builtin { let f = Builtin {
pricer: map![0 => Pricing::Blake2F(123)], pricer: btreemap![0 => Pricing::Blake2F(123)],
native: EthereumBuiltin::from_str("blake2_f").unwrap(), native: EthereumBuiltin::from_str("blake2_f").unwrap(),
}; };
// 5 rounds // 5 rounds
@ -784,7 +784,7 @@ mod tests {
#[test] #[test]
fn blake2f_cost_on_invalid_length() { fn blake2f_cost_on_invalid_length() {
let f = Builtin { let f = Builtin {
pricer: map![0 => Pricing::Blake2F(123)], pricer: btreemap![0 => Pricing::Blake2F(123)],
native: EthereumBuiltin::from_str("blake2_f").expect("known builtin"), native: EthereumBuiltin::from_str("blake2_f").expect("known builtin"),
}; };
// invalid input (too short) // invalid input (too short)
@ -1031,7 +1031,7 @@ mod tests {
#[test] #[test]
fn modexp() { fn modexp() {
let f = Builtin { let f = Builtin {
pricer: map![0 => Pricing::Modexp(ModexpPricer { divisor: 20 })], pricer: btreemap![0 => Pricing::Modexp(ModexpPricer { divisor: 20 })],
native: EthereumBuiltin::from_str("modexp").unwrap(), native: EthereumBuiltin::from_str("modexp").unwrap(),
}; };
@ -1141,7 +1141,7 @@ mod tests {
fn bn128_add() { fn bn128_add() {
let f = Builtin { let f = Builtin {
pricer: map![0 => Pricing::Linear(Linear { base: 0, word: 0 })], pricer: btreemap![0 => Pricing::Linear(Linear { base: 0, word: 0 })],
native: EthereumBuiltin::from_str("alt_bn128_add").unwrap(), native: EthereumBuiltin::from_str("alt_bn128_add").unwrap(),
}; };
@ -1199,7 +1199,7 @@ mod tests {
fn bn128_mul() { fn bn128_mul() {
let f = Builtin { let f = Builtin {
pricer: map![0 => Pricing::Linear(Linear { base: 0, word: 0 })], pricer: btreemap![0 => Pricing::Linear(Linear { base: 0, word: 0 })],
native: EthereumBuiltin::from_str("alt_bn128_mul").unwrap(), native: EthereumBuiltin::from_str("alt_bn128_mul").unwrap(),
}; };
@ -1238,7 +1238,7 @@ mod tests {
fn builtin_pairing() -> Builtin { fn builtin_pairing() -> Builtin {
Builtin { Builtin {
pricer: map![0 => Pricing::Linear(Linear { base: 0, word: 0 })], pricer: btreemap![0 => Pricing::Linear(Linear { base: 0, word: 0 })],
native: EthereumBuiltin::from_str("alt_bn128_pairing").unwrap(), native: EthereumBuiltin::from_str("alt_bn128_pairing").unwrap(),
} }
} }
@ -1317,7 +1317,7 @@ mod tests {
fn is_active() { fn is_active() {
let pricer = Pricing::Linear(Linear { base: 10, word: 20 }); let pricer = Pricing::Linear(Linear { base: 10, word: 20 });
let b = Builtin { let b = Builtin {
pricer: map![100_000 => pricer], pricer: btreemap![100_000 => pricer],
native: EthereumBuiltin::from_str("identity").unwrap(), native: EthereumBuiltin::from_str("identity").unwrap(),
}; };
@ -1330,7 +1330,7 @@ mod tests {
fn from_named_linear() { fn from_named_linear() {
let pricer = Pricing::Linear(Linear { base: 10, word: 20 }); let pricer = Pricing::Linear(Linear { base: 10, word: 20 });
let b = Builtin { let b = Builtin {
pricer: map![0 => pricer], pricer: btreemap![0 => pricer],
native: EthereumBuiltin::from_str("identity").unwrap(), native: EthereumBuiltin::from_str("identity").unwrap(),
}; };
@ -1349,7 +1349,7 @@ mod tests {
fn from_json() { fn from_json() {
let b = Builtin::try_from(ethjson::spec::Builtin { let b = Builtin::try_from(ethjson::spec::Builtin {
name: "identity".to_owned(), name: "identity".to_owned(),
pricing: map![ pricing: btreemap![
0 => PricingAt { 0 => PricingAt {
info: None, info: None,
price: JsonPricing::Linear(JsonLinearPricing { base: 10, word: 20 }) price: JsonPricing::Linear(JsonLinearPricing { base: 10, word: 20 })
@ -1372,7 +1372,7 @@ mod tests {
fn bn128_pairing_eip1108_transition() { fn bn128_pairing_eip1108_transition() {
let b = Builtin::try_from(JsonBuiltin { let b = Builtin::try_from(JsonBuiltin {
name: "alt_bn128_pairing".to_owned(), name: "alt_bn128_pairing".to_owned(),
pricing: map![ pricing: btreemap![
10 => PricingAt { 10 => PricingAt {
info: None, info: None,
price: JsonPricing::AltBn128Pairing(JsonAltBn128PairingPricing { price: JsonPricing::AltBn128Pairing(JsonAltBn128PairingPricing {
@ -1398,7 +1398,7 @@ mod tests {
fn bn128_add_eip1108_transition() { fn bn128_add_eip1108_transition() {
let b = Builtin::try_from(JsonBuiltin { let b = Builtin::try_from(JsonBuiltin {
name: "alt_bn128_add".to_owned(), name: "alt_bn128_add".to_owned(),
pricing: map![ pricing: btreemap![
10 => PricingAt { 10 => PricingAt {
info: None, info: None,
price: JsonPricing::Linear(JsonLinearPricing { price: JsonPricing::Linear(JsonLinearPricing {
@ -1424,7 +1424,7 @@ mod tests {
fn bn128_mul_eip1108_transition() { fn bn128_mul_eip1108_transition() {
let b = Builtin::try_from(JsonBuiltin { let b = Builtin::try_from(JsonBuiltin {
name: "alt_bn128_mul".to_owned(), name: "alt_bn128_mul".to_owned(),
pricing: map![ pricing: btreemap![
10 => PricingAt { 10 => PricingAt {
info: None, info: None,
price: JsonPricing::Linear(JsonLinearPricing { price: JsonPricing::Linear(JsonLinearPricing {
@ -1451,7 +1451,7 @@ mod tests {
fn multimap_use_most_recent_on_activate() { fn multimap_use_most_recent_on_activate() {
let b = Builtin::try_from(JsonBuiltin { let b = Builtin::try_from(JsonBuiltin {
name: "alt_bn128_mul".to_owned(), name: "alt_bn128_mul".to_owned(),
pricing: map![ pricing: btreemap![
10 => PricingAt { 10 => PricingAt {
info: None, info: None,
price: JsonPricing::Linear(JsonLinearPricing { price: JsonPricing::Linear(JsonLinearPricing {
@ -1489,7 +1489,7 @@ mod tests {
fn multimap_use_last_with_same_activate_at() { fn multimap_use_last_with_same_activate_at() {
let b = Builtin::try_from(JsonBuiltin { let b = Builtin::try_from(JsonBuiltin {
name: "alt_bn128_mul".to_owned(), name: "alt_bn128_mul".to_owned(),
pricing: map![ pricing: btreemap![
1 => PricingAt { 1 => PricingAt {
info: None, info: None,
price: JsonPricing::Linear(JsonLinearPricing { price: JsonPricing::Linear(JsonLinearPricing {


@ -26,7 +26,6 @@ lazy_static = "1.3.0"
log = "0.4" log = "0.4"
lru-cache = "0.1" lru-cache = "0.1"
machine = { path = "../../machine" } machine = { path = "../../machine" }
macros = { path = "../../../util/macros" }
parity-bytes = "0.1" parity-bytes = "0.1"
parking_lot = "0.10.0" parking_lot = "0.10.0"
rand = "0.7" rand = "0.7"


@ -44,12 +44,10 @@ use client_traits::{EngineClient, ForceUpdateSealing, TransactionRequest};
use engine::{Engine, ConstructedVerifier}; use engine::{Engine, ConstructedVerifier};
use block_gas_limit::block_gas_limit; use block_gas_limit::block_gas_limit;
use block_reward::{self, BlockRewardContract, RewardKind}; use block_reward::{self, BlockRewardContract, RewardKind};
use ethjson;
use machine::{ use machine::{
ExecutedBlock, ExecutedBlock,
Machine, Machine,
}; };
use macros::map;
use keccak_hash::keccak; use keccak_hash::keccak;
use log::{info, debug, error, trace, warn}; use log::{info, debug, error, trace, warn};
use lru_cache::LruCache; use lru_cache::LruCache;
@ -1277,22 +1275,20 @@ impl Engine for AuthorityRound {
.map(ToString::to_string) .map(ToString::to_string)
.unwrap_or_default(); .unwrap_or_default();
let mut info = map![ let mut info = BTreeMap::new();
"step".into() => step, info.insert("step".into(), step);
"signature".into() => signature info.insert("signature".into(), signature);
];
if header.number() >= self.empty_steps_transition { if header.number() >= self.empty_steps_transition {
let empty_steps = let empty_steps = header_empty_steps(header).as_ref().map_or(String::new(), |empty_steps| {
if let Ok(empty_steps) = header_empty_steps(header).as_ref() { format!("[{}]", empty_steps.iter().fold(String::new(), |mut acc, e| {
format!("[{}]", if !acc.is_empty() {
empty_steps.iter().fold( acc.push(',');
"".to_string(), }
|acc, e| if acc.len() > 0 { acc + ","} else { acc } + &e.to_string())) acc.push_str(&e.to_string());
acc
} else { }))
"".into() });
};
info.insert("emptySteps".into(), empty_steps); info.insert("emptySteps".into(), empty_steps);
} }
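
For reference, a standalone sketch of the comma-joining fold that now formats `emptySteps` above; plain `u64`s stand in for the real empty-step entries:

```rust
// Minimal sketch of the fold used above, assuming the items implement Display.
fn join_bracketed<T: std::fmt::Display>(items: &[T]) -> String {
    format!("[{}]", items.iter().fold(String::new(), |mut acc, e| {
        if !acc.is_empty() {
            acc.push(',');
        }
        acc.push_str(&e.to_string());
        acc
    }))
}

fn main() {
    assert_eq!(join_bracketed(&[2u64, 5, 7]), "[2,5,7]");
    assert_eq!(join_bracketed::<u64>(&[]), "[]");
}
```
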


@ -18,7 +18,6 @@ lazy_static = "1.3.0"
log = "0.4" log = "0.4"
lru-cache = "0.1" lru-cache = "0.1"
machine = { path = "../../machine" } machine = { path = "../../machine" }
macros = { path = "../../../util/macros" }
rand = "0.7" rand = "0.7"
parking_lot = "0.10.0" parking_lot = "0.10.0"
rlp = "0.4.0" rlp = "0.4.0"


@ -80,7 +80,6 @@ use machine::{
ExecutedBlock, ExecutedBlock,
Machine, Machine,
}; };
use macros::map;
use parking_lot::RwLock; use parking_lot::RwLock;
use rand::Rng; use rand::Rng;
use unexpected::{Mismatch, OutOfBounds}; use unexpected::{Mismatch, OutOfBounds};
@ -379,13 +378,12 @@ impl Engine for Clique {
fn extra_info(&self, header: &Header) -> BTreeMap<String, String> { fn extra_info(&self, header: &Header) -> BTreeMap<String, String> {
// clique engine seal fields are the same as ethash seal fields // clique engine seal fields are the same as ethash seal fields
match EthashSeal::parse_seal(header.seal()) { let mut engine_info = BTreeMap::new();
Ok(seal) => map![ if let Ok(seal) = EthashSeal::parse_seal(header.seal()) {
"nonce".to_owned() => format!("{:#x}", seal.nonce), engine_info.insert("nonce".to_string(), format!("{:#x}", seal.nonce));
"mixHash".to_owned() => format!("{:#x}", seal.mix_hash) engine_info.insert("mixHash".to_string(), format!("{:#x}", seal.mix_hash));
],
_ => BTreeMap::default()
} }
engine_info
} }
fn maximum_uncle_count(&self, _block: BlockNumber) -> usize { 0 } fn maximum_uncle_count(&self, _block: BlockNumber) -> usize { 0 }


@ -16,7 +16,6 @@ ethjson = { path = "../../../json" }
keccak-hash = "0.4.0" keccak-hash = "0.4.0"
log = "0.4.8" log = "0.4.8"
machine = { path = "../../machine" } machine = { path = "../../machine" }
macros = { path = "../../../util/macros" }
unexpected = { path = "../../../util/unexpected" } unexpected = { path = "../../../util/unexpected" }
[dev-dependencies] [dev-dependencies]


@ -33,11 +33,9 @@ use common_types::{
}; };
use engine::Engine; use engine::Engine;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
use ethjson;
use ethash::{self, quick_get_difficulty, slow_hash_block_number, EthashManager}; use ethash::{self, quick_get_difficulty, slow_hash_block_number, EthashManager};
use keccak_hash::{KECCAK_EMPTY_LIST_RLP}; use keccak_hash::KECCAK_EMPTY_LIST_RLP;
use log::trace; use log::trace;
use macros::map;
use machine::{ use machine::{
ExecutedBlock, ExecutedBlock,
Machine, Machine,
@ -232,18 +230,17 @@ impl Engine for Ethash {
/// Additional engine-specific information for the user/developer concerning `header`. /// Additional engine-specific information for the user/developer concerning `header`.
fn extra_info(&self, header: &Header) -> BTreeMap<String, String> { fn extra_info(&self, header: &Header) -> BTreeMap<String, String> {
match EthashSeal::parse_seal(header.seal()) { let mut engine_info = BTreeMap::new();
Ok(seal) => map![ if let Ok(seal) = EthashSeal::parse_seal(header.seal()) {
"nonce".to_owned() => format!("0x{:x}", seal.nonce), engine_info.insert("nonce".to_string(), format!("{:#x}", seal.nonce));
"mixHash".to_owned() => format!("0x{:x}", seal.mix_hash) engine_info.insert("mixHash".to_string(), format!("{:#x}", seal.mix_hash));
],
_ => BTreeMap::default()
} }
engine_info
} }
fn maximum_uncle_count(&self, _block: BlockNumber) -> usize { 2 } fn maximum_uncle_count(&self, _block: BlockNumber) -> usize { 2 }
fn maximum_gas_limit(&self) -> Option<U256> { Some(0x7fff_ffff_ffff_ffffu64.into()) } fn maximum_gas_limit(&self) -> Option<U256> { Some(0x7fff_ffff_ffff_ffff_u64.into()) }
/// Apply the block reward on finalisation of the block. /// Apply the block reward on finalisation of the block.
/// This assumes that all uncles are valid uncles (i.e. of at least one generation before the current). /// This assumes that all uncles are valid uncles (i.e. of at least one generation before the current).
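
The clique and ethash hunks above share the same shape: build the map imperatively and return it empty when the seal cannot be parsed. A self-contained sketch with a `Result` of plain `u64`s standing in for the parsed seal (note that `{:#x}` already emits the `0x` prefix, which is why the old `0x{:x}` pattern could be dropped):

```rust
use std::collections::BTreeMap;

// Hedged sketch of the new extra_info shape: fill the map only when parsing succeeds.
fn extra_info(parsed_seal: Result<(u64, u64), ()>) -> BTreeMap<String, String> {
    let mut engine_info = BTreeMap::new();
    if let Ok((nonce, mix_hash)) = parsed_seal {
        // `{:#x}` prints the value with a leading `0x`.
        engine_info.insert("nonce".to_string(), format!("{:#x}", nonce));
        engine_info.insert("mixHash".to_string(), format!("{:#x}", mix_hash));
    }
    engine_info
}

fn main() {
    assert!(extra_info(Err(())).is_empty());
    assert_eq!(extra_info(Ok((9, 255)))["nonce"], "0x9");
    assert_eq!(extra_info(Ok((9, 255)))["mixHash"], "0xff");
}
```
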


@ -44,7 +44,6 @@ ethcore = { path = "../", features = ["test-helpers"] }
ethcore-io = { path = "../../util/io" } ethcore-io = { path = "../../util/io" }
ethjson = { path = "../../json" } ethjson = { path = "../../json" }
parity-crypto = { version = "0.5.0", features = ["publickey"] } parity-crypto = { version = "0.5.0", features = ["publickey"] }
macros = { path = "../../util/macros" }
rustc-hex = "1.0" rustc-hex = "1.0"
spec = { path = "../spec" } spec = { path = "../spec" }
tempdir = "0.3" tempdir = "0.3"


@ -1233,7 +1233,6 @@ mod tests {
}; };
use parity_crypto::publickey::{Generator, Random}; use parity_crypto::publickey::{Generator, Random};
use evm::{Factory, evm_test, evm_test_ignore}; use evm::{Factory, evm_test, evm_test_ignore};
use macros::vec_into;
use vm::{ActionParams, ActionValue, EnvInfo, CreateContractAddress}; use vm::{ActionParams, ActionValue, EnvInfo, CreateContractAddress};
use ::trace::{ use ::trace::{
trace, trace,
@ -1551,28 +1550,28 @@ mod tests {
parent_step: 0, parent_step: 0,
code: vec![124, 96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85, 96, 0, 82, 96, 29, 96, 3, 96, 23, 240, 96, 0, 85], code: vec![124, 96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85, 96, 0, 82, 96, 29, 96, 3, 96, 23, 240, 96, 0, 85],
operations: vec![ operations: vec![
VMOperation { pc: 0, instruction: 124, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99997.into(), stack_push: vec_into![U256::from_dec_str("2589892687202724018173567190521546555304938078595079151649957320078677").unwrap()], mem_diff: None, store_diff: None }) }, VMOperation { pc: 0, instruction: 124, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99997.into(), stack_push: vec![U256::from_dec_str("2589892687202724018173567190521546555304938078595079151649957320078677").unwrap()], mem_diff: None, store_diff: None }) },
VMOperation { pc: 30, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99994.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 30, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99994.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 32, instruction: 82, gas_cost: 6.into(), executed: Some(VMExecutedOperation { gas_used: 99988.into(), stack_push: vec_into![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![0, 0, 0, 96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85] }), store_diff: None }) }, VMOperation { pc: 32, instruction: 82, gas_cost: 6.into(), executed: Some(VMExecutedOperation { gas_used: 99988.into(), stack_push: vec![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![0, 0, 0, 96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85] }), store_diff: None }) },
VMOperation { pc: 33, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99985.into(), stack_push: vec_into![29], mem_diff: None, store_diff: None }) }, VMOperation { pc: 33, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99985.into(), stack_push: vec![U256::from(29)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 35, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99982.into(), stack_push: vec_into![3], mem_diff: None, store_diff: None }) }, VMOperation { pc: 35, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99982.into(), stack_push: vec![U256::from(3)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 37, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec_into![23], mem_diff: None, store_diff: None }) }, VMOperation { pc: 37, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec![U256::from(23)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 39, instruction: 240, gas_cost: 99979.into(), executed: Some(VMExecutedOperation { gas_used: 64755.into(), stack_push: vec_into![U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap()], mem_diff: None, store_diff: None }) }, VMOperation { pc: 39, instruction: 240, gas_cost: 99979.into(), executed: Some(VMExecutedOperation { gas_used: 64755.into(), stack_push: vec![U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap()], mem_diff: None, store_diff: None }) },
VMOperation { pc: 40, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 64752.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 40, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 64752.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 42, instruction: 85, gas_cost: 20000.into(), executed: Some(VMExecutedOperation { gas_used: 44752.into(), stack_push: vec_into![], mem_diff: None, store_diff: Some(StorageDiff { location: 0.into(), value: U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap() }) }) } VMOperation { pc: 42, instruction: 85, gas_cost: 20000.into(), executed: Some(VMExecutedOperation { gas_used: 44752.into(), stack_push: vec![], mem_diff: None, store_diff: Some(StorageDiff { location: 0.into(), value: U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap() }) }) }
], ],
subs: vec![ subs: vec![
VMTrace { VMTrace {
parent_step: 6, parent_step: 6,
code: vec![96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85], code: vec![96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85],
operations: vec![ operations: vec![
VMOperation { pc: 0, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67976.into(), stack_push: vec_into![16], mem_diff: None, store_diff: None }) }, VMOperation { pc: 0, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67976.into(), stack_push: vec![U256::from(16)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 2, instruction: 128, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67973.into(), stack_push: vec_into![16, 16], mem_diff: None, store_diff: None }) }, VMOperation { pc: 2, instruction: 128, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67973.into(), stack_push: vec![U256::from(16); 2], mem_diff: None, store_diff: None }) },
VMOperation { pc: 3, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67970.into(), stack_push: vec_into![12], mem_diff: None, store_diff: None }) }, VMOperation { pc: 3, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67970.into(), stack_push: vec![U256::from(12)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 5, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67967.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 5, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67967.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 7, instruction: 57, gas_cost: 9.into(), executed: Some(VMExecutedOperation { gas_used: 67958.into(), stack_push: vec_into![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53] }), store_diff: None }) }, VMOperation { pc: 7, instruction: 57, gas_cost: 9.into(), executed: Some(VMExecutedOperation { gas_used: 67958.into(), stack_push: vec![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53] }), store_diff: None }) },
VMOperation { pc: 8, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67955.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 8, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 67955.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 10, instruction: 243, gas_cost: 0.into(), executed: Some(VMExecutedOperation { gas_used: 67955.into(), stack_push: vec_into![], mem_diff: None, store_diff: None }) } VMOperation { pc: 10, instruction: 243, gas_cost: 0.into(), executed: Some(VMExecutedOperation { gas_used: 67955.into(), stack_push: vec![], mem_diff: None, store_diff: None }) }
], ],
subs: vec![] subs: vec![]
} }
@ -1726,13 +1725,13 @@ mod tests {
parent_step: 0, parent_step: 0,
code: vec![96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85], code: vec![96, 16, 128, 96, 12, 96, 0, 57, 96, 0, 243, 0, 96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53, 85],
operations: vec![ operations: vec![
VMOperation { pc: 0, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99997.into(), stack_push: vec_into![16], mem_diff: None, store_diff: None }) }, VMOperation { pc: 0, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99997.into(), stack_push: vec![U256::from(16)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 2, instruction: 128, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99994.into(), stack_push: vec_into![16, 16], mem_diff: None, store_diff: None }) }, VMOperation { pc: 2, instruction: 128, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99994.into(), stack_push: vec![U256::from(16); 2], mem_diff: None, store_diff: None }) },
VMOperation { pc: 3, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99991.into(), stack_push: vec_into![12], mem_diff: None, store_diff: None }) }, VMOperation { pc: 3, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99991.into(), stack_push: vec![U256::from(12)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 5, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99988.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 5, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99988.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 7, instruction: 57, gas_cost: 9.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec_into![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53] }), store_diff: None }) }, VMOperation { pc: 7, instruction: 57, gas_cost: 9.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec![], mem_diff: Some(MemoryDiff { offset: 0, data: vec![96, 0, 53, 84, 21, 96, 9, 87, 0, 91, 96, 32, 53, 96, 0, 53] }), store_diff: None }) },
VMOperation { pc: 8, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99976.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 8, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99976.into(), stack_push: vec![U256::from(0)], mem_diff: None, store_diff: None }) },
VMOperation { pc: 10, instruction: 243, gas_cost: 0.into(), executed: Some(VMExecutedOperation { gas_used: 99976.into(), stack_push: vec_into![], mem_diff: None, store_diff: None }) } VMOperation { pc: 10, instruction: 243, gas_cost: 0.into(), executed: Some(VMExecutedOperation { gas_used: 99976.into(), stack_push: vec![], mem_diff: None, store_diff: None }) }
], ],
subs: vec![] subs: vec![]
}; };
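
A small sketch of the conversions that replace `vec_into!` in these traces, assuming the `ethereum-types` crate used throughout the diff:

```rust
use ethereum_types::U256;

fn main() {
    // Explicit conversions replace the old macro:
    let single = vec![U256::from(12)];        // was vec_into![12]
    let repeated = vec![U256::from(16); 2];   // was vec_into![16, 16]
    let empty: Vec<U256> = vec![];            // was vec_into![]

    assert_eq!(single[0], U256::from(12));
    assert_eq!(repeated, vec![U256::from(16), U256::from(16)]);
    assert!(empty.is_empty());
}
```
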


@ -25,4 +25,4 @@ trie-db = "0.20.0"
triehash = { package = "triehash-ethereum", version = "0.2", path = "../../util/triehash-ethereum" } triehash = { package = "triehash-ethereum", version = "0.2", path = "../../util/triehash-ethereum" }
[dev-dependencies] [dev-dependencies]
macros = { path = "../../util/macros" } maplit = "1.0.2"


@ -27,7 +27,6 @@ use triehash::sec_trie_root;
use parity_bytes::Bytes; use parity_bytes::Bytes;
use trie_db::TrieFactory; use trie_db::TrieFactory;
use ethtrie::Layout; use ethtrie::Layout;
use ethjson;
use common_types::account_diff::*; use common_types::account_diff::*;
use rlp::{self, RlpStream}; use rlp::{self, RlpStream};
use serde::{Serializer, Serialize}; use serde::{Serializer, Serialize};
@ -146,55 +145,54 @@ pub fn diff_pod(pre: Option<&PodAccount>, post: Option<&PodAccount>) -> Option<A
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::collections::BTreeMap;
use common_types::account_diff::*; use common_types::account_diff::*;
use super::{PodAccount, diff_pod}; use super::{PodAccount, diff_pod};
use ethereum_types::H256; use ethereum_types::H256;
use macros::map; use maplit::btreemap;
#[test] #[test]
fn existence() { fn existence() {
let a = PodAccount { let a = PodAccount {
balance: 69.into(), nonce: 0.into(), code: Some(vec![]), storage: map![], version: 0.into(), balance: 69.into(), nonce: 0.into(), code: Some(vec![]), storage: btreemap![], version: 0.into(),
}; };
assert_eq!(diff_pod(Some(&a), Some(&a)), None); assert_eq!(diff_pod(Some(&a), Some(&a)), None);
assert_eq!(diff_pod(None, Some(&a)), Some(AccountDiff{ assert_eq!(diff_pod(None, Some(&a)), Some(AccountDiff{
balance: Diff::Born(69.into()), balance: Diff::Born(69.into()),
nonce: Diff::Born(0.into()), nonce: Diff::Born(0.into()),
code: Diff::Born(vec![]), code: Diff::Born(vec![]),
storage: map![], storage: btreemap![],
})); }));
} }
#[test] #[test]
fn basic() { fn basic() {
let a = PodAccount { let a = PodAccount {
balance: 69.into(), nonce: 0.into(), code: Some(vec![]), storage: map![], version: 0.into(), balance: 69.into(), nonce: 0.into(), code: Some(vec![]), storage: btreemap![], version: 0.into(),
}; };
let b = PodAccount { let b = PodAccount {
balance: 42.into(), nonce: 1.into(), code: Some(vec![]), storage: map![], version: 0.into(), balance: 42.into(), nonce: 1.into(), code: Some(vec![]), storage: btreemap![], version: 0.into(),
}; };
assert_eq!(diff_pod(Some(&a), Some(&b)), Some(AccountDiff { assert_eq!(diff_pod(Some(&a), Some(&b)), Some(AccountDiff {
balance: Diff::Changed(69.into(), 42.into()), balance: Diff::Changed(69.into(), 42.into()),
nonce: Diff::Changed(0.into(), 1.into()), nonce: Diff::Changed(0.into(), 1.into()),
code: Diff::Same, code: Diff::Same,
storage: map![], storage: btreemap![],
})); }));
} }
#[test] #[test]
fn code() { fn code() {
let a = PodAccount { let a = PodAccount {
balance: 0.into(), nonce: 0.into(), code: Some(vec![]), storage: map![], version: 0.into(), balance: 0.into(), nonce: 0.into(), code: Some(vec![]), storage: btreemap![], version: 0.into(),
}; };
let b = PodAccount { let b = PodAccount {
balance: 0.into(), nonce: 1.into(), code: Some(vec![0]), storage: map![], version: 0.into(), balance: 0.into(), nonce: 1.into(), code: Some(vec![0]), storage: btreemap![], version: 0.into(),
}; };
assert_eq!(diff_pod(Some(&a), Some(&b)), Some(AccountDiff { assert_eq!(diff_pod(Some(&a), Some(&b)), Some(AccountDiff {
balance: Diff::Same, balance: Diff::Same,
nonce: Diff::Changed(0.into(), 1.into()), nonce: Diff::Changed(0.into(), 1.into()),
code: Diff::Changed(vec![], vec![0]), code: Diff::Changed(vec![], vec![0]),
storage: map![], storage: btreemap![],
})); }));
} }
@ -204,7 +202,7 @@ mod test {
balance: 0.into(), balance: 0.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(vec![]), code: Some(vec![]),
storage: map![ storage: btreemap![
H256::from_low_u64_be(1) => H256::from_low_u64_be(1), H256::from_low_u64_be(1) => H256::from_low_u64_be(1),
H256::from_low_u64_be(2) => H256::from_low_u64_be(2), H256::from_low_u64_be(2) => H256::from_low_u64_be(2),
H256::from_low_u64_be(3) => H256::from_low_u64_be(3), H256::from_low_u64_be(3) => H256::from_low_u64_be(3),
@ -219,7 +217,7 @@ mod test {
balance: 0.into(), balance: 0.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(vec![]), code: Some(vec![]),
storage: map![ storage: btreemap![
H256::from_low_u64_be(1) => H256::from_low_u64_be(1), H256::from_low_u64_be(1) => H256::from_low_u64_be(1),
H256::from_low_u64_be(2) => H256::from_low_u64_be(3), H256::from_low_u64_be(2) => H256::from_low_u64_be(3),
H256::from_low_u64_be(3) => H256::from_low_u64_be(0), H256::from_low_u64_be(3) => H256::from_low_u64_be(0),
@ -234,7 +232,7 @@ mod test {
balance: Diff::Same, balance: Diff::Same,
nonce: Diff::Same, nonce: Diff::Same,
code: Diff::Same, code: Diff::Same,
storage: map![ storage: btreemap![
H256::from_low_u64_be(2) => Diff::new(H256::from_low_u64_be(2), H256::from_low_u64_be(3)), H256::from_low_u64_be(2) => Diff::new(H256::from_low_u64_be(2), H256::from_low_u64_be(3)),
H256::from_low_u64_be(3) => Diff::new(H256::from_low_u64_be(3), H256::from_low_u64_be(0)), H256::from_low_u64_be(3) => Diff::new(H256::from_low_u64_be(3), H256::from_low_u64_be(0)),
H256::from_low_u64_be(4) => Diff::new(H256::from_low_u64_be(4), H256::from_low_u64_be(0)), H256::from_low_u64_be(4) => Diff::new(H256::from_low_u64_be(4), H256::from_low_u64_be(0)),


@ -20,7 +20,6 @@ use std::collections::BTreeMap;
use ethereum_types::{H256, Address}; use ethereum_types::{H256, Address};
use triehash::sec_trie_root; use triehash::sec_trie_root;
use common_types::state_diff::StateDiff; use common_types::state_diff::StateDiff;
use ethjson;
use serde::Serialize; use serde::Serialize;
use crate::account::PodAccount; use crate::account::PodAccount;
@ -74,7 +73,6 @@ pub fn diff_pod(pre: &PodState, post: &PodState) -> StateDiff {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::collections::BTreeMap;
use common_types::{ use common_types::{
account_diff::{AccountDiff, Diff}, account_diff::{AccountDiff, Diff},
state_diff::StateDiff, state_diff::StateDiff,
@ -82,122 +80,122 @@ mod test {
use ethereum_types::Address; use ethereum_types::Address;
use crate::account::PodAccount; use crate::account::PodAccount;
use super::PodState; use super::PodState;
use macros::map; use maplit::btreemap;
#[test] #[test]
fn create_delete() { fn create_delete() {
let a = PodState::from(map![ let a = PodState::from(btreemap![
Address::from_low_u64_be(1) => PodAccount { Address::from_low_u64_be(1) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
} }
]); ]);
assert_eq!(super::diff_pod(&a, &PodState::default()), StateDiff { raw: map![ assert_eq!(super::diff_pod(&a, &PodState::default()), StateDiff { raw: btreemap![
Address::from_low_u64_be(1) => AccountDiff{ Address::from_low_u64_be(1) => AccountDiff{
balance: Diff::Died(69.into()), balance: Diff::Died(69.into()),
nonce: Diff::Died(0.into()), nonce: Diff::Died(0.into()),
code: Diff::Died(vec![]), code: Diff::Died(vec![]),
storage: map![], storage: btreemap![],
} }
]}); ]});
assert_eq!(super::diff_pod(&PodState::default(), &a), StateDiff { raw: map![ assert_eq!(super::diff_pod(&PodState::default(), &a), StateDiff { raw: btreemap![
Address::from_low_u64_be(1) => AccountDiff{ Address::from_low_u64_be(1) => AccountDiff{
balance: Diff::Born(69.into()), balance: Diff::Born(69.into()),
nonce: Diff::Born(0.into()), nonce: Diff::Born(0.into()),
code: Diff::Born(vec![]), code: Diff::Born(vec![]),
storage: map![], storage: btreemap![],
} }
]}); ]});
} }
#[test] #[test]
fn create_delete_with_unchanged() { fn create_delete_with_unchanged() {
let a = PodState::from(map![ let a = PodState::from(btreemap![
Address::from_low_u64_be(1) => PodAccount { Address::from_low_u64_be(1) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
} }
]); ]);
let b = PodState::from(map![ let b = PodState::from(btreemap![
Address::from_low_u64_be(1) => PodAccount { Address::from_low_u64_be(1) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
}, },
Address::from_low_u64_be(2) => PodAccount { Address::from_low_u64_be(2) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
} }
]); ]);
assert_eq!(super::diff_pod(&a, &b), StateDiff { raw: map![ assert_eq!(super::diff_pod(&a, &b), StateDiff { raw: btreemap![
Address::from_low_u64_be(2) => AccountDiff { Address::from_low_u64_be(2) => AccountDiff {
balance: Diff::Born(69.into()), balance: Diff::Born(69.into()),
nonce: Diff::Born(0.into()), nonce: Diff::Born(0.into()),
code: Diff::Born(vec![]), code: Diff::Born(vec![]),
storage: map![], storage: btreemap![],
} }
]}); ]});
assert_eq!(super::diff_pod(&b, &a), StateDiff { raw: map![ assert_eq!(super::diff_pod(&b, &a), StateDiff { raw: btreemap![
Address::from_low_u64_be(2) => AccountDiff { Address::from_low_u64_be(2) => AccountDiff {
balance: Diff::Died(69.into()), balance: Diff::Died(69.into()),
nonce: Diff::Died(0.into()), nonce: Diff::Died(0.into()),
code: Diff::Died(vec![]), code: Diff::Died(vec![]),
storage: map![], storage: btreemap![],
} }
]}); ]});
} }
#[test] #[test]
fn change_with_unchanged() { fn change_with_unchanged() {
let a = PodState::from(map![ let a = PodState::from(btreemap![
Address::from_low_u64_be(1) => PodAccount { Address::from_low_u64_be(1) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
}, },
Address::from_low_u64_be(2) => PodAccount { Address::from_low_u64_be(2) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
} }
]); ]);
let b = PodState::from(map![ let b = PodState::from(btreemap![
Address::from_low_u64_be(1) => PodAccount { Address::from_low_u64_be(1) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 1.into(), nonce: 1.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
}, },
Address::from_low_u64_be(2) => PodAccount { Address::from_low_u64_be(2) => PodAccount {
balance: 69.into(), balance: 69.into(),
nonce: 0.into(), nonce: 0.into(),
code: Some(Vec::new()), code: Some(Vec::new()),
storage: map![], storage: btreemap![],
version: 0.into(), version: 0.into(),
} }
]); ]);
assert_eq!(super::diff_pod(&a, &b), StateDiff { raw: map![ assert_eq!(super::diff_pod(&a, &b), StateDiff { raw: btreemap![
Address::from_low_u64_be(1) => AccountDiff { Address::from_low_u64_be(1) => AccountDiff {
balance: Diff::Same, balance: Diff::Same,
nonce: Diff::Changed(0.into(), 1.into()), nonce: Diff::Changed(0.into(), 1.into()),
code: Diff::Same, code: Diff::Same,
storage: map![], storage: btreemap![],
} }
]}); ]});
} }
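
The pod-state tests above nest one `btreemap!` inside another (addresses mapping to per-account storage maps). A minimal sketch of that pattern, with plain integers standing in for the real `Address`/`H256`/`PodAccount` types:

```rust
use maplit::btreemap;
use std::collections::BTreeMap;

fn main() {
    // Outer map: account id -> inner storage map; both levels use btreemap!.
    let state: BTreeMap<u8, BTreeMap<u8, u8>> = btreemap![
        1 => btreemap![1 => 1, 2 => 2],
        2 => btreemap![]
    ];
    assert_eq!(state[&1][&2], 2);
    assert!(state[&2].is_empty());
}
```
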


@ -25,8 +25,7 @@ use io::IoChannel;
use test_helpers::{self, EvmTestClient}; use test_helpers::{self, EvmTestClient};
use types::verification::Unverified; use types::verification::Unverified;
use verification::{VerifierType, queue::kind::BlockLike}; use verification::{VerifierType, queue::kind::BlockLike};
use super::SKIP_TESTS; use super::{HookType, SKIP_TESTS};
use super::HookType;
#[allow(dead_code)] #[allow(dead_code)]
fn skip_test(name: &String) -> bool { fn skip_test(name: &String) -> bool {
@ -56,7 +55,7 @@ pub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8],
let mut fail_unless = |cond: bool| { let mut fail_unless = |cond: bool| {
if !cond && !fail { if !cond && !fail {
failed.push(name.clone()); failed.push(name.clone());
flushln!("FAIL"); flushed_writeln!("FAIL");
fail = true; fail = true;
true true
} else { } else {
@ -64,7 +63,7 @@ pub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8],
} }
}; };
flush!(" - {}...", name); flushed_write!(" - {}...", name);
let spec = { let spec = {
let mut spec = match EvmTestClient::fork_spec_from_json(&blockchain.network) { let mut spec = match EvmTestClient::fork_spec_from_json(&blockchain.network) {
@ -123,9 +122,9 @@ pub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8],
} }
if !fail { if !fail {
flushln!("ok"); flushed_writeln!("OK");
} else { } else {
flushln!("fail"); flushed_writeln!("FAILED");
} }
start_stop_hook(&name, HookType::OnStop); start_stop_hook(&name, HookType::OnStop);


@ -37,8 +37,7 @@ pub fn json_difficulty_test<H: FnMut(&str, HookType)>(
for (name, test) in tests.into_iter() { for (name, test) in tests.into_iter() {
start_stop_hook(&name, HookType::OnStart); start_stop_hook(&name, HookType::OnStart);
flush!(" - {}...", name); flushed_writeln!(" - {}...", name);
println!(" - {}...", name);
let mut parent_header = Header::new(); let mut parent_header = Header::new();
let block_number: u64 = test.current_block_number.into(); let block_number: u64 = test.current_block_number.into();
@ -53,7 +52,7 @@ pub fn json_difficulty_test<H: FnMut(&str, HookType)>(
engine.populate_from_parent(&mut header, &parent_header); engine.populate_from_parent(&mut header, &parent_header);
let expected_difficulty: U256 = test.current_difficulty.into(); let expected_difficulty: U256 = test.current_difficulty.into();
assert_eq!(header.difficulty(), &expected_difficulty); assert_eq!(header.difficulty(), &expected_difficulty);
flushln!("ok"); flushed_writeln!("OK");
start_stop_hook(&name, HookType::OnStop); start_stop_hook(&name, HookType::OnStop);
} }


@ -0,0 +1,86 @@
//! Helper macros for running the `JSON tests`
/// Declares a test:
///
/// declare_test!(test_name, "path/to/folder/with/tests");
///
/// Declares a test but skip the named test files inside the folder (no extension):
///
/// declare_test!(skip => ["a-test-file", "other-test-file"], test_name, "path/to/folder/with/tests");
///
/// NOTE: a skipped test is considered a passing test as far as `cargo test` is concerned. Normally
/// one test corresponds to a folder full of test files, each of which may contain many tests.
#[macro_export]
macro_rules! declare_test {
(skip => $arr: expr, $id: ident, $name: expr) => {
#[cfg(test)]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, $arr);
}
};
(ignore => $id: ident, $name: expr) => {
#[cfg(test)]
#[ignore]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
};
(heavy => $id: ident, $name: expr) => {
#[cfg(test)]
#[cfg(feature = "test-heavy")]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
};
($id: ident, $name: expr) => {
#[cfg(test)]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
}
}
#[cfg(test)]
macro_rules! test {
($name: expr, $skip: expr) => {
$crate::json_tests::test_common::run_test_path(
std::path::Path::new(concat!("res/ethereum/tests/", $name)),
&$skip,
do_json_test,
&mut |_, _| ()
);
}
}
/// Similar to `print!` but flushes stdout in order to ensure the output is emitted immediately.
#[macro_export]
macro_rules! flushed_write {
($arg:expr) => ($crate::json_tests::macros::write_and_flush($arg.into()));
($($arg:tt)*) => ($crate::json_tests::macros::write_and_flush(format!("{}", format_args!($($arg)*))));
}
/// Similar to `println!` but flushes stdout in order to ensure the output is emitted immediately.
#[macro_export]
macro_rules! flushed_writeln {
($fmt:expr) => (flushed_write!(concat!($fmt, "\n")));
($fmt:expr, $($arg:tt)*) => (flushed_write!(concat!($fmt, "\n"), $($arg)*));
}
/// Write to stdout and flush (ignores errors)
#[doc(hidden)]
pub fn write_and_flush(s: String) {
if let Err(err) = std::io::Write::write_all(&mut std::io::stdout(), s.as_bytes()) {
error!(target: "json_tests", "io::Write::write_all to stdout failed because of: {:?}", err);
}
if let Err(err) = std::io::Write::flush(&mut std::io::stdout()) {
error!(target: "json_tests", "io::Write::flush stdout failed because of: {:?}", err);
}
}
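
A self-contained approximation of `write_and_flush` above; it reports failures with `eprintln!` so the sketch compiles on its own, whereas the real module routes the report through the crate's logger:

```rust
use std::io::Write;

// Write to stdout, then flush, logging failures instead of panicking.
fn write_and_flush(s: String) {
    let mut stdout = std::io::stdout();
    if let Err(err) = stdout.write_all(s.as_bytes()) {
        eprintln!("write_all to stdout failed: {:?}", err);
    }
    if let Err(err) = stdout.flush() {
        eprintln!("flushing stdout failed: {:?}", err);
    }
}

fn main() {
    write_and_flush(format!(" - {}...", "some_json_test"));
    write_and_flush("OK\n".to_string());
}
```
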


@ -14,23 +14,24 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>. // along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
//! Helpers and tests for operating on jsontests. //! Helpers and tests for operating on `JSON` tests.
#[macro_use] #[macro_use]
mod test_common; mod macros;
mod transaction;
mod executive;
mod state;
mod chain; mod chain;
mod trie; mod executive;
mod skip; mod skip;
mod state;
mod test_common;
mod transaction;
mod trie;
#[cfg(test)] #[cfg(test)]
mod difficulty; mod difficulty;
pub use self::test_common::HookType;
pub use self::executive::run_test_path as run_executive_test_path; pub use self::executive::run_test_path as run_executive_test_path;
pub use self::executive::run_test_file as run_executive_test_file; pub use self::executive::run_test_file as run_executive_test_file;
pub use self::test_common::HookType;
use self::skip::SKIP_TESTS; use self::skip::SKIP_TESTS;


@ -17,13 +17,10 @@
use std::path::Path; use std::path::Path;
use super::test_common::*; use super::test_common::*;
use pod::PodState; use pod::PodState;
use trace;
use ethjson;
use test_helpers::{EvmTestClient, EvmTestError, TransactErr, TransactSuccess}; use test_helpers::{EvmTestClient, EvmTestError, TransactErr, TransactSuccess};
use types::transaction::SignedTransaction; use types::transaction::SignedTransaction;
use vm::EnvInfo; use vm::EnvInfo;
use super::SKIP_TESTS; use super::SKIP_TESTS;
use super::HookType;
#[allow(dead_code)] #[allow(dead_code)]
fn skip_test(subname: &str, chain: &String, number: usize) -> bool { fn skip_test(subname: &str, chain: &String, number: usize) -> bool {
@ -84,25 +81,25 @@ pub fn json_chain_test<H: FnMut(&str, HookType)>(path: &Path, json_data: &[u8],
match result() { match result() {
Err(err) => { Err(err) => {
println!("{} !!! Unexpected internal error: {:?}", info, err); println!("{} !!! Unexpected internal error: {:?}", info, err);
flushln!("{} fail", info); flushed_writeln!("{} fail", info);
failed.push(name.clone()); failed.push(name.clone());
}, },
Ok(Ok(TransactSuccess { state_root, .. })) if state_root != post_root => { Ok(Ok(TransactSuccess { state_root, .. })) if state_root != post_root => {
println!("{} !!! State mismatch (got: {}, expect: {}", info, state_root, post_root); println!("{} !!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
flushln!("{} fail", info); flushed_writeln!("{} fail", info);
failed.push(name.clone()); failed.push(name.clone());
}, },
Ok(Err(TransactErr { state_root, ref error, .. })) if state_root != post_root => { Ok(Err(TransactErr { state_root, ref error, .. })) if state_root != post_root => {
println!("{} !!! State mismatch (got: {}, expect: {}", info, state_root, post_root); println!("{} !!! State mismatch (got: {}, expect: {}", info, state_root, post_root);
println!("{} !!! Execution error: {:?}", info, error); println!("{} !!! Execution error: {:?}", info, error);
flushln!("{} fail", info); flushed_writeln!("{} fail", info);
failed.push(name.clone()); failed.push(name.clone());
}, },
Ok(Err(TransactErr { error, .. })) => { Ok(Err(TransactErr { error, .. })) => {
flushln!("{} ok ({:?})", info, error); flushed_writeln!("{} ok ({:?})", info, error);
}, },
Ok(_) => { Ok(_) => {
flushln!("{} ok", info); flushed_writeln!("{} ok", info);
}, },
} }
} }


@ -19,6 +19,7 @@ use std::io::Read;
use std::fs::{File, read_dir}; use std::fs::{File, read_dir};
use std::path::Path; use std::path::Path;
use std::ffi::OsString; use std::ffi::OsString;
pub use ethereum_types::{H256, U256, Address}; pub use ethereum_types::{H256, U256, Address};
/// Indicate when to run the hook passed to test functions. /// Indicate when to run the hook passed to test functions.
@ -41,7 +42,7 @@ pub fn run_test_path<H: FnMut(&str, HookType)>(
if !skip.is_empty() { if !skip.is_empty() {
// todo[dvdplm] it's really annoying to have to use flushln here. Should be `info!(target: // todo[dvdplm] it's really annoying to have to use flushln here. Should be `info!(target:
// "json-tests", …)`. Issue https://github.com/paritytech/parity-ethereum/issues/11084 // "json-tests", …)`. Issue https://github.com/paritytech/parity-ethereum/issues/11084
flushln!("[run_test_path] Skipping tests in {}: {:?}", path.display(), skip); flushed_writeln!("[run_test_path] Skipping tests in {}: {:?}", path.display(), skip);
} }
let mut errors = Vec::new(); let mut errors = Vec::new();
run_test_path_inner(path, skip, runner, start_stop_hook, &mut errors); run_test_path_inner(path, skip, runner, start_stop_hook, &mut errors);
@ -121,63 +122,3 @@ pub fn run_test_file<H: FnMut(&str, HookType)>(
let empty: [String; 0] = []; let empty: [String; 0] = [];
assert_eq!(results, empty); assert_eq!(results, empty);
} }
#[cfg(test)]
macro_rules! test {
($name: expr, $skip: expr) => {
::json_tests::test_common::run_test_path(
::std::path::Path::new(concat!("res/ethereum/tests/", $name)),
&$skip,
do_json_test,
&mut |_, _| ()
);
}
}
/// Declares a test:
///
/// declare_test!(test_name, "path/to/folder/with/tests");
///
/// Declares a test but skip the named test files inside the folder (no extension):
///
/// declare_test!(skip => ["a-test-file", "other-test-file"], test_name, "path/to/folder/with/tests");
///
/// NOTE: a skipped test is considered a passing test as far as `cargo test` is concerned. Normally
/// one test corresponds to a folder full of test files, each of which may contain many tests.
#[macro_export]
macro_rules! declare_test {
(skip => $arr: expr, $id: ident, $name: expr) => {
#[cfg(test)]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, $arr);
}
};
(ignore => $id: ident, $name: expr) => {
#[cfg(test)]
#[ignore]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
};
(heavy => $id: ident, $name: expr) => {
#[cfg(test)]
#[cfg(feature = "test-heavy")]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
};
($id: ident, $name: expr) => {
#[cfg(test)]
#[test]
#[allow(non_snake_case)]
fn $id() {
test!($name, []);
}
}
}


@ -83,9 +83,6 @@ extern crate kvdb_rocksdb;
#[cfg(feature = "json-tests")] #[cfg(feature = "json-tests")]
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
#[cfg(any(test, feature = "json-tests"))]
#[macro_use]
extern crate macros;
#[cfg(any(test, feature = "test-helpers"))] #[cfg(any(test, feature = "test-helpers"))]
extern crate pod; extern crate pod;
#[cfg(any(test, feature = "blooms-db"))] #[cfg(any(test, feature = "blooms-db"))]


@ -38,8 +38,6 @@ use client_traits::{
BlockInfo, BlockChainClient, BlockChainReset, ChainInfo, BlockInfo, BlockChainClient, BlockChainReset, ChainInfo,
ImportExportBlocks, Tick, ImportBlock ImportExportBlocks, Tick, ImportBlock
}; };
use spec;
use stats;
use machine::executive::{Executive, TransactOptions}; use machine::executive::{Executive, TransactOptions};
use miner::{Miner, PendingOrdering, MinerService}; use miner::{Miner, PendingOrdering, MinerService};
use account_state::{State, CleanupMode, backend}; use account_state::{State, CleanupMode, backend};
@ -51,6 +49,14 @@ use test_helpers::{
use rustc_hex::ToHex; use rustc_hex::ToHex;
use registrar::RegistrarClient; use registrar::RegistrarClient;
fn into_u256_vec<'a, T, I>(iter: I) -> Vec<U256>
where
I: IntoIterator<Item = &'a T>,
T: Into<U256> + Clone + 'a,
{
iter.into_iter().cloned().map(Into::into).collect()
}
#[test] #[test]
fn imports_from_empty() { fn imports_from_empty() {
let db = test_helpers::new_db(); let db = test_helpers::new_db();
@ -204,32 +210,32 @@ fn can_collect_garbage() {
#[test] #[test]
fn can_generate_gas_price_median() { fn can_generate_gas_price_median() {
let client = generate_dummy_client_with_data(3, 1, slice_into![1, 2, 3]); let client = generate_dummy_client_with_data(3, 1, &into_u256_vec(&[1, 2, 3]));
assert_eq!(Some(&U256::from(2)), client.gas_price_corpus(3).median()); assert_eq!(Some(&U256::from(2)), client.gas_price_corpus(3).median());
let client = generate_dummy_client_with_data(4, 1, slice_into![1, 4, 3, 2]); let client = generate_dummy_client_with_data(4, 1, &into_u256_vec(&[1, 4, 3, 2]));
assert_eq!(Some(&U256::from(3)), client.gas_price_corpus(3).median()); assert_eq!(Some(&U256::from(3)), client.gas_price_corpus(3).median());
} }
#[test] #[test]
fn can_generate_gas_price_histogram() { fn can_generate_gas_price_histogram() {
let client = generate_dummy_client_with_data(20, 1, slice_into![6354,8593,6065,4842,7845,7002,689,4958,4250,6098,5804,4320,643,8895,2296,8589,7145,2000,2512,1408]); let client = generate_dummy_client_with_data(20, 1, &into_u256_vec(&[6354,8593,6065,4842,7845,7002,689,4958,4250,6098,5804,4320,643,8895,2296,8589,7145,2000,2512,1408]));
let hist = client.gas_price_corpus(20).histogram(5).unwrap(); let hist = client.gas_price_corpus(20).histogram(5).unwrap();
let correct_hist = stats::Histogram { bucket_bounds: vec_into![643, 2294, 3945, 5596, 7247, 8898], counts: vec![4,2,4,6,4] }; let correct_hist = stats::Histogram { bucket_bounds: into_u256_vec(&[643, 2294, 3945, 5596, 7247, 8898]), counts: vec![4,2,4,6,4] };
assert_eq!(hist, correct_hist); assert_eq!(hist, correct_hist);
} }
#[test] #[test]
fn empty_gas_price_histogram() { fn empty_gas_price_histogram() {
let client = generate_dummy_client_with_data(20, 0, slice_into![]); let client = generate_dummy_client_with_data(20, 0, &[]);
assert!(client.gas_price_corpus(20).histogram(5).is_none()); assert!(client.gas_price_corpus(20).histogram(5).is_none());
} }
#[test] #[test]
fn corpus_is_sorted() { fn corpus_is_sorted() {
let client = generate_dummy_client_with_data(2, 1, slice_into![U256::from_str("11426908979").unwrap(), U256::from_str("50426908979").unwrap()]); let client = generate_dummy_client_with_data(2, 1, &[U256::from_str("11426908979").unwrap(), U256::from_str("50426908979").unwrap()]);
let corpus = client.gas_price_corpus(20); let corpus = client.gas_price_corpus(20);
assert!(corpus[0] < corpus[1]); assert!(corpus[0] < corpus[1]);
} }
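
The new `into_u256_vec` helper stands in for the removed `slice_into!`/`vec_into!` macros: it turns a slice of plain integer literals into a `Vec<U256>` by cloning and mapping `Into::into`. A standalone sketch of the same conversion, assuming only the `ethereum-types` crate (the literal values are arbitrary, not taken from the tests above):

```rust
// Standalone sketch: converting integer literals into a Vec<U256>, as the new
// `into_u256_vec` helper does in place of the old `slice_into!` macro.
use ethereum_types::U256;

fn into_u256_vec<'a, T, I>(iter: I) -> Vec<U256>
where
    I: IntoIterator<Item = &'a T>,
    T: Into<U256> + Clone + 'a,
{
    iter.into_iter().cloned().map(Into::into).collect()
}

fn main() {
    // Arbitrary example values.
    let prices = into_u256_vec(&[1u64, 4, 3, 2]);
    assert_eq!(prices.len(), 4);
    assert_eq!(prices[1], U256::from(4));
}
```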

View File

@ -23,7 +23,6 @@ indexmap = "1.3.0"
keccak-hash = "0.4.0" keccak-hash = "0.4.0"
light = { package = "ethcore-light", path = "../light" } light = { package = "ethcore-light", path = "../light" }
log = "0.4" log = "0.4"
macros = { path = "../../util/macros" }
network = { package = "ethcore-network", path = "../../util/network" } network = { package = "ethcore-network", path = "../../util/network" }
parity-runtime = "0.1.1" parity-runtime = "0.1.1"
parity-crypto = { version = "0.5.0", features = ["publickey"] } parity-crypto = { version = "0.5.0", features = ["publickey"] }

View File

@ -49,7 +49,6 @@ use light::net::{
Capabilities, Handler as LightHandler, EventContext, SampleStore, Capabilities, Handler as LightHandler, EventContext, SampleStore,
}; };
use log::{trace, warn}; use log::{trace, warn};
use macros::hash_map;
use network::{ use network::{
client_version::ClientVersion, client_version::ClientVersion,
NetworkProtocolHandler, NetworkContext, PeerId, ProtocolId, NetworkProtocolHandler, NetworkContext, PeerId, ProtocolId,
@ -795,7 +794,11 @@ impl NetworkConfiguration {
max_peers: self.max_peers, max_peers: self.max_peers,
min_peers: self.min_peers, min_peers: self.min_peers,
max_handshakes: self.max_pending_peers, max_handshakes: self.max_pending_peers,
reserved_protocols: hash_map![WARP_SYNC_PROTOCOL_ID => self.snapshot_peers], reserved_protocols: {
let mut reserved = HashMap::new();
reserved.insert(WARP_SYNC_PROTOCOL_ID, self.snapshot_peers);
reserved
},
reserved_nodes: self.reserved_nodes, reserved_nodes: self.reserved_nodes,
ip_filter: self.ip_filter, ip_filter: self.ip_filter,
non_reserved_mode: if self.allow_non_reserved { NonReservedPeerMode::Accept } else { NonReservedPeerMode::Deny }, non_reserved_mode: if self.allow_non_reserved { NonReservedPeerMode::Accept } else { NonReservedPeerMode::Deny },
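
With `hash_map!` gone, the single-entry reserved-protocols map is now built inline with `HashMap::new` plus one `insert`. The `maplit` crate used elsewhere in this change offers an equivalent one-liner; a minimal sketch, in which the protocol id and peer count are illustrative placeholders rather than the sync crate's real constants:

```rust
// Two equivalent ways to build a one-entry map. The protocol id and the peer
// count below are placeholders.
use std::collections::HashMap;

use maplit::hashmap;

fn main() {
    let protocol_id: [u8; 3] = *b"par";
    let snapshot_peers: u32 = 25;

    // Explicit construction, as the hunk above now does.
    let mut explicit = HashMap::new();
    explicit.insert(protocol_id, snapshot_peers);

    // The same map via maplit's `hashmap!` macro.
    let via_maplit = hashmap![protocol_id => snapshot_peers];

    assert_eq!(explicit, via_maplit);
}
```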

View File

@ -93,7 +93,6 @@ impl TransactionsStats {
mod tests { mod tests {
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use super::{Stats, TransactionsStats, NodeId, H256}; use super::{Stats, TransactionsStats, NodeId, H256};
use macros::hash_map;
#[test] #[test]
fn should_keep_track_of_propagations() { fn should_keep_track_of_propagations() {
@ -112,10 +111,12 @@ mod tests {
let stats = stats.get(&hash); let stats = stats.get(&hash);
assert_eq!(stats, Some(&Stats { assert_eq!(stats, Some(&Stats {
first_seen: 5, first_seen: 5,
propagated_to: hash_map![ propagated_to: {
enodeid1 => 2, let mut map = HashMap::new();
enodeid2 => 1 map.insert(enodeid1, 2);
], map.insert(enodeid2, 1);
map
},
})); }));
} }

View File

@ -12,7 +12,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
[dev-dependencies] [dev-dependencies]
macros = { path = "../util/macros" } maplit = "1.0.2"
[features] [features]
test-helpers = [] test-helpers = []

View File

@ -139,8 +139,8 @@ pub struct PricingAt {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{Builtin, BuiltinCompat, BTreeMap, Pricing, PricingAt, Linear, Modexp, AltBn128ConstOperations}; use super::{Builtin, BuiltinCompat, Pricing, PricingAt, Linear, Modexp, AltBn128ConstOperations};
use macros::map; use maplit::btreemap;
#[test] #[test]
fn builtin_deserialization() { fn builtin_deserialization() {
@ -150,7 +150,7 @@ mod tests {
}"#; }"#;
let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into(); let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into();
assert_eq!(builtin.name, "ecrecover"); assert_eq!(builtin.name, "ecrecover");
assert_eq!(builtin.pricing, map![ assert_eq!(builtin.pricing, btreemap![
0 => PricingAt { 0 => PricingAt {
info: None, info: None,
price: Pricing::Linear(Linear { base: 3000, word: 0 }) price: Pricing::Linear(Linear { base: 3000, word: 0 })
@ -174,7 +174,7 @@ mod tests {
}"#; }"#;
let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into(); let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into();
assert_eq!(builtin.name, "ecrecover"); assert_eq!(builtin.name, "ecrecover");
assert_eq!(builtin.pricing, map![ assert_eq!(builtin.pricing, btreemap![
0 => PricingAt { 0 => PricingAt {
info: None, info: None,
price: Pricing::Linear(Linear { base: 3000, word: 0 }) price: Pricing::Linear(Linear { base: 3000, word: 0 })
@ -195,7 +195,7 @@ mod tests {
}"#; }"#;
let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into(); let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into();
assert_eq!(builtin.name, "blake2_f"); assert_eq!(builtin.name, "blake2_f");
assert_eq!(builtin.pricing, map![ assert_eq!(builtin.pricing, btreemap![
0xffffff => PricingAt { 0xffffff => PricingAt {
info: None, info: None,
price: Pricing::Blake2F { gas_per_round: 123 } price: Pricing::Blake2F { gas_per_round: 123 }
@ -215,10 +215,10 @@ mod tests {
}"#; }"#;
let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into(); let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into();
assert_eq!(builtin.name, "alt_bn128_mul"); assert_eq!(builtin.name, "alt_bn128_mul");
assert_eq!(builtin.pricing, map![ assert_eq!(builtin.pricing, btreemap![
100500 => PricingAt { 100500 => PricingAt {
info: None, info: None,
price: Pricing::AltBn128ConstOperations(AltBn128ConstOperations { price: Pricing::AltBn128ConstOperations(AltBn128ConstOperations {
price: 123, price: 123,
}), }),
} }
@ -235,7 +235,7 @@ mod tests {
let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into(); let builtin: Builtin = serde_json::from_str::<BuiltinCompat>(s).unwrap().into();
assert_eq!(builtin.name, "late_start"); assert_eq!(builtin.name, "late_start");
assert_eq!(builtin.pricing, map![ assert_eq!(builtin.pricing, btreemap![
100_000 => PricingAt { 100_000 => PricingAt {
info: None, info: None,
price: Pricing::Modexp(Modexp { divisor: 5 }) price: Pricing::Modexp(Modexp { divisor: 5 })
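
`maplit::btreemap!` is a drop-in replacement for the removed `map!` macro: it expands to a fresh `BTreeMap` with the listed key/value pairs inserted. A self-contained sketch with generic keys and values (not the builtin pricing types above):

```rust
// Minimal sketch of maplit::btreemap! versus manual BTreeMap construction.
use std::collections::BTreeMap;

use maplit::btreemap;

fn main() {
    let via_macro = btreemap![
        0u64 => "linear",
        100_500 => "alt_bn128_const",
    ];

    let mut manual: BTreeMap<u64, &str> = BTreeMap::new();
    manual.insert(0, "linear");
    manual.insert(100_500, "alt_bn128_const");

    assert_eq!(via_macro, manual);
    // BTreeMap keeps keys sorted, so iteration order is deterministic.
    assert_eq!(via_macro.keys().copied().collect::<Vec<_>>(), vec![0, 100_500]);
}
```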

View File

@ -121,15 +121,12 @@ pub struct Env {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::{ use std::str::FromStr;
collections::BTreeMap,
str::FromStr
};
use super::{Address, Bytes, Call, Env, H256, MaybeEmpty, State, Transaction, Uint, Vm}; use super::{Address, Bytes, Call, Env, H256, MaybeEmpty, State, Transaction, Uint, Vm};
use crate::spec::{Account, HashOrMap}; use crate::spec::{Account, HashOrMap};
use ethereum_types::{U256, H160 as Hash160, H256 as Hash256}; use ethereum_types::{U256, H160 as Hash160, H256 as Hash256};
use macros::map; use maplit::btreemap;
use rustc_hex::FromHex; use rustc_hex::FromHex;
const TEST_CODE: &str = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01600055"; const TEST_CODE: &str = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01600055";
@ -207,14 +204,14 @@ mod tests {
assert_eq!(vm.output, Some(Bytes::new(Vec::new()))); assert_eq!(vm.output, Some(Bytes::new(Vec::new())));
assert_eq!(vm.pre_state, State( assert_eq!(vm.pre_state, State(
HashOrMap::Map( HashOrMap::Map(
map![ btreemap![
Address(Hash160::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap()) => Account { Address(Hash160::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap()) => Account {
builtin: None, builtin: None,
balance: Some(Uint(0x0de0b6b3a7640000_u64.into())), balance: Some(Uint(0x0de0b6b3a7640000_u64.into())),
code: Some(Bytes::new(TEST_CODE.from_hex().unwrap())), code: Some(Bytes::new(TEST_CODE.from_hex().unwrap())),
constructor: None, constructor: None,
nonce: Some(Uint(0.into())), nonce: Some(Uint(0.into())),
storage: Some(map![]), storage: Some(btreemap![]),
version: None, version: None,
} }
])) ]))
@ -222,14 +219,14 @@ mod tests {
assert_eq!(vm.post_state, Some( assert_eq!(vm.post_state, Some(
State( State(
HashOrMap::Map( HashOrMap::Map(
map![ btreemap![
Address(Hash160::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap()) => Account { Address(Hash160::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap()) => Account {
builtin: None, builtin: None,
balance: Some(Uint(0x0de0b6b3a7640000_u64.into())), balance: Some(Uint(0x0de0b6b3a7640000_u64.into())),
code: Some(Bytes::new(TEST_CODE.from_hex().unwrap())), code: Some(Bytes::new(TEST_CODE.from_hex().unwrap())),
constructor: None, constructor: None,
nonce: Some(Uint(0.into())), nonce: Some(Uint(0.into())),
storage: Some(map![ storage: Some(btreemap![
Uint(0.into()) => Uint(U256::from_str("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe").unwrap()) Uint(0.into()) => Uint(U256::from_str("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe").unwrap())
]), ]),
version: None, version: None,

View File

@ -77,7 +77,7 @@ ethcore-io = { path = "../util/io" }
ethcore-network = { path = "../util/network" } ethcore-network = { path = "../util/network" }
ethjson = { path = "../json", features = ["test-helpers"] } ethjson = { path = "../json", features = ["test-helpers"] }
fake-fetch = { path = "../util/fake-fetch" } fake-fetch = { path = "../util/fake-fetch" }
macros = { path = "../util/macros" } maplit = "1.0.2"
spec = { path = "../ethcore/spec" } spec = { path = "../ethcore/spec" }
pretty_assertions = "0.1" pretty_assertions = "0.1"
transaction-pool = "2.0.1" transaction-pool = "2.0.1"

View File

@ -120,7 +120,7 @@ extern crate pretty_assertions;
#[cfg(test)] #[cfg(test)]
#[macro_use] #[macro_use]
extern crate macros; extern crate maplit;
#[cfg(test)] #[cfg(test)]
extern crate fake_fetch; extern crate fake_fetch;

View File

@ -113,16 +113,16 @@ impl SyncProvider for TestSyncProvider {
} }
fn transactions_stats(&self) -> BTreeMap<H256, TransactionStats> { fn transactions_stats(&self) -> BTreeMap<H256, TransactionStats> {
map![ btreemap![
H256::from_low_u64_be(1) => TransactionStats { H256::from_low_u64_be(1) => TransactionStats {
first_seen: 10, first_seen: 10,
propagated_to: map![ propagated_to: btreemap![
H512::from_low_u64_be(128) => 16 H512::from_low_u64_be(128) => 16
], ],
}, },
H256::from_low_u64_be(5) => TransactionStats { H256::from_low_u64_be(5) => TransactionStats {
first_seen: 16, first_seen: 16,
propagated_to: map![ propagated_to: btreemap![
H512::from_low_u64_be(16) => 1 H512::from_low_u64_be(16) => 1
], ],
} }

View File

@ -206,10 +206,16 @@ impl<T: Serialize> Serialize for Rich<T> {
mod tests { mod tests {
use std::collections::BTreeMap; use std::collections::BTreeMap;
use ethereum_types::{H64, H160, H256, U256, Bloom as H2048}; use ethereum_types::{H64, H160, H256, U256, Bloom as H2048};
use serde_json;
use v1::types::{Transaction, Bytes}; use v1::types::{Transaction, Bytes};
use super::{Block, RichBlock, BlockTransactions, Header, RichHeader}; use super::{Block, RichBlock, BlockTransactions, Header, RichHeader};
fn default_extra_info() -> BTreeMap<String, String> {
btreemap![
"mixHash".into() => format!("{:?}", H256::default()),
"nonce".into() => format!("{:?}", H64::default())
]
}
#[test] #[test]
fn test_serialize_block_transactions() { fn test_serialize_block_transactions() {
let t = BlockTransactions::Full(vec![Transaction::default()]); let t = BlockTransactions::Full(vec![Transaction::default()]);
@ -248,10 +254,7 @@ mod tests {
let serialized_block = serde_json::to_string(&block).unwrap(); let serialized_block = serde_json::to_string(&block).unwrap();
let rich_block = RichBlock { let rich_block = RichBlock {
inner: block, inner: block,
extra_info: map![ extra_info: default_extra_info(),
"mixHash".into() => format!("{:?}", H256::zero()),
"nonce".into() => format!("{:?}", H64::default())
],
}; };
let serialized_rich_block = serde_json::to_string(&rich_block).unwrap(); let serialized_rich_block = serde_json::to_string(&rich_block).unwrap();
@ -286,10 +289,7 @@ mod tests {
let serialized_block = serde_json::to_string(&block).unwrap(); let serialized_block = serde_json::to_string(&block).unwrap();
let rich_block = RichBlock { let rich_block = RichBlock {
inner: block, inner: block,
extra_info: map![ extra_info: default_extra_info(),
"mixHash".into() => format!("{:?}", H256::zero()),
"nonce".into() => format!("{:?}", H64::default())
],
}; };
let serialized_rich_block = serde_json::to_string(&rich_block).unwrap(); let serialized_rich_block = serde_json::to_string(&rich_block).unwrap();
@ -321,10 +321,7 @@ mod tests {
let serialized_header = serde_json::to_string(&header).unwrap(); let serialized_header = serde_json::to_string(&header).unwrap();
let rich_header = RichHeader { let rich_header = RichHeader {
inner: header, inner: header,
extra_info: map![ extra_info: default_extra_info(),
"mixHash".into() => format!("{:?}", H256::zero()),
"nonce".into() => format!("{:?}", H64::default())
],
}; };
let serialized_rich_header = serde_json::to_string(&rich_header).unwrap(); let serialized_rich_header = serde_json::to_string(&rich_header).unwrap();

View File

@ -195,8 +195,6 @@ pub struct ChainStatus {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use serde_json;
use std::collections::BTreeMap;
use super::{SyncInfo, SyncStatus, Peers, TransactionStats, ChainStatus, H512}; use super::{SyncInfo, SyncStatus, Peers, TransactionStats, ChainStatus, H512};
#[test] #[test]
@ -240,9 +238,7 @@ mod tests {
fn test_serialize_transaction_stats() { fn test_serialize_transaction_stats() {
let stats = TransactionStats { let stats = TransactionStats {
first_seen: 100, first_seen: 100,
propagated_to: map![ propagated_to: btreemap![H512::from_low_u64_be(10) => 50],
H512::from_low_u64_be(10) => 50
],
}; };
let serialized = serde_json::to_string(&stats).unwrap(); let serialized = serde_json::to_string(&stats).unwrap();

View File

@ -674,8 +674,6 @@ impl From<(H256, Executed)> for TraceResultsWithTransactionHash {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use serde_json;
use std::collections::BTreeMap;
use v1::types::Bytes; use v1::types::Bytes;
use trace::TraceError; use trace::TraceError;
use ethereum_types::Address; use ethereum_types::Address;
@ -875,12 +873,12 @@ mod tests {
#[test] #[test]
fn test_statediff_serialize() { fn test_statediff_serialize() {
let t = StateDiff(map![ let t = StateDiff(btreemap![
Address::from_low_u64_be(42) => AccountDiff { Address::from_low_u64_be(42) => AccountDiff {
balance: Diff::Same, balance: Diff::Same,
nonce: Diff::Born(1.into()), nonce: Diff::Born(1.into()),
code: Diff::Same, code: Diff::Same,
storage: map![ storage: btreemap![
H256::from_low_u64_be(42) => Diff::Same H256::from_low_u64_be(42) => Diff::Same
] ]
}, },
@ -888,7 +886,7 @@ mod tests {
balance: Diff::Same, balance: Diff::Same,
nonce: Diff::Changed(ChangedType { from: 1.into(), to: 0.into() }), nonce: Diff::Changed(ChangedType { from: 1.into(), to: 0.into() }),
code: Diff::Died(vec![96].into()), code: Diff::Died(vec![96].into()),
storage: map![], storage: btreemap![],
} }
]); ]);
let serialized = serde_json::to_string(&t).unwrap(); let serialized = serde_json::to_string(&t).unwrap();

View File

@ -1,5 +0,0 @@
[package]
name = "macros"
version = "0.1.0"
authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018"

View File

@ -1,80 +0,0 @@
// Copyright 2015-2020 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.
// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
//! Utils common types and macros global reexport.
use std::io;
#[macro_export]
macro_rules! vec_into {
( $( $x:expr ),* ) => {
vec![ $( $x.into() ),* ]
}
}
#[macro_export]
macro_rules! slice_into {
( $( $x:expr ),* ) => {
&[ $( $x.into() ),* ]
}
}
#[macro_export]
macro_rules! hash_map {
() => { HashMap::new() };
( $( $x:expr => $y:expr ),* ) => {{
let mut x = HashMap::new();
$(
x.insert($x, $y);
)*
x
}}
}
#[macro_export]
macro_rules! map {
() => { BTreeMap::new() };
( $( $x:expr => $y:expr ),* ) => {{
let mut x = BTreeMap::new();
$(
x.insert($x, $y);
)*
x
}}
}
#[macro_export]
macro_rules! flush {
($arg:expr) => ($crate::flush($arg.into()));
($($arg:tt)*) => ($crate::flush(format!("{}", format_args!($($arg)*))));
}
#[macro_export]
macro_rules! flushln {
($fmt:expr) => (flush!(concat!($fmt, "\n")));
($fmt:expr, $($arg:tt)*) => (flush!(concat!($fmt, "\n"), $($arg)*));
}
#[doc(hidden)]
pub fn flush(s: String) {
let _ = io::Write::write(&mut io::stdout(), s.as_bytes());
let _ = io::Write::flush(&mut io::stdout());
}
#[test]
fn test_flush() {
flushln!("hello_world {:?}", 1);
}
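
The removed `flush` helper above writes to stdout and discards I/O errors from both the write and the flush (`let _ = ...`). A minimal sketch of one way to keep the behaviour while surfacing failures through the `log` crate (an assumed dependency; this is not code from the change itself):

```rust
// Sketch: explicit write-and-flush to stdout, reporting failures via `log`
// (which typically emits to stderr) instead of silently discarding them.
use std::io::{self, Write};

use log::warn;

fn flush_line(s: &str) {
    let mut out = io::stdout();
    if let Err(e) = out.write_all(s.as_bytes()) {
        warn!("could not write to stdout: {}", e);
    }
    if let Err(e) = out.flush() {
        warn!("could not flush stdout: {}", e);
    }
}

fn main() {
    flush_line("hello world\n");
}
```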

View File

@ -2,12 +2,14 @@
name = "migration-rocksdb" name = "migration-rocksdb"
version = "0.1.0" version = "0.1.0"
authors = ["Parity Technologies <admin@parity.io>"] authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018"
license = "GPL-3.0"
[dependencies] [dependencies]
log = "0.4" log = "0.4"
macros = { path = "../macros" }
kvdb = "0.4.0" kvdb = "0.4.0"
kvdb-rocksdb = "0.5.0" kvdb-rocksdb = "0.5.0"
[dev-dependencies] [dev-dependencies]
tempdir = "0.3" tempdir = "0.3"
maplit = "1.0.2"

View File

@ -16,19 +16,12 @@
//! DB Migration module. //! DB Migration module.
#[macro_use]
extern crate log;
#[macro_use]
extern crate macros;
extern crate kvdb;
extern crate kvdb_rocksdb;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use std::{fs, io, error}; use std::{fs, io, error};
use log::trace;
use kvdb::DBTransaction; use kvdb::DBTransaction;
use kvdb_rocksdb::{CompactionProfile, Database, DatabaseConfig}; use kvdb_rocksdb::{CompactionProfile, Database, DatabaseConfig};
@ -309,29 +302,3 @@ impl Manager {
self.migrations.iter_mut().filter(|m| m.version() > version).collect() self.migrations.iter_mut().filter(|m| m.version() > version).collect()
} }
} }
/// Prints a dot every `max` ticks
pub struct Progress {
current: usize,
max: usize,
}
impl Default for Progress {
fn default() -> Self {
Progress {
current: 0,
max: 100_000,
}
}
}
impl Progress {
/// Tick progress meter.
pub fn tick(&mut self) {
self.current += 1;
if self.current == self.max {
self.current = 0;
flush!(".");
}
}
}

View File

@ -18,19 +18,15 @@
//! A random temp directory is created. A database is created within it, and migrations //! A random temp directory is created. A database is created within it, and migrations
//! are performed in temp sub-directories. //! are performed in temp sub-directories.
#[macro_use]
extern crate macros;
extern crate tempdir;
extern crate kvdb_rocksdb;
extern crate migration_rocksdb as migration;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use tempdir::TempDir;
use kvdb_rocksdb::{Database, DatabaseConfig}; use kvdb_rocksdb::{Database, DatabaseConfig};
use migration::{Batch, Config, SimpleMigration, Migration, Manager, ChangeColumns}; use maplit::btreemap;
use migration_rocksdb::{Batch, Config, SimpleMigration, Migration, Manager, ChangeColumns};
use tempdir::TempDir;
#[inline] #[inline]
fn db_path(path: &Path) -> PathBuf { fn db_path(path: &Path) -> PathBuf {
@ -114,8 +110,8 @@ fn one_simple_migration() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
let expected = map![vec![0x11] => vec![0x22], vec![1, 0x11] => vec![1, 0x22]]; let expected = btreemap![vec![0x11] => vec![0x22], vec![1, 0x11] => vec![1, 0x22]];
manager.add_migration(Migration0).unwrap(); manager.add_migration(Migration0).unwrap();
let end_path = manager.execute(&db_path, 0).unwrap(); let end_path = manager.execute(&db_path, 0).unwrap();
@ -129,7 +125,7 @@ fn no_migration_needed() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
manager.add_migration(Migration0).unwrap(); manager.add_migration(Migration0).unwrap();
manager.execute(&db_path, 1).unwrap(); manager.execute(&db_path, 1).unwrap();
@ -141,7 +137,7 @@ fn wrong_adding_order() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
manager.add_migration(Migration1).unwrap(); manager.add_migration(Migration1).unwrap();
manager.add_migration(Migration0).unwrap(); manager.add_migration(Migration0).unwrap();
@ -152,8 +148,8 @@ fn multiple_migrations() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
let expected = map![vec![0x11] => vec![], vec![1, 0x11] => vec![]]; let expected = btreemap![vec![0x11] => vec![], vec![1, 0x11] => vec![]];
manager.add_migration(Migration0).unwrap(); manager.add_migration(Migration0).unwrap();
manager.add_migration(Migration1).unwrap(); manager.add_migration(Migration1).unwrap();
@ -167,8 +163,8 @@ fn second_migration() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
let expected = map![vec![] => vec![], vec![1] => vec![]]; let expected = btreemap![vec![] => vec![], vec![1] => vec![]];
manager.add_migration(Migration0).unwrap(); manager.add_migration(Migration0).unwrap();
manager.add_migration(Migration1).unwrap(); manager.add_migration(Migration1).unwrap();
@ -182,8 +178,8 @@ fn first_and_noop_migration() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
let expected = map![vec![0x11] => vec![0x22], vec![1, 0x11] => vec![1, 0x22]]; let expected = btreemap![vec![0x11] => vec![0x22], vec![1, 0x11] => vec![1, 0x22]];
manager.add_migration(Migration0).expect("Migration0 can be added"); manager.add_migration(Migration0).expect("Migration0 can be added");
let end_path = manager.execute(&db_path, 0).expect("Migration0 runs clean"); let end_path = manager.execute(&db_path, 0).expect("Migration0 runs clean");
@ -196,8 +192,8 @@ fn noop_and_second_migration() {
let tempdir = TempDir::new("").unwrap(); let tempdir = TempDir::new("").unwrap();
let db_path = db_path(tempdir.path()); let db_path = db_path(tempdir.path());
let mut manager = Manager::new(Config::default()); let mut manager = Manager::new(Config::default());
make_db(&db_path, map![vec![] => vec![], vec![1] => vec![1]]); make_db(&db_path, btreemap![vec![] => vec![], vec![1] => vec![1]]);
let expected = map![vec![] => vec![], vec![1] => vec![]]; let expected = btreemap![vec![] => vec![], vec![1] => vec![]];
manager.add_migration(Migration1).unwrap(); manager.add_migration(Migration1).unwrap();
let end_path = manager.execute(&db_path, 0).unwrap(); let end_path = manager.execute(&db_path, 0).unwrap();