diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4a7ba9d44..94ac69d2c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -8,7 +8,6 @@ variables: RUST_BACKTRACE: "1" RUSTFLAGS: "" CARGOFLAGS: "" - NIGHTLY: "nigtly" cache: key: "$CI_BUILD_STAGE/$CI_BUILD_REF_NAME" untracked: true @@ -21,7 +20,7 @@ linux-stable: - stable - triggers script: - - cargo build --release $CARGOFLAGS + - cargo build -j $(nproc) --release $CARGOFLAGS - strip target/release/parity - md5sum target/release/parity > parity.md5 - sh scripts/deb-build.sh amd64 @@ -53,7 +52,7 @@ linux-beta: - stable - triggers script: - - cargo build --release $CARGOFLAGS + - cargo build -j $(nproc) --release $CARGOFLAGS - strip target/release/parity tags: - rust @@ -72,7 +71,7 @@ linux-nightly: - stable - triggers script: - - cargo build --release $CARGOFLAGS + - cargo build -j $(nproc) --release $CARGOFLAGS - strip target/release/parity tags: - rust @@ -93,7 +92,7 @@ linux-centos: script: - export CXX="g++" - export CC="gcc" - - cargo build --release $CARGOFLAGS + - cargo build -j $(nproc) --release $CARGOFLAGS - strip target/release/parity - md5sum target/release/parity > parity.md5 - aws configure set aws_access_key_id $s3_key @@ -120,7 +119,7 @@ linux-i686: script: - export HOST_CC=gcc - export HOST_CXX=g++ - - cargo build --target i686-unknown-linux-gnu --release $CARGOFLAGS + - cargo build -j $(nproc) --target i686-unknown-linux-gnu --release $CARGOFLAGS - strip target/i686-unknown-linux-gnu/release/parity - md5sum target/i686-unknown-linux-gnu/release/parity > parity.md5 - sh scripts/deb-build.sh i386 @@ -162,7 +161,7 @@ linux-armv7: - echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build --target armv7-unknown-linux-gnueabihf --release $CARGOFLAGS + - cargo build -j $(nproc) --target armv7-unknown-linux-gnueabihf --release $CARGOFLAGS - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity - md5sum target/armv7-unknown-linux-gnueabihf/release/parity > parity.md5 - sh scripts/deb-build.sh armhf @@ -204,7 +203,7 @@ linux-arm: - echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build --target arm-unknown-linux-gnueabihf --release $CARGOFLAGS + - cargo build -j $(nproc) --target arm-unknown-linux-gnueabihf --release $CARGOFLAGS - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity - md5sum target/arm-unknown-linux-gnueabihf/release/parity > parity.md5 - sh scripts/deb-build.sh armhf @@ -232,9 +231,9 @@ linux-armv6: stage: build image: ethcore/rust-armv6:latest only: - - beta - - tags - - stable +# - beta +# - tags +# - stable - triggers script: - export CC=arm-linux-gnueabi-gcc @@ -246,7 +245,7 @@ linux-armv6: - echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config - echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build --target arm-unknown-linux-gnueabi --release $CARGOFLAGS + - cargo build -j $(nproc) --target arm-unknown-linux-gnueabi --release $CARGOFLAGS - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity - md5sum target/arm-unknown-linux-gnueabi/release/parity > parity.md5 - aws configure set aws_access_key_id $s3_key @@ -281,7 +280,7 @@ linux-aarch64: - echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config - echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build 
--target aarch64-unknown-linux-gnu --release $CARGOFLAGS + - cargo build -j $(nproc) --target aarch64-unknown-linux-gnu --release $CARGOFLAGS - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity - md5sum target/aarch64-unknown-linux-gnu/release/parity > parity.md5 - sh scripts/deb-build.sh arm64 @@ -305,41 +304,6 @@ linux-aarch64: - target/aarch64-unknown-linux-gnu/release/parity name: "aarch64-unknown-linux-gnu_parity" allow_failure: true -#linux-alpine: -# stage: build -# image: ethcore/rust-alpine:latest -# only: -# - beta -# - tags -# - stable -# - triggers -# script: -# - export HOST_CC=gcc -# - export HOST_CXX=g++ -# - cargo build --release $CARGOFLAGS -# - strip target/release/parity -# - md5sum target/release/parity > parity.md5 -# - sh scripts/deb-build.sh arm64 -# - cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity -# - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") -# - dpkg-deb -b deb "parity_"$VER"_arm64.deb" -# - md5sum "parity_"$VER"_arm64.deb" > "parity_"$VER"_arm64.deb.md5" -# - aws configure set aws_access_key_id $s3_key -# - aws configure set aws_secret_access_key $s3_secret -# - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi -# - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu -# - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity -# - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5 -# - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" -# - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" -# tags: -# - rust -# - rust-alpine -# artifacts: -# paths: -# - target/aarch64-unknown-linux-gnu/release/parity -# name: "aarch64-unknown-linux-gnu_parity" -# allow_failure: true darwin: stage: build only: @@ -348,8 +312,8 @@ darwin: - stable - triggers script: - - cargo build --release -p ethstore $CARGOFLAGS - - cargo build --release $CARGOFLAGS + - cargo build -j 8 --release -p ethstore #$CARGOFLAGS + - cargo build -j 8 --release #$CARGOFLAGS - rm -rf parity.md5 - md5sum target/release/parity > parity.md5 - packagesbuild -v mac/Parity.pkgproj @@ -386,7 +350,7 @@ windows: - set RUST_BACKTRACE=1 - set RUSTFLAGS=%RUSTFLAGS% - rustup default stable-x86_64-pc-windows-msvc - - cargo build --release %CARGOFLAGS% + - cargo build -j 8 --release #%CARGOFLAGS% - curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll - curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe - signtool sign /f %keyfile% /p %certpass% target\release\parity.exe @@ -449,7 +413,7 @@ test-windows: - git submodule update --init --recursive script: - set RUST_BACKTRACE=1 - - cargo test --features json-tests -p rlp -p ethash -p ethcore -p ethcore-bigint -p ethcore-dapps -p ethcore-rpc -p ethcore-signer -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity %CARGOFLAGS% --verbose --release + - cargo -j 8 test --features json-tests -p rlp -p 
ethash -p ethcore -p ethcore-bigint -p ethcore-dapps -p ethcore-rpc -p ethcore-signer -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity %CARGOFLAGS% --verbose --release tags: - rust-windows allow_failure: true diff --git a/Cargo.lock b/Cargo.lock index a97bb776e..4d1bba344 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,7 +3,7 @@ name = "parity" version = "1.5.0" dependencies = [ "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)", "daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", @@ -146,15 +146,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "clippy" -version = "0.0.96" +version = "0.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy_lints 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "clippy_lints" -version = "0.0.96" +version = "0.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -224,7 +224,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "elastic-array" version = "0.6.0" -source = "git+https://github.com/ethcore/elastic-array#70e4012e691b732c7c4cb04e9232799e6aa268bc" +source = "git+https://github.com/ethcore/elastic-array#346f1ba5982576dab9d0b8fa178b50e1db0a21cd" dependencies = [ "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -280,7 +280,7 @@ dependencies = [ "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethash 1.4.0", @@ -333,7 +333,7 @@ dependencies = [ name = "ethcore-dapps" version = "1.5.0" dependencies = [ - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore-devtools 1.4.0", "ethcore-hash-fetch 1.5.0", @@ -490,7 +490,7 @@ dependencies = [ name = "ethcore-rpc" version = "1.5.0" dependencies = [ - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "ethash 1.4.0", "ethcore 1.5.0", "ethcore-devtools 1.4.0", @@ -520,7 +520,7 @@ dependencies = [ name = "ethcore-signer" version = "1.5.0" dependencies = [ - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 
(registry+https://github.com/rust-lang/crates.io-index)", "ethcore-devtools 1.4.0", "ethcore-io 1.5.0", @@ -559,7 +559,7 @@ version = "1.5.0" dependencies = [ "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", @@ -649,7 +649,7 @@ dependencies = [ name = "ethsync" version = "1.5.0" dependencies = [ - "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)", + "clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.5.0", "ethcore-io 1.5.0", @@ -1263,7 +1263,7 @@ dependencies = [ [[package]] name = "parity-ui-precompiled" version = "1.4.0" -source = "git+https://github.com/ethcore/js-precompiled.git#cb6836dddf8c9951e056283dcd9e105e97923d07" +source = "git+https://github.com/ethcore/js-precompiled.git#b3f0e3ddedf9afee35ca8384a74158df572973c7" dependencies = [ "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2005,8 +2005,8 @@ dependencies = [ "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27" "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" -"checksum clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "6eacf01b0aad84a0817703498f72d252df7c0faf6a5b86d0be4265f1829e459f" -"checksum clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "a49960c9aab544ce86b004dcb61620e8b898fea5fc0f697a028f460f48221ed6" +"checksum clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4fabf979ddf6419a313c1c0ada4a5b95cfd2049c56e8418d622d27b4b6ff32" +"checksum clippy_lints 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "ce96ec05bfe018a0d5d43da115e54850ea2217981ff0f2e462780ab9d594651a" "checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245" "checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc" "checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "" diff --git a/Cargo.toml b/Cargo.toml index a8d7ba794..3808cce95 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,7 @@ ethcore-hash-fetch = { path = "ethcore/hash-fetch" } rlp = { path = "util/rlp" } ethcore-stratum = { path = "stratum" } ethcore-dapps = { path = "dapps", optional = true } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} [target.'cfg(windows)'.dependencies] winapi = "0.2" diff --git a/dapps/Cargo.toml b/dapps/Cargo.toml index 15e537820..6be30884a 100644 --- a/dapps/Cargo.toml +++ b/dapps/Cargo.toml @@ -34,7 +34,7 @@ ethcore-hash-fetch = { path = "../ethcore/hash-fetch" } fetch = { path = "../util/fetch" } parity-ui = { path = "./ui" 
} -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} [build-dependencies] serde_codegen = { version = "0.8", optional = true } diff --git a/db/Cargo.toml b/db/Cargo.toml index 27eadef4a..2b4a19892 100644 --- a/db/Cargo.toml +++ b/db/Cargo.toml @@ -11,7 +11,7 @@ build = "build.rs" ethcore-ipc-codegen = { path = "../ipc/codegen" } [dependencies] -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} ethcore-devtools = { path = "../devtools" } ethcore-ipc = { path = "../ipc/rpc" } rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" } diff --git a/ethcore/Cargo.toml b/ethcore/Cargo.toml index 48fce064a..bd87c422f 100644 --- a/ethcore/Cargo.toml +++ b/ethcore/Cargo.toml @@ -29,7 +29,7 @@ byteorder = "0.5" transient-hashmap = "0.1" linked-hash-map = "0.3.0" evmjit = { path = "../evmjit", optional = true } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} ethash = { path = "../ethash" } ethcore-util = { path = "../util" } ethcore-io = { path = "../util/io" } diff --git a/ethcore/src/account_db.rs b/ethcore/src/account_db.rs index 0761b7fba..63524a442 100644 --- a/ethcore/src/account_db.rs +++ b/ethcore/src/account_db.rs @@ -121,10 +121,6 @@ impl<'db> HashDB for AccountDB<'db>{ fn remove(&mut self, _key: &H256) { unimplemented!() } - - fn get_aux(&self, hash: &[u8]) -> Option { - self.db.get_aux(hash) - } } /// DB backend wrapper for Account trie @@ -197,18 +193,6 @@ impl<'db> HashDB for AccountDBMut<'db>{ let key = combine_key(&self.address_hash, key); self.db.remove(&key) } - - fn insert_aux(&mut self, hash: Vec, value: Vec) { - self.db.insert_aux(hash, value); - } - - fn get_aux(&self, hash: &[u8]) -> Option { - self.db.get_aux(hash) - } - - fn remove_aux(&mut self, hash: &[u8]) { - self.db.remove_aux(hash); - } } struct Wrapping<'db>(&'db HashDB); diff --git a/ethcore/src/block.rs b/ethcore/src/block.rs index 54c2a7a02..bcbceb9aa 100644 --- a/ethcore/src/block.rs +++ b/ethcore/src/block.rs @@ -594,9 +594,9 @@ mod tests { use factory::Factories; use state_db::StateDB; use views::BlockView; - use util::Address; + use util::{Address, TrieFactory}; use util::hash::FixedHash; - + use util::trie::TrieSpec; use std::sync::Arc; /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header @@ -637,7 +637,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let b = OpenBlock::new(&*spec.engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = b.close_and_lock(); @@ -653,7 +653,7 @@ mod tests { let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap() .close_and_lock().seal(engine, vec![]).unwrap(); @@ -662,7 +662,7 @@ mod tests { let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - 
spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); assert_eq!(e.rlp_bytes(), orig_bytes); @@ -681,7 +681,7 @@ mod tests { let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let mut open_block = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let mut uncle1_header = Header::new(); @@ -697,7 +697,7 @@ mod tests { let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); let bytes = e.rlp_bytes(); diff --git a/ethcore/src/client/client.rs b/ethcore/src/client/client.rs index 0e0b292f9..85ec05b03 100644 --- a/ethcore/src/client/client.rs +++ b/ethcore/src/client/client.rs @@ -22,7 +22,7 @@ use std::time::{Instant}; use time::precise_time_ns; // util -use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock}; +use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock, Hashable}; use util::{journaldb, TrieFactory, Trie}; use util::trie::TrieSpec; use util::{U256, H256, Address, H2048, Uint, FixedHash}; @@ -172,9 +172,10 @@ impl Client { false => TrieSpec::Secure, }; + let trie_factory = TrieFactory::new(trie_spec); let journal_db = journaldb::new(db.clone(), config.pruning, ::db::COL_STATE); let mut state_db = StateDB::new(journal_db, config.state_cache_size); - if state_db.journal_db().is_empty() && try!(spec.ensure_db_good(&mut state_db)) { + if state_db.journal_db().is_empty() && try!(spec.ensure_db_good(&mut state_db, &trie_factory)) { let mut batch = DBTransaction::new(&db); try!(state_db.journal_under(&mut batch, 0, &spec.genesis_header().hash())); try!(db.write(batch).map_err(ClientError::Database)); @@ -216,7 +217,7 @@ impl Client { let factories = Factories { vm: EvmFactory::new(config.vm_type.clone(), config.jump_table_size), - trie: TrieFactory::new(trie_spec), + trie: trie_factory, accountdb: Default::default(), }; @@ -869,8 +870,8 @@ impl BlockChainClient for Client { } fn keep_alive(&self) { - let should_wake = match &*self.mode.lock() { - &Mode::Dark(..) | &Mode::Passive(..) => true, + let should_wake = match *self.mode.lock() { + Mode::Dark(..) | Mode::Passive(..) 
=> true, _ => false, }; if should_wake { @@ -952,6 +953,10 @@ impl BlockChainClient for Client { self.state_at(id).map(|s| s.nonce(address)) } + fn storage_root(&self, address: &Address, id: BlockID) -> Option { + self.state_at(id).and_then(|s| s.storage_root(address)) + } + fn block_hash(&self, id: BlockID) -> Option { let chain = self.chain.read(); Self::block_hash(&chain, id) @@ -969,7 +974,7 @@ impl BlockChainClient for Client { self.state_at(id).map(|s| s.storage_at(address, position)) } - fn list_accounts(&self, id: BlockID) -> Option> { + fn list_accounts(&self, id: BlockID, after: Option<&Address>, count: u64) -> Option> { if !self.factories.trie.is_fat() { trace!(target: "fatdb", "list_accounts: Not a fat DB"); return None; @@ -989,18 +994,68 @@ impl BlockChainClient for Client { } }; - let iter = match trie.iter() { + let mut iter = match trie.iter() { Ok(iter) => iter, _ => return None, }; + if let Some(after) = after { + if let Err(e) = iter.seek(after) { + trace!(target: "fatdb", "list_accounts: Couldn't seek the DB: {:?}", e); + } + } + let accounts = iter.filter_map(|item| { item.ok().map(|(addr, _)| Address::from_slice(&addr)) - }).collect(); + }).take(count as usize).collect(); Some(accounts) } + fn list_storage(&self, id: BlockID, account: &Address, after: Option<&H256>, count: u64) -> Option> { + if !self.factories.trie.is_fat() { + trace!(target: "fatdb", "list_stroage: Not a fat DB"); + return None; + } + + let state = match self.state_at(id) { + Some(state) => state, + _ => return None, + }; + + let root = match state.storage_root(account) { + Some(root) => root, + _ => return None, + }; + + let (_, db) = state.drop(); + let account_db = self.factories.accountdb.readonly(db.as_hashdb(), account.sha3()); + let trie = match self.factories.trie.readonly(account_db.as_hashdb(), &root) { + Ok(trie) => trie, + _ => { + trace!(target: "fatdb", "list_storage: Couldn't open the DB"); + return None; + } + }; + + let mut iter = match trie.iter() { + Ok(iter) => iter, + _ => return None, + }; + + if let Some(after) = after { + if let Err(e) = iter.seek(after) { + trace!(target: "fatdb", "list_accounts: Couldn't seek the DB: {:?}", e); + } + } + + let keys = iter.filter_map(|item| { + item.ok().map(|(key, _)| H256::from_slice(&key)) + }).take(count as usize).collect(); + + Some(keys) + } + fn transaction(&self, id: TransactionID) -> Option { self.transaction_address(id).and_then(|address| self.chain.read().transaction(&address)) } diff --git a/ethcore/src/client/test_client.rs b/ethcore/src/client/test_client.rs index 84ed25b37..c03b5920b 100644 --- a/ethcore/src/client/test_client.rs +++ b/ethcore/src/client/test_client.rs @@ -333,7 +333,7 @@ impl MiningBlockChainClient for TestBlockChainClient { let genesis_header = self.spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - self.spec.ensure_db_good(&mut db).unwrap(); + self.spec.ensure_db_good(&mut db, &TrieFactory::default()).unwrap(); let last_hashes = vec![genesis_header.hash()]; let mut open_block = OpenBlock::new( @@ -385,6 +385,10 @@ impl BlockChainClient for TestBlockChainClient { } } + fn storage_root(&self, _address: &Address, _id: BlockID) -> Option { + None + } + fn latest_nonce(&self, address: &Address) -> U256 { self.nonce(address, BlockID::Latest).unwrap() } @@ -416,10 +420,13 @@ impl BlockChainClient for TestBlockChainClient { } } - fn list_accounts(&self, _id: BlockID) -> Option> { + fn list_accounts(&self, _id: BlockID, _after: Option<&Address>, _count: u64) -> Option> 
{ None } + fn list_storage(&self, _id: BlockID, _account: &Address, _after: Option<&H256>, _count: u64) -> Option> { + None + } fn transaction(&self, _id: TransactionID) -> Option { None // Simple default. } diff --git a/ethcore/src/client/traits.rs b/ethcore/src/client/traits.rs index 67092e986..6d774e250 100644 --- a/ethcore/src/client/traits.rs +++ b/ethcore/src/client/traits.rs @@ -68,6 +68,10 @@ pub trait BlockChainClient : Sync + Send { /// May not fail on BlockID::Latest. fn nonce(&self, address: &Address, id: BlockID) -> Option; + /// Attempt to get address storage root at given block. + /// May not fail on BlockID::Latest. + fn storage_root(&self, address: &Address, id: BlockID) -> Option; + /// Get address nonce at the latest block's state. fn latest_nonce(&self, address: &Address) -> U256 { self.nonce(address, BlockID::Latest) @@ -114,7 +118,12 @@ pub trait BlockChainClient : Sync + Send { } /// Get a list of all accounts in the block `id`, if fat DB is in operation, otherwise `None`. - fn list_accounts(&self, id: BlockID) -> Option>; + /// If `after` is set the list starts with the following item. + fn list_accounts(&self, id: BlockID, after: Option<&Address>, count: u64) -> Option>; + + /// Get a list of all storage keys in the block `id`, if fat DB is in operation, otherwise `None`. + /// If `after` is set the list starts with the following item. + fn list_storage(&self, id: BlockID, account: &Address, after: Option<&H256>, count: u64) -> Option>; /// Get transaction with given hash. fn transaction(&self, id: TransactionID) -> Option; diff --git a/ethcore/src/engines/authority_round.rs b/ethcore/src/engines/authority_round.rs index 830fcf9c8..6c1d0a409 100644 --- a/ethcore/src/engines/authority_round.rs +++ b/ethcore/src/engines/authority_round.rs @@ -124,7 +124,7 @@ impl AuthorityRound { } fn step_proposer(&self, step: usize) -> &Address { - let ref p = self.our_params; + let p = &self.our_params; p.authorities.get(step % p.authority_n).expect("There are authority_n authorities; taking number modulo authority_n gives number in authority_n range; qed") } @@ -211,7 +211,7 @@ impl Engine for AuthorityRound { fn on_close_block(&self, _block: &mut ExecutedBlock) {} fn is_sealer(&self, author: &Address) -> Option { - let ref p = self.our_params; + let p = &self.our_params; Some(p.authorities.contains(author)) } @@ -279,7 +279,7 @@ impl Engine for AuthorityRound { let step = try!(header_step(header)); // Check if parent is from a previous step. 
- if step == try!(header_step(parent)) { + if step == try!(header_step(parent)) { trace!(target: "poa", "Multiple blocks proposed for step {}.", step); try!(Err(BlockError::DoubleVote(header.author().clone()))); } @@ -315,6 +315,7 @@ impl Engine for AuthorityRound { #[cfg(test)] mod tests { use util::*; + use util::trie::TrieSpec; use env_info::EnvInfo; use header::Header; use error::{Error, BlockError}; @@ -384,9 +385,9 @@ mod tests { let engine = &*spec.engine; let genesis_header = spec.genesis_header(); let mut db1 = get_temp_state_db().take(); - spec.ensure_db_good(&mut db1).unwrap(); + spec.ensure_db_good(&mut db1, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let mut db2 = get_temp_state_db().take(); - spec.ensure_db_good(&mut db2).unwrap(); + spec.ensure_db_good(&mut db2, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let b1 = OpenBlock::new(engine, Default::default(), false, db1, &genesis_header, last_hashes.clone(), addr1, (3141562.into(), 31415620.into()), vec![]).unwrap(); let b1 = b1.close_and_lock(); diff --git a/ethcore/src/engines/basic_authority.rs b/ethcore/src/engines/basic_authority.rs index 23a97967c..fb2f9bde6 100644 --- a/ethcore/src/engines/basic_authority.rs +++ b/ethcore/src/engines/basic_authority.rs @@ -184,6 +184,7 @@ impl Engine for BasicAuthority { #[cfg(test)] mod tests { use util::*; + use util::trie::TrieSpec; use block::*; use env_info::EnvInfo; use error::{BlockError, Error}; @@ -256,7 +257,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = b.close_and_lock(); diff --git a/ethcore/src/engines/instant_seal.rs b/ethcore/src/engines/instant_seal.rs index 3dc78d1a2..f50f7344b 100644 --- a/ethcore/src/engines/instant_seal.rs +++ b/ethcore/src/engines/instant_seal.rs @@ -68,6 +68,7 @@ impl Engine for InstantSeal { #[cfg(test)] mod tests { use util::*; + use util::trie::TrieSpec; use tests::helpers::*; use account_provider::AccountProvider; use spec::Spec; @@ -84,7 +85,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = b.close_and_lock(); diff --git a/ethcore/src/ethereum/ethash.rs b/ethcore/src/ethereum/ethash.rs index de2a85942..38a1df525 100644 --- a/ethcore/src/ethereum/ethash.rs +++ b/ethcore/src/ethereum/ethash.rs @@ -422,6 +422,7 @@ impl Header { #[cfg(test)] mod tests { use util::*; + use util::trie::TrieSpec; use block::*; use tests::helpers::*; use env_info::EnvInfo; @@ -438,7 +439,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = 
Arc::new(vec![genesis_header.hash()]); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = b.close(); @@ -452,7 +453,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let last_hashes = Arc::new(vec![genesis_header.hash()]); let mut b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let mut uncle = Header::new(); diff --git a/ethcore/src/ethereum/mod.rs b/ethcore/src/ethereum/mod.rs index e236924ad..3916e5ccc 100644 --- a/ethcore/src/ethereum/mod.rs +++ b/ethcore/src/ethereum/mod.rs @@ -72,6 +72,7 @@ pub fn new_morden() -> Spec { load(include_bytes!("../../res/ethereum/morden.jso #[cfg(test)] mod tests { use util::*; + use util::trie::TrieSpec; use state::*; use super::*; use tests::helpers::*; @@ -84,7 +85,7 @@ mod tests { let genesis_header = spec.genesis_header(); let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - spec.ensure_db_good(&mut db).unwrap(); + spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let s = State::from_existing(db, genesis_header.state_root().clone(), engine.account_start_nonce(), Default::default()).unwrap(); assert_eq!(s.balance(&"0000000000000000000000000000000000000001".into()), 1u64.into()); assert_eq!(s.balance(&"0000000000000000000000000000000000000002".into()), 1u64.into()); diff --git a/ethcore/src/evm/interpreter/gasometer.rs b/ethcore/src/evm/interpreter/gasometer.rs index beaaadac5..886120880 100644 --- a/ethcore/src/evm/interpreter/gasometer.rs +++ b/ethcore/src/evm/interpreter/gasometer.rs @@ -197,19 +197,17 @@ impl Gasometer { let address = u256_to_address(stack.peek(1)); let is_value_transfer = !stack.peek(2).is_zero(); - if instruction == instructions::CALL { - if ( - !schedule.no_empty && !ext.exists(&address) - ) || ( - schedule.no_empty && is_value_transfer && !ext.exists_and_not_null(&address) - ) { - gas = overflowing!(gas.overflow_add(schedule.call_new_account_gas.into())); - } - }; + if instruction == instructions::CALL && ( + (!schedule.no_empty && !ext.exists(&address)) + || + (schedule.no_empty && is_value_transfer && !ext.exists_and_not_null(&address)) + ) { + gas = overflowing!(gas.overflow_add(schedule.call_new_account_gas.into())); + } if is_value_transfer { gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into())); - }; + } let requested = *stack.peek(0); @@ -347,7 +345,7 @@ fn test_mem_gas_cost() { let result = gasometer.mem_gas_cost(&schedule, current_mem_size, &mem_size); // then - if let Ok(_) = result { + if result.is_ok() { assert!(false, "Should fail with OutOfGas"); } } diff --git a/ethcore/src/evm/tests.rs b/ethcore/src/evm/tests.rs index 7e69c0771..6cfc9a43e 100644 --- a/ethcore/src/evm/tests.rs +++ b/ethcore/src/evm/tests.rs @@ -95,7 +95,7 @@ impl Ext for FakeExt { } fn exists_and_not_null(&self, address: &Address) -> bool { - self.balances.get(address).map_or(false, |b| !b.is_zero()) + self.balances.get(address).map_or(false, |b| !b.is_zero()) } fn origin_balance(&self) -> U256 { @@ -103,7 +103,7 @@ impl Ext for FakeExt { } fn balance(&self, address: &Address) -> U256 { - *self.balances.get(address).unwrap() + self.balances[address] } fn 
blockhash(&self, number: &U256) -> H256 { diff --git a/ethcore/src/executive.rs b/ethcore/src/executive.rs index 5da105e2f..1dfd987c1 100644 --- a/ethcore/src/executive.rs +++ b/ethcore/src/executive.rs @@ -445,7 +445,7 @@ impl<'a> Executive<'a> { trace!("exec::finalize: Refunding refund_value={}, sender={}\n", refund_value, sender); // Below: NoEmpty is safe since the sender must already be non-null to have sent this transaction - self.state.add_balance(&sender, &refund_value, CleanupMode::NoEmpty); + self.state.add_balance(&sender, &refund_value, CleanupMode::NoEmpty); trace!("exec::finalize: Compensating author: fees_value={}, author={}\n", fees_value, &self.info.author); self.state.add_balance(&self.info.author, &fees_value, substate.to_cleanup_mode(&schedule)); @@ -514,9 +514,11 @@ impl<'a> Executive<'a> { #[cfg(test)] #[allow(dead_code)] mod tests { + use std::sync::Arc; use ethkey::{Generator, Random}; use super::*; - use util::*; + use util::{H256, U256, U512, Address, Uint, FixedHash, FromHex, FromStr}; + use util::bytes::BytesRef; use action_params::{ActionParams, ActionValue}; use env_info::EnvInfo; use evm::{Factory, VMType}; diff --git a/ethcore/src/miner/miner.rs b/ethcore/src/miner/miner.rs index 9d4e66918..a543e608d 100644 --- a/ethcore/src/miner/miner.rs +++ b/ethcore/src/miner/miner.rs @@ -151,7 +151,7 @@ impl GasPriceCalibrator { if Instant::now() >= self.next_calibration { let usd_per_tx = self.options.usd_per_tx; trace!(target: "miner", "Getting price info"); - if let Ok(_) = PriceInfo::get(move |price: PriceInfo| { + let price_info = PriceInfo::get(move |price: PriceInfo| { trace!(target: "miner", "Price info arrived: {:?}", price); let usd_per_eth = price.ethusd; let wei_per_usd: f32 = 1.0e18 / usd_per_eth; @@ -159,7 +159,9 @@ impl GasPriceCalibrator { let wei_per_gas: f32 = wei_per_usd * usd_per_tx / gas_per_tx; info!(target: "miner", "Updated conversion rate to Ξ1 = {} ({} wei/gas)", Colour::White.bold().paint(format!("US${}", usd_per_eth)), Colour::Yellow.bold().paint(format!("{}", wei_per_gas))); set_price(U256::from(wei_per_gas as u64)); - }) { + }); + + if price_info.is_ok() { self.next_calibration = Instant::now() + self.options.recalibration_period; } else { warn!(target: "miner", "Unable to update Ether price."); @@ -1139,15 +1141,16 @@ impl MinerService for Miner { #[cfg(test)] mod tests { + use std::sync::Arc; use std::time::Duration; use super::super::{MinerService, PrioritizationStrategy}; use super::*; - use util::*; + use block::IsBlock; + use util::{U256, Uint, FromHex}; use ethkey::{Generator, Random}; use client::{BlockChainClient, TestBlockChainClient, EachBlockWith, TransactionImportResult}; use header::BlockNumber; use types::transaction::{Transaction, SignedTransaction, Action}; - use block::*; use spec::Spec; use tests::helpers::{generate_dummy_client}; diff --git a/ethcore/src/miner/transaction_queue.rs b/ethcore/src/miner/transaction_queue.rs index bfbd3fade..cd2d3ba47 100644 --- a/ethcore/src/miner/transaction_queue.rs +++ b/ethcore/src/miner/transaction_queue.rs @@ -990,7 +990,7 @@ impl TransactionQueue { let mut update_last_nonce_to = None; { let by_nonce = self.future.by_address.row_mut(&address); - if let None = by_nonce { + if by_nonce.is_none() { return; } let mut by_nonce = by_nonce.expect("None is tested in early-exit condition above; qed"); @@ -1212,12 +1212,12 @@ mod test { use util::table::*; use util::*; use ethkey::{Random, Generator}; - use transaction::*; use error::{Error, TransactionError}; use super::*; use 
super::{TransactionSet, TransactionOrder, VerifiedTransaction}; use miner::local_transactions::LocalTransactionsList; use client::TransactionImportResult; + use transaction::{SignedTransaction, Transaction, Action}; fn unwrap_tx_err(err: Result) -> TransactionError { match err.unwrap_err() { diff --git a/ethcore/src/pod_account.rs b/ethcore/src/pod_account.rs index 0882b688c..92a78cebd 100644 --- a/ethcore/src/pod_account.rs +++ b/ethcore/src/pod_account.rs @@ -64,13 +64,13 @@ impl PodAccount { } /// Place additional data into given hash DB. - pub fn insert_additional(&self, db: &mut AccountDBMut) { + pub fn insert_additional(&self, db: &mut AccountDBMut, factory: &TrieFactory) { match self.code { Some(ref c) if !c.is_empty() => { db.insert(c); } _ => {} } let mut r = H256::new(); - let mut t = SecTrieDBMut::new(db, &mut r); + let mut t = factory.create(db, &mut r); for (k, v) in &self.storage { if let Err(e) = t.insert(k, &rlp::encode(&U256::from(&**v))) { warn!("Encountered potential DB corruption: {}", e); diff --git a/ethcore/src/snapshot/mod.rs b/ethcore/src/snapshot/mod.rs index 3f63ac208..408941309 100644 --- a/ethcore/src/snapshot/mod.rs +++ b/ethcore/src/snapshot/mod.rs @@ -552,11 +552,11 @@ const POW_VERIFY_RATE: f32 = 0.02; pub fn verify_old_block(rng: &mut OsRng, header: &Header, engine: &Engine, chain: &BlockChain, body: Option<&[u8]>, always: bool) -> Result<(), ::error::Error> { if always || rng.gen::() <= POW_VERIFY_RATE { match chain.block_header(header.parent_hash()) { - Some(parent) => engine.verify_block_family(&header, &parent, body), - None => engine.verify_block_seal(&header), + Some(parent) => engine.verify_block_family(header, &parent, body), + None => engine.verify_block_seal(header), } } else { - engine.verify_block_basic(&header, body) + engine.verify_block_basic(header, body) } } diff --git a/ethcore/src/spec/spec.rs b/ethcore/src/spec/spec.rs index c8910bbdd..71c15bca2 100644 --- a/ethcore/src/spec/spec.rs +++ b/ethcore/src/spec/spec.rs @@ -244,13 +244,13 @@ impl Spec { } /// Ensure that the given state DB has the trie nodes in for the genesis state. - pub fn ensure_db_good(&self, db: &mut StateDB) -> Result> { + pub fn ensure_db_good(&self, db: &mut StateDB, factory: &TrieFactory) -> Result> { if !db.as_hashdb().contains(&self.state_root()) { trace!(target: "spec", "ensure_db_good: Fresh database? Cannot find state root {}", self.state_root()); let mut root = H256::new(); { - let mut t = SecTrieDBMut::new(db.as_hashdb_mut(), &mut root); + let mut t = factory.create(db.as_hashdb_mut(), &mut root); for (address, account) in self.genesis_state.get().iter() { try!(t.insert(&**address, &account.rlp())); } @@ -258,7 +258,7 @@ impl Spec { trace!(target: "spec", "ensure_db_good: Populated sec trie; root is {}", root); for (address, account) in self.genesis_state.get().iter() { db.note_non_null_account(address); - account.insert_additional(&mut AccountDBMut::new(db.as_hashdb_mut(), address)); + account.insert_additional(&mut AccountDBMut::new(db.as_hashdb_mut(), address), factory); } assert!(db.as_hashdb().contains(&self.state_root())); Ok(true) diff --git a/ethcore/src/state/account.rs b/ethcore/src/state/account.rs index 76061f6a0..bdcc92bd0 100644 --- a/ethcore/src/state/account.rs +++ b/ethcore/src/state/account.rs @@ -314,11 +314,10 @@ impl Account { self.code_hash == SHA3_EMPTY } - #[cfg(test)] - /// return the storage root associated with this account or None if it has been altered via the overlay. 
+ /// Return the storage root associated with this account or None if it has been altered via the overlay. pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} } - /// return the storage overlay. + /// Return the storage overlay. pub fn storage_changes(&self) -> &HashMap { &self.storage_changes } /// Increment the nonce of the account by one. @@ -445,11 +444,10 @@ impl fmt::Debug for Account { #[cfg(test)] mod tests { - + use rlp::{UntrustedRlp, RlpType, View, Compressible}; use util::*; use super::*; use account_db::*; - use rlp::*; #[test] fn account_compress() { diff --git a/ethcore/src/state/mod.rs b/ethcore/src/state/mod.rs index 01a7e3b15..db7c318c6 100644 --- a/ethcore/src/state/mod.rs +++ b/ethcore/src/state/mod.rs @@ -369,6 +369,12 @@ impl State { |a| a.as_ref().map_or(self.account_start_nonce, |account| *account.nonce())) } + /// Get the storage root of account `a`. + pub fn storage_root(&self, a: &Address) -> Option { + self.ensure_cached(a, RequireCache::None, true, + |a| a.as_ref().and_then(|account| account.storage_root().cloned())) + } + /// Mutate storage of account `address` so that it is `value` for `key`. pub fn storage_at(&self, address: &Address, key: &H256) -> H256 { // Storage key search and update works like this: @@ -445,6 +451,7 @@ impl State { } /// Add `incr` to the balance of account `a`. + #[cfg_attr(feature="dev", allow(single_match))] pub fn add_balance(&mut self, a: &Address, incr: &U256, cleanup_mode: CleanupMode) { trace!(target: "state", "add_balance({}, {}): {}", a, incr, self.balance(a)); let is_value_transfer = !incr.is_zero(); diff --git a/ethcore/src/state/substate.rs b/ethcore/src/state/substate.rs index 853b0e422..c11f802a1 100644 --- a/ethcore/src/state/substate.rs +++ b/ethcore/src/state/substate.rs @@ -57,6 +57,7 @@ impl Substate { } /// Get the cleanup mode object from this. + #[cfg_attr(feature="dev", allow(wrong_self_convention))] pub fn to_cleanup_mode(&mut self, schedule: &Schedule) -> CleanupMode { match (schedule.no_empty, schedule.kill_empty) { (false, _) => CleanupMode::ForceCreate, diff --git a/ethcore/src/state_db.rs b/ethcore/src/state_db.rs index 3a3595a35..eafa4022e 100644 --- a/ethcore/src/state_db.rs +++ b/ethcore/src/state_db.rs @@ -397,6 +397,7 @@ impl StateDB { } /// Get cached code based on hash. 
+ #[cfg_attr(feature="dev", allow(map_clone))] pub fn get_cached_code(&self, hash: &H256) -> Option>> { let mut cache = self.code_cache.lock(); diff --git a/ethcore/src/tests/client.rs b/ethcore/src/tests/client.rs index 99b251d66..427082823 100644 --- a/ethcore/src/tests/client.rs +++ b/ethcore/src/tests/client.rs @@ -62,7 +62,7 @@ fn should_return_registrar() { &db_config ).unwrap(); let params = client.additional_params(); - let address = params.get("registrar").unwrap(); + let address = ¶ms["registrar"]; assert_eq!(address.len(), 40); assert!(U256::from_str(address).is_ok()); @@ -93,7 +93,7 @@ fn imports_good_block() { &db_config ).unwrap(); let good_block = get_good_dummy_block(); - if let Err(_) = client.import_block(good_block) { + if client.import_block(good_block).is_err() { panic!("error importing block being good by definition"); } client.flush_queue(); @@ -203,18 +203,18 @@ fn can_collect_garbage() { #[test] fn can_generate_gas_price_median() { - let client_result = generate_dummy_client_with_data(3, 1, &vec_into![1, 2, 3]); + let client_result = generate_dummy_client_with_data(3, 1, slice_into![1, 2, 3]); let client = client_result.reference(); assert_eq!(Some(U256::from(2)), client.gas_price_median(3)); - let client_result = generate_dummy_client_with_data(4, 1, &vec_into![1, 4, 3, 2]); + let client_result = generate_dummy_client_with_data(4, 1, slice_into![1, 4, 3, 2]); let client = client_result.reference(); assert_eq!(Some(U256::from(3)), client.gas_price_median(4)); } #[test] fn can_generate_gas_price_histogram() { - let client_result = generate_dummy_client_with_data(20, 1, &vec_into![6354,8593,6065,4842,7845,7002,689,4958,4250,6098,5804,4320,643,8895,2296,8589,7145,2000,2512,1408]); + let client_result = generate_dummy_client_with_data(20, 1, slice_into![6354,8593,6065,4842,7845,7002,689,4958,4250,6098,5804,4320,643,8895,2296,8589,7145,2000,2512,1408]); let client = client_result.reference(); let hist = client.gas_price_histogram(20, 5).unwrap(); @@ -224,7 +224,7 @@ fn can_generate_gas_price_histogram() { #[test] fn empty_gas_price_histogram() { - let client_result = generate_dummy_client_with_data(20, 0, &vec_into![]); + let client_result = generate_dummy_client_with_data(20, 0, slice_into![]); let client = client_result.reference(); assert!(client.gas_price_histogram(20, 5).is_none()); diff --git a/ethcore/src/tests/helpers.rs b/ethcore/src/tests/helpers.rs index adfb4f096..77a6f117a 100644 --- a/ethcore/src/tests/helpers.rs +++ b/ethcore/src/tests/helpers.rs @@ -18,6 +18,7 @@ use ethkey::KeyPair; use io::*; use client::{BlockChainClient, Client, ClientConfig}; use util::*; +use util::trie::TrieSpec; use spec::*; use state_db::StateDB; use block::{OpenBlock, Drain}; @@ -157,7 +158,7 @@ pub fn generate_dummy_client_with_spec_and_data(get_test_spec: F, block_numbe let mut db_result = get_temp_state_db(); let mut db = db_result.take(); - test_spec.ensure_db_good(&mut db).unwrap(); + test_spec.ensure_db_good(&mut db, &TrieFactory::new(TrieSpec::Secure)).unwrap(); let genesis_header = test_spec.genesis_header(); let mut rolling_timestamp = 40; @@ -262,7 +263,7 @@ pub fn get_test_client_with_blocks(blocks: Vec) -> GuardedTempResult for ClientMode { Mode::Active => ClientMode::Active, } } -} \ No newline at end of file +} diff --git a/ethcore/src/types/transaction.rs b/ethcore/src/types/transaction.rs index d7e06790b..91a4ac836 100644 --- a/ethcore/src/types/transaction.rs +++ b/ethcore/src/types/transaction.rs @@ -73,7 +73,7 @@ pub struct Transaction { impl Transaction { 
/// Append object with a without signature into RLP stream pub fn rlp_append_unsigned_transaction(&self, s: &mut RlpStream, network_id: Option) { - s.begin_list(if let None = network_id { 6 } else { 9 }); + s.begin_list(if network_id.is_none() { 6 } else { 9 }); s.append(&self.nonce); s.append(&self.gas_price); s.append(&self.gas); @@ -210,7 +210,7 @@ pub struct SignedTransaction { /// Plain Transaction. unsigned: Transaction, /// The V field of the signature; the LS bit described which half of the curve our point falls - /// in. The MS bits describe which network this transaction is for. If 27/28, its for all networks. + /// in. The MS bits describe which network this transaction is for. If 27/28, its for all networks. v: u8, /// The R field of the signature; helps describe the point on the curve. r: U256, @@ -464,7 +464,7 @@ fn should_agree_with_vitalik() { let signed: SignedTransaction = decode(&FromHex::from_hex(tx_data).unwrap()); signed.check_low_s().unwrap(); assert_eq!(signed.sender().unwrap(), address.into()); - flushln!("networkid: {:?}", signed.network_id()); + flushln!("networkid: {:?}", signed.network_id()); }; test_vector("f864808504a817c800825208943535353535353535353535353535353535353535808025a0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116da0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d", "0xf0f6f18bca1b28cd68e4357452947e021241e9ce") @@ -477,4 +477,4 @@ fn should_agree_with_vitalik() { test_vector("f867078504a817c807830290409435353535353535353535353535353535353535358201578025a052f1a9b320cab38e5da8a8f97989383aab0a49165fc91c737310e4f7e9821021a052f1a9b320cab38e5da8a8f97989383aab0a49165fc91c737310e4f7e9821021", "0xd37922162ab7cea97c97a87551ed02c9a38b7332") test_vector("f867088504a817c8088302e2489435353535353535353535353535353535353535358202008025a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c12a064b1702d9298fee62dfeccc57d322a463ad55ca201256d01f62b45b2e1c21c10", "0x9bddad43f934d313c2b79ca28a432dd2b7281029") test_vector("f867098504a817c809830334509435353535353535353535353535353535353535358202d98025a052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afba052f8f61201b2b11a78d6e866abc9c3db2ae8631fa656bfe5cb53668255367afb", "0x3c24d7329e92f84f08556ceb6df1cdb0104ca49f") -} \ No newline at end of file +} diff --git a/ethcore/src/verification/verification.rs b/ethcore/src/verification/verification.rs index 47b2e16de..7e42e8881 100644 --- a/ethcore/src/verification/verification.rs +++ b/ethcore/src/verification/verification.rs @@ -29,6 +29,7 @@ use header::{BlockNumber, Header}; use rlp::{UntrustedRlp, View}; use transaction::SignedTransaction; use views::BlockView; +use time::get_time; /// Preprocessed block data gathered in `verify_block_unordered` call pub struct PreverifiedBlock { @@ -209,6 +210,10 @@ pub fn verify_header_params(header: &Header, engine: &Engine) -> Result<(), Erro if header.number() != 0 && header.extra_data().len() > maximum_extra_data_size { return Err(From::from(BlockError::ExtraDataOutOfBounds(OutOfBounds { min: None, max: Some(maximum_extra_data_size), found: header.extra_data().len() }))); } + let max_time = get_time().sec as u64 + 30; + if header.timestamp() > max_time { + return Err(From::from(BlockError::InvalidTimestamp(OutOfBounds { max: Some(max_time), min: None, found: header.timestamp() }))) + } Ok(()) } @@ -258,6 +263,7 @@ mod tests { use tests::helpers::*; use types::log_entry::{LogEntry, LocalizedLogEntry}; use rlp::View; + use time::get_time; fn check_ok(result: Result<(), Error>) { 
result.unwrap_or_else(|e| panic!("Block verification failed: {:?}", e)); @@ -271,6 +277,14 @@ mod tests { } } + fn check_fail_timestamp(result: Result<(), Error>) { + match result { + Err(Error::Block(BlockError::InvalidTimestamp(_))) => (), + Err(other) => panic!("Block verification failed.\nExpected: InvalidTimestamp\nGot: {:?}", other), + Ok(_) => panic!("Block verification failed.\nExpected: InvalidTimestamp\nGot: Ok"), + } + } + struct TestBlockChain { blocks: HashMap, numbers: HashMap, @@ -515,6 +529,14 @@ mod tests { check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine, &bc), InvalidTimestamp(OutOfBounds { max: None, min: Some(parent.timestamp() + 1), found: header.timestamp() })); + header = good.clone(); + header.set_timestamp(2450000000); + check_fail_timestamp(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine)); + + header = good.clone(); + header.set_timestamp(get_time().sec as u64 + 40); + check_fail_timestamp(basic_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine)); + header = good.clone(); header.set_number(9); check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine, &bc), diff --git a/js/assets/images/certifications/unknown.svg b/js/assets/images/certifications/unknown.svg new file mode 100644 index 000000000..1554bcc25 --- /dev/null +++ b/js/assets/images/certifications/unknown.svg @@ -0,0 +1,4 @@ + + + + diff --git a/js/package.json b/js/package.json index a6f705ec6..67fd98976 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "parity.js", - "version": "0.2.78", + "version": "0.2.80", "main": "release/index.js", "jsnext:main": "src/index.js", "author": "Parity Team ", @@ -72,6 +72,7 @@ "core-js": "~2.4.1", "coveralls": "~2.11.11", "css-loader": "~0.26.0", + "ejs-loader": "~0.3.0", "enzyme": "2.3.0", "eslint": "~3.10.2", "eslint-config-semistandard": "~7.0.0", diff --git a/js/src/api/format/output.js b/js/src/api/format/output.js index 262a275a0..1094cdb83 100644 --- a/js/src/api/format/output.js +++ b/js/src/api/format/output.js @@ -144,7 +144,8 @@ export function outSignerRequest (request) { break; case 'payload': - request[key].transaction = outTransaction(request[key].transaction); + request[key].signTransaction = outTransaction(request[key].signTransaction); + request[key].sendTransaction = outTransaction(request[key].sendTransaction); break; } }); diff --git a/js/src/contracts/abi/badgereg.json b/js/src/contracts/abi/badgereg.json new file mode 100644 index 000000000..3d18ba393 --- /dev/null +++ b/js/src/contracts/abi/badgereg.json @@ -0,0 +1 @@ 
+[{"constant":false,"inputs":[{"name":"_new","type":"address"}],"name":"setOwner","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_addr","type":"address"},{"name":"_name","type":"bytes32"}],"name":"register","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_name","type":"bytes32"}],"name":"fromName","outputs":[{"name":"id","type":"uint256"},{"name":"addr","type":"address"},{"name":"owner","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"badgeCount","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_fee","type":"uint256"}],"name":"setFee","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_id","type":"uint256"},{"name":"_key","type":"bytes32"}],"name":"meta","outputs":[{"name":"","type":"bytes32"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":false,"inputs":[],"name":"drain","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_id","type":"uint256"}],"name":"unregister","outputs":[],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_addr","type":"address"}],"name":"fromAddress","outputs":[{"name":"id","type":"uint256"},{"name":"name","type":"bytes32"},{"name":"owner","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_id","type":"uint256"}],"name":"badge","outputs":[{"name":"addr","type":"address"},{"name":"name","type":"bytes32"},{"name":"owner","type":"address"}],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_id","type":"uint256"},{"name":"_key","type":"bytes32"},{"name":"_value","type":"bytes32"}],"name":"setMeta","outputs":[],"payable":false,"type":"function"},{"constant":false,"inputs":[{"name":"_addr","type":"address"},{"name":"_name","type":"bytes32"},{"name":"_owner","type":"address"}],"name":"registerAs","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[],"name":"fee","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"id","type":"uint256"},{"indexed":false,"name":"addr","type":"address"}],"name":"Registered","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"id","type":"uint256"}],"name":"Unregistered","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"id","type":"uint256"},{"indexed":true,"name":"key","type":"bytes32"},{"indexed":false,"name":"value","type":"bytes32"}],"name":"MetaChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"old","type":"address"},{"indexed":true,"name":"current","type":"address"}],"name":"NewOwner","type":"event"}] \ No newline at end of file diff --git a/js/src/contracts/abi/certifier.json b/js/src/contracts/abi/certifier.json new file mode 100644 index 000000000..905ddde6c --- /dev/null +++ b/js/src/contracts/abi/certifier.json @@ -0,0 +1 @@ 
+[{"constant":true,"inputs":[{"name":"_who","type":"address"},{"name":"_field","type":"string"}],"name":"getAddress","outputs":[{"name":"","type":"address"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_who","type":"address"},{"name":"_field","type":"string"}],"name":"getUint","outputs":[{"name":"","type":"uint256"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_who","type":"address"}],"name":"certified","outputs":[{"name":"","type":"bool"}],"payable":false,"type":"function"},{"constant":true,"inputs":[{"name":"_who","type":"address"},{"name":"_field","type":"string"}],"name":"get","outputs":[{"name":"","type":"bytes32"}],"payable":false,"type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"who","type":"address"}],"name":"Confirmed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"who","type":"address"}],"name":"Revoked","type":"event"}] \ No newline at end of file diff --git a/js/src/contracts/abi/index.js b/js/src/contracts/abi/index.js index a6a7f0783..f15765b1a 100644 --- a/js/src/contracts/abi/index.js +++ b/js/src/contracts/abi/index.js @@ -14,6 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . +import badgereg from './badgereg.json'; import basiccoin from './basiccoin.json'; import basiccoinmanager from './basiccoinmanager.json'; import dappreg from './dappreg.json'; @@ -28,6 +29,7 @@ import tokenreg from './tokenreg.json'; import wallet from './wallet.json'; export { + badgereg, basiccoin, basiccoinmanager, dappreg, diff --git a/js/src/contracts/badgereg.js b/js/src/contracts/badgereg.js new file mode 100644 index 000000000..f8dbefa78 --- /dev/null +++ b/js/src/contracts/badgereg.js @@ -0,0 +1,66 @@ +// Copyright 2015, 2016 Ethcore (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +import { bytesToHex, hex2Ascii } from '../api/util/format'; + +import ABI from './abi/certifier.json'; + +const ZERO = '0x0000000000000000000000000000000000000000000000000000000000000000'; + +export default class BadgeReg { + constructor (api, registry) { + this._api = api; + this._registry = registry; + + registry.getContract('badgereg'); + this.certifiers = {}; // by name + this.contracts = {}; // by name + } + + fetchCertifier (name) { + if (this.certifiers[name]) { + return Promise.resolve(this.certifiers[name]); + } + return this._registry.getContract('badgereg') + .then((badgeReg) => { + return badgeReg.instance.fromName.call({}, [name]) + .then(([ id, address ]) => { + return Promise.all([ + badgeReg.instance.meta.call({}, [id, 'TITLE']), + badgeReg.instance.meta.call({}, [id, 'IMG']) + ]) + .then(([ title, img ]) => { + title = bytesToHex(title); + title = title === ZERO ? 
null : hex2Ascii(title); + if (bytesToHex(img) === ZERO) img = null; + + const data = { address, name, title, icon: img }; + this.certifiers[name] = data; + return data; + }); + }); + }); + } + + checkIfCertified (certifier, address) { + if (!this.contracts[certifier]) { + this.contracts[certifier] = this._api.newContract(ABI, certifier); + } + const contract = this.contracts[certifier]; + + return contract.instance.certified.call({}, [address]); + } +} diff --git a/js/src/contracts/contracts.js b/js/src/contracts/contracts.js index cefece7de..f61a63690 100644 --- a/js/src/contracts/contracts.js +++ b/js/src/contracts/contracts.js @@ -20,6 +20,7 @@ import SignatureReg from './signaturereg'; import TokenReg from './tokenreg'; import GithubHint from './githubhint'; import * as smsVerification from './sms-verification'; +import BadgeReg from './badgereg'; let instance = null; @@ -33,6 +34,7 @@ export default class Contracts { this._signaturereg = new SignatureReg(api, this._registry); this._tokenreg = new TokenReg(api, this._registry); this._githubhint = new GithubHint(api, this._registry); + this.badgeReg = new BadgeReg(api, this._registry); } get registry () { diff --git a/js/src/dapps/dappreg.js b/js/src/dapps/dappreg.js index 8ed9724bc..9bd96f1a7 100644 --- a/js/src/dapps/dappreg.js +++ b/js/src/dapps/dappreg.js @@ -17,10 +17,8 @@ import React from 'react'; import ReactDOM from 'react-dom'; import injectTapEventPlugin from 'react-tap-event-plugin'; -import { useStrict } from 'mobx'; injectTapEventPlugin(); -useStrict(true); import Application from './dappreg/Application'; diff --git a/js/src/dapps/index.ejs b/js/src/dapps/index.ejs index 250194529..fa65c78bf 100644 --- a/js/src/dapps/index.ejs +++ b/js/src/dapps/index.ejs @@ -11,28 +11,23 @@ height: 100%; margin: 0; padding: 0; - } - - .loading-container { - display: flex; - align-items: center; - justify-content: center; - height: 100%; - font-family: Roboto; - background-color: rgba(0, 0, 0, 0.8); - color: #ddd; + background: white; + font-family: 'Roboto', sans-serif; + font-size: 16px; + font-weight: 300; } .loading { - font-size: 4em; + text-align: center; + padding-top: 5em; + font-size: 2em; + color: #999; }
-
- Loading... -
+
Loading
<% if (!htmlWebpackPlugin.options.secure) { %> diff --git a/js/src/index.ejs b/js/src/index.ejs index eefc50dbb..48cd5c0c0 100644 --- a/js/src/index.ejs +++ b/js/src/index.ejs @@ -11,28 +11,23 @@ height: 100%; margin: 0; padding: 0; - } - - .loading-container { - display: flex; - align-items: center; - justify-content: center; - height: 100%; - font-family: Roboto; - background-color: rgba(0, 0, 0, 0.8); - color: #ddd; + background: white; + font-family: 'Roboto', sans-serif; + font-size: 16px; + font-weight: 300; } .loading { - font-size: 4em; + text-align: center; + padding-top: 5em; + font-size: 2em; + color: #999; }
-
- Loading... -
+
Loading
diff --git a/js/src/modals/CreateAccount/AccountDetails/accountDetails.js b/js/src/modals/CreateAccount/AccountDetails/accountDetails.js index 14c858c06..945bcc975 100644 --- a/js/src/modals/CreateAccount/AccountDetails/accountDetails.js +++ b/js/src/modals/CreateAccount/AccountDetails/accountDetails.js @@ -15,8 +15,15 @@ // along with Parity. If not, see . import React, { Component, PropTypes } from 'react'; +import PrintIcon from 'material-ui/svg-icons/action/print'; import { Form, Input, InputAddress } from '../../../ui'; +import Button from '../../../ui/Button'; + +import { createIdentityImg } from '../../../api/util/identity'; +import print from './print'; +import recoveryPage from './recovery-page.ejs'; +import ParityLogo from '../../../../assets/images/parity-logo-black-no-text.svg'; export default class AccountDetails extends Component { static propTypes = { @@ -42,6 +49,7 @@ export default class AccountDetails extends Component { label='address' value={ address } /> { this.renderPhrase() } + { this.renderPhraseCopyButton() } ); } @@ -62,4 +70,26 @@ export default class AccountDetails extends Component { value={ phrase } /> ); } + + renderPhraseCopyButton () { + const { phrase } = this.props; + if (!phrase) { + return null; + } + + return ( +
diff --git a/js/src/views/Account/account.js b/js/src/views/Account/account.js index e27333cbf..21d2f380c 100644 --- a/js/src/views/Account/account.js +++ b/js/src/views/Account/account.js @@ -64,12 +64,6 @@ class Account extends Component { } componentDidMount () { - const { api } = this.context; - const { address } = this.props.params; - const { isTestnet } = this.props; - - const verificationStore = new VerificationStore(api, address, isTestnet); - this.setState({ verificationStore }); this.setVisibleAccounts(); } @@ -80,6 +74,15 @@ class Account extends Component { if (prevAddress !== nextAddress) { this.setVisibleAccounts(nextProps); } + + const { isTestnet } = nextProps; + if (typeof isTestnet === 'boolean' && !this.state.verificationStore) { + const { api } = this.context; + const { address } = nextProps.params; + this.setState({ + verificationStore: new VerificationStore(api, address, isTestnet) + }); + } } componentWillUnmount () { @@ -115,7 +118,8 @@ class Account extends Component {
+ balance={ balance } + /> diff --git a/js/src/views/Application/Snackbar/snackbar.js b/js/src/views/Application/Snackbar/snackbar.js index ac6e6b950..fdeb48c57 100644 --- a/js/src/views/Application/Snackbar/snackbar.js +++ b/js/src/views/Application/Snackbar/snackbar.js @@ -19,10 +19,17 @@ import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import { Snackbar as SnackbarMUI } from 'material-ui'; -import { darkBlack } from 'material-ui/styles/colors'; +import { darkBlack, grey800 } from 'material-ui/styles/colors'; import { closeSnackbar } from '../../../redux/providers/snackbarActions'; +const bodyStyle = { + backgroundColor: darkBlack, + borderStyle: 'solid', + borderColor: grey800, + borderWidth: '1px 1px 0 1px' +}; + class Snackbar extends Component { static propTypes = { closeSnackbar: PropTypes.func.isRequired, @@ -40,7 +47,7 @@ class Snackbar extends Component { open={ open } message={ message } autoHideDuration={ cooldown } - bodyStyle={ { backgroundColor: darkBlack } } + bodyStyle={ bodyStyle } onRequestClose={ this.handleClose } /> ); diff --git a/js/src/views/Application/Status/status.css b/js/src/views/Application/Status/status.css index de043a1ad..8721bc4c2 100644 --- a/js/src/views/Application/Status/status.css +++ b/js/src/views/Application/Status/status.css @@ -15,45 +15,46 @@ /* along with Parity. If not, see . */ .status { - padding: 0.5em; + position: fixed; + bottom: 0; + left: 0; + right: 0; + z-index: 1000; + display: flex; + align-items: center; + padding: .4em .5em; font-size: x-small; color: #ccc; - background-color: rgba(0, 0, 0, 0.2) -} - -.title { - margin: 0 0.5em 0 2em; + background-color: rgba(0, 0, 0, 0.8); } .enode { word-wrap: break-word; - float: right; } .enode > * { display: inline-block; - margin: 0.25em 0.5em; - vertical-align: top; + margin: 0 .25em; + vertical-align: middle; } - -.block { +.enode > :last-child { + margin-right: 0; } .netinfo { display: flex; + flex-grow: 1; align-items: center; - color: #ddd; + color: #ddd; } .netinfo > * { - display: inline-block; margin-left: 1em; } .network { padding: 0.25em 0.5em; - display: inline-block; - border-radius: 4px; + border-radius: .4em; line-height: 1.2; text-transform: uppercase; } @@ -65,14 +66,3 @@ .networktest { background: rgb(136, 0, 0); } - -.peers { -} - -.version { - padding: 0.25em 0.5em; - float: left; -} - -.syncing { -} diff --git a/js/src/views/Application/Status/status.js b/js/src/views/Application/Status/status.js index 6417d5d28..287e7a6ee 100644 --- a/js/src/views/Application/Status/status.js +++ b/js/src/views/Application/Status/status.js @@ -46,7 +46,6 @@ class Status extends Component {
{ clientVersion }
- { this.renderEnode() }
@@ -56,6 +55,7 @@ class Status extends Component { { netPeers.active.toFormat() }/{ netPeers.connected.toFormat() }/{ netPeers.max.toFormat() } peers
+ { this.renderEnode() } ); } @@ -73,7 +73,7 @@ class Status extends Component { return (
- +
{ abbreviated }
); diff --git a/js/src/views/Application/TabBar/tabBar.js b/js/src/views/Application/TabBar/tabBar.js index c8fa0fd4a..ac8a28866 100644 --- a/js/src/views/Application/TabBar/tabBar.js +++ b/js/src/views/Application/TabBar/tabBar.js @@ -59,7 +59,7 @@ class Tab extends Component { selected={ active } icon={ view.icon } label={ label } - onClick={ this.handleClick } + onTouchTap={ this.handleClick } > { children } diff --git a/js/src/views/Contract/Events/events.js b/js/src/views/Contract/Events/events.js index 382a1c658..5558be499 100644 --- a/js/src/views/Contract/Events/events.js +++ b/js/src/views/Contract/Events/events.js @@ -16,7 +16,7 @@ import React, { Component, PropTypes } from 'react'; -import { Container, ContainerTitle } from '../../../ui'; +import { Container } from '../../../ui'; import Event from './Event'; import styles from '../contract.css'; @@ -48,8 +48,7 @@ export default class Events extends Component { }); return ( - - + { list }
diff --git a/js/src/views/Contract/Queries/queries.js b/js/src/views/Contract/Queries/queries.js index 99fe9ff2a..5c69ab76e 100644 --- a/js/src/views/Contract/Queries/queries.js +++ b/js/src/views/Contract/Queries/queries.js @@ -19,7 +19,7 @@ import React, { Component, PropTypes } from 'react'; import { Card, CardTitle, CardText } from 'material-ui/Card'; import InputQuery from './inputQuery'; -import { Container, ContainerTitle, Input, InputAddress } from '../../../ui'; +import { Container, Input, InputAddress } from '../../../ui'; import styles from './queries.css'; @@ -55,8 +55,7 @@ export default class Queries extends Component { .map((fn) => this.renderInputQuery(fn)); return ( - - +
{ noInputQueries } diff --git a/js/src/views/Contract/contract.js b/js/src/views/Contract/contract.js index 613bf70b9..54d06f228 100644 --- a/js/src/views/Contract/contract.js +++ b/js/src/views/Contract/contract.js @@ -132,7 +132,8 @@ class Contract extends Component {
+ balance={ balance } + /> @@ -447,7 +448,10 @@ function mapStateToProps (state) { } function mapDispatchToProps (dispatch) { - return bindActionCreators({ newError, setVisibleAccounts }, dispatch); + return bindActionCreators({ + newError, + setVisibleAccounts + }, dispatch); } export default connect( diff --git a/js/src/views/ParityBar/parityBar.css b/js/src/views/ParityBar/parityBar.css index 5b01613d4..dab138238 100644 --- a/js/src/views/ParityBar/parityBar.css +++ b/js/src/views/ParityBar/parityBar.css @@ -42,25 +42,26 @@ } .expanded { - right: 16px; - max-height: 300px; + right: 1em; border-radius: 4px 4px 0 0; - overflow-y: auto; display: flex; flex-direction: column; + max-height: 19em; } .expanded .content { flex: 1; - overflow: auto; + overflow-y: auto; + overflow-x: hidden; display: flex; background: rgba(0, 0, 0, 0.8); + min-height: 16em; } .corner { position: absolute; bottom: 0; - right: 16px; + right: 1em; border-radius: 4px 4px 0 0; } @@ -118,7 +119,7 @@ } .header { - height: 36px; + height: 2em; padding: 0.5em 1em; background: rgba(0, 0, 0, 0.25); margin-bottom: 0; @@ -148,6 +149,7 @@ .actions { float: right; + margin-top: -2px; } .actions div { diff --git a/js/src/views/Settings/Background/background.js b/js/src/views/Settings/Background/background.js index 4b210881a..d771c4239 100644 --- a/js/src/views/Settings/Background/background.js +++ b/js/src/views/Settings/Background/background.js @@ -19,7 +19,7 @@ import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import NavigationRefresh from 'material-ui/svg-icons/navigation/refresh'; -import { Button, Container, ContainerTitle, ParityBackground } from '../../../ui'; +import { Button, Container, ParityBackground } from '../../../ui'; import { updateBackground } from '../actions'; @@ -55,8 +55,7 @@ class Background extends Component { render () { return ( - - +
The background pattern you can see right now is unique to your Parity installation. It will change every time you create a new Signer token. This is so that decentralized applications cannot pretend to be trustworthy.
diff --git a/js/src/views/Settings/Parity/parity.js b/js/src/views/Settings/Parity/parity.js index abec8cc8a..e5e5233d0 100644 --- a/js/src/views/Settings/Parity/parity.js +++ b/js/src/views/Settings/Parity/parity.js @@ -17,7 +17,7 @@ import React, { Component, PropTypes } from 'react'; import { MenuItem } from 'material-ui'; -import { Select, Container, ContainerTitle } from '../../../ui'; +import { Select, Container } from '../../../ui'; import layout from '../layout.css'; @@ -43,8 +43,7 @@ export default class Parity extends Component { render () { return ( - - +
Control the Parity node settings and mode of operation via this interface.
diff --git a/js/src/views/Settings/Proxy/proxy.js b/js/src/views/Settings/Proxy/proxy.js index 69e415d1f..3d2a607bc 100644 --- a/js/src/views/Settings/Proxy/proxy.js +++ b/js/src/views/Settings/Proxy/proxy.js @@ -16,7 +16,7 @@ import React, { Component, PropTypes } from 'react'; -import { Container, ContainerTitle } from '../../../ui'; +import { Container } from '../../../ui'; import layout from '../layout.css'; import styles from './proxy.css'; @@ -31,8 +31,7 @@ export default class Proxy extends Component { const proxyurl = `${dappsUrl}/proxy/proxy.pac`; return ( - - +
The proxy setup allows you to access Parity and all associated decentralized applications via memorable addresses.
diff --git a/js/src/views/Settings/Views/views.js b/js/src/views/Settings/Views/views.js index a485876c8..e5fdedf5c 100644 --- a/js/src/views/Settings/Views/views.js +++ b/js/src/views/Settings/Views/views.js @@ -19,7 +19,7 @@ import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import { Checkbox } from 'material-ui'; -import { Container, ContainerTitle } from '../../../ui'; +import { Container } from '../../../ui'; import { toggleView } from '../actions'; @@ -34,8 +34,7 @@ class Views extends Component { render () { return ( - - +
Manage the available application views, using only the parts of the application that are applicable to you.
diff --git a/js/src/views/Signer/components/RequestFinished/requestFinished.js b/js/src/views/Signer/components/RequestFinished/requestFinished.js index bce9e4038..fcca55540 100644 --- a/js/src/views/Signer/components/RequestFinished/requestFinished.js +++ b/js/src/views/Signer/components/RequestFinished/requestFinished.js @@ -25,7 +25,8 @@ export default class RequestFinished extends Component { result: PropTypes.any.isRequired, date: PropTypes.instanceOf(Date).isRequired, payload: PropTypes.oneOfType([ - PropTypes.shape({ transaction: PropTypes.object.isRequired }), + PropTypes.shape({ signTransaction: PropTypes.object.isRequired }), + PropTypes.shape({ sendTransaction: PropTypes.object.isRequired }), PropTypes.shape({ sign: PropTypes.object.isRequired }) ]).isRequired, msg: PropTypes.string, @@ -58,9 +59,8 @@ export default class RequestFinished extends Component { ); } - if (payload.transaction) { - const { transaction } = payload; - + const transaction = payload.sendTransaction || payload.signTransaction; + if (transaction) { return ( - + { this.renderActions() }

{ devLogsLevels || '-' } diff --git a/js/webpack/app.js b/js/webpack/app.js index 320410b2e..aff9b8aac 100644 --- a/js/webpack/app.js +++ b/js/webpack/app.js @@ -64,6 +64,10 @@ module.exports = { test: /\.json$/, use: [ 'json-loader' ] }, + { + test: /\.ejs$/, + use: [ 'ejs-loader' ] + }, { test: /\.html$/, use: [ diff --git a/json/Cargo.toml b/json/Cargo.toml index 8f7b0c227..67303cc7e 100644 --- a/json/Cargo.toml +++ b/json/Cargo.toml @@ -10,7 +10,7 @@ rustc-serialize = "0.3" serde = "0.8" serde_json = "0.8" serde_macros = { version = "0.8", optional = true } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} [build-dependencies] serde_codegen = { version = "0.8", optional = true } diff --git a/parity/blockchain.rs b/parity/blockchain.rs index 0baeb1354..02d3e39fb 100644 --- a/parity/blockchain.rs +++ b/parity/blockchain.rs @@ -22,7 +22,7 @@ use std::thread::sleep; use std::sync::Arc; use rustc_serialize::hex::FromHex; use io::{PanicHandler, ForwardPanic}; -use util::{ToPretty, Uint}; +use util::{ToPretty, Uint, U256, H256, Address, Hashable}; use rlp::PayloadInfo; use ethcore::service::ClientService; use ethcore::client::{Mode, DatabaseCompactionProfile, VMType, BlockImportError, BlockChainClient, BlockID}; @@ -65,6 +65,7 @@ impl FromStr for DataFormat { pub enum BlockchainCmd { Import(ImportBlockchain), Export(ExportBlockchain), + ExportState(ExportState), } #[derive(Debug, PartialEq)] @@ -103,10 +104,31 @@ pub struct ExportBlockchain { pub check_seal: bool, } +#[derive(Debug, PartialEq)] +pub struct ExportState { + pub spec: SpecType, + pub cache_config: CacheConfig, + pub dirs: Directories, + pub file_path: Option, + pub format: Option, + pub pruning: Pruning, + pub pruning_history: u64, + pub compaction: DatabaseCompactionProfile, + pub wal: bool, + pub fat_db: Switch, + pub tracing: Switch, + pub at: BlockID, + pub storage: bool, + pub code: bool, + pub min_balance: Option, + pub max_balance: Option, +} + pub fn execute(cmd: BlockchainCmd) -> Result { match cmd { BlockchainCmd::Import(import_cmd) => execute_import(import_cmd), BlockchainCmd::Export(export_cmd) => execute_export(export_cmd), + BlockchainCmd::ExportState(export_cmd) => execute_export_state(export_cmd), } } @@ -245,6 +267,7 @@ fn execute_import(cmd: ImportBlockchain) -> Result { // save user defaults user_defaults.pruning = algorithm; user_defaults.tracing = tracing; + user_defaults.fat_db = fat_db; try!(user_defaults.save(&user_defaults_path)); let report = client.report(); @@ -261,23 +284,28 @@ fn execute_import(cmd: ImportBlockchain) -> Result { ).into()) } -fn execute_export(cmd: ExportBlockchain) -> Result { - // Setup panic handler - let panic_handler = PanicHandler::new_in_arc(); +fn start_client( + dirs: Directories, + spec: SpecType, + pruning: Pruning, + pruning_history: u64, + tracing: Switch, + fat_db: Switch, + compaction: DatabaseCompactionProfile, + wal: bool, + cache_config: CacheConfig) -> Result { // create dirs used by parity - try!(cmd.dirs.create_dirs(false, false)); - - let format = cmd.format.unwrap_or_default(); + try!(dirs.create_dirs(false, false)); // load spec file - let spec = try!(cmd.spec.spec()); + let spec = try!(spec.spec()); // load genesis hash let genesis_hash = spec.genesis_header().hash(); // database paths - let db_dirs = cmd.dirs.database(genesis_hash, spec.fork_name.clone()); + let db_dirs = dirs.database(genesis_hash, spec.fork_name.clone()); // user defaults path let user_defaults_path = db_dirs.user_defaults_path(); @@ -288,34 
+316,42 @@ fn execute_export(cmd: ExportBlockchain) -> Result { fdlimit::raise_fd_limit(); // select pruning algorithm - let algorithm = cmd.pruning.to_algorithm(&user_defaults); + let algorithm = pruning.to_algorithm(&user_defaults); // check if tracing is on - let tracing = try!(tracing_switch_to_bool(cmd.tracing, &user_defaults)); + let tracing = try!(tracing_switch_to_bool(tracing, &user_defaults)); // check if fatdb is on - let fat_db = try!(fatdb_switch_to_bool(cmd.fat_db, &user_defaults, algorithm)); + let fat_db = try!(fatdb_switch_to_bool(fat_db, &user_defaults, algorithm)); // prepare client and snapshot paths. let client_path = db_dirs.client_path(algorithm); let snapshot_path = db_dirs.snapshot_path(); // execute upgrades - try!(execute_upgrades(&db_dirs, algorithm, cmd.compaction.compaction_profile(db_dirs.fork_path().as_path()))); + try!(execute_upgrades(&db_dirs, algorithm, compaction.compaction_profile(db_dirs.fork_path().as_path()))); // prepare client config - let client_config = to_client_config(&cmd.cache_config, Mode::Active, tracing, fat_db, cmd.compaction, cmd.wal, VMType::default(), "".into(), algorithm, cmd.pruning_history, cmd.check_seal); + let client_config = to_client_config(&cache_config, Mode::Active, tracing, fat_db, compaction, wal, VMType::default(), "".into(), algorithm, pruning_history, true); let service = try!(ClientService::start( client_config, &spec, &client_path, &snapshot_path, - &cmd.dirs.ipc_path(), + &dirs.ipc_path(), Arc::new(Miner::with_spec(&spec)), ).map_err(|e| format!("Client service error: {:?}", e))); drop(spec); + Ok(service) +} + +fn execute_export(cmd: ExportBlockchain) -> Result { + // Setup panic handler + let service = try!(start_client(cmd.dirs, cmd.spec, cmd.pruning, cmd.pruning_history, cmd.tracing, cmd.fat_db, cmd.compaction, cmd.wal, cmd.cache_config)); + let panic_handler = PanicHandler::new_in_arc(); + let format = cmd.format.unwrap_or_default(); panic_handler.forward_from(&service); let client = service.client(); @@ -329,6 +365,9 @@ fn execute_export(cmd: ExportBlockchain) -> Result { let to = try!(client.block_number(cmd.to_block).ok_or("To block could not be found")); for i in from..(to + 1) { + if i % 10000 == 0 { + info!("#{}", i); + } let b = try!(client.block(BlockID::Number(i)).ok_or("Error exporting incomplete chain")); match format { DataFormat::Binary => { out.write(&b).expect("Couldn't write to stream."); } @@ -339,6 +378,85 @@ fn execute_export(cmd: ExportBlockchain) -> Result { Ok("Export completed.".into()) } +fn execute_export_state(cmd: ExportState) -> Result { + // Setup panic handler + let service = try!(start_client(cmd.dirs, cmd.spec, cmd.pruning, cmd.pruning_history, cmd.tracing, cmd.fat_db, cmd.compaction, cmd.wal, cmd.cache_config)); + let panic_handler = PanicHandler::new_in_arc(); + + panic_handler.forward_from(&service); + let client = service.client(); + + let mut out: Box = match cmd.file_path { + Some(f) => Box::new(try!(fs::File::create(&f).map_err(|_| format!("Cannot write to file given: {}", f)))), + None => Box::new(io::stdout()), + }; + + let mut last: Option
= None; + let at = cmd.at; + let mut i = 0usize; + + out.write_fmt(format_args!("{{ \"state\": [", )).expect("Couldn't write to stream."); + loop { + let accounts = try!(client.list_accounts(at, last.as_ref(), 1000).ok_or("Specified block not found")); + if accounts.is_empty() { + break; + } + + for account in accounts.into_iter() { + let balance = client.balance(&account, at).unwrap_or_else(U256::zero); + if cmd.min_balance.map_or(false, |m| balance < m) || cmd.max_balance.map_or(false, |m| balance > m) { + last = Some(account); + continue; //filtered out + } + + if i != 0 { + out.write(b",").expect("Write error"); + } + out.write_fmt(format_args!("\n\"0x{}\": {{\"balance\": \"{:x}\", \"nonce\": \"{:x}\"", account.hex(), balance, client.nonce(&account, at).unwrap_or_else(U256::zero))).expect("Write error"); + let code = client.code(&account, at).unwrap_or(None).unwrap_or_else(Vec::new); + if !code.is_empty() { + out.write_fmt(format_args!(", \"code_hash\": \"0x{}\"", code.sha3().hex())).expect("Write error"); + if cmd.code { + out.write_fmt(format_args!(", \"code\": \"{}\"", code.to_hex())).expect("Write error"); + } + } + let storage_root = client.storage_root(&account, at).unwrap_or(::util::SHA3_NULL_RLP); + if storage_root != ::util::SHA3_NULL_RLP { + out.write_fmt(format_args!(", \"storage_root\": \"0x{}\"", storage_root.hex())).expect("Write error"); + if cmd.storage { + out.write_fmt(format_args!(", \"storage\": {{")).expect("Write error"); + let mut last_storage: Option = None; + loop { + let keys = try!(client.list_storage(at, &account, last_storage.as_ref(), 1000).ok_or("Specified block not found")); + if keys.is_empty() { + break; + } + + let mut si = 0; + for key in keys.into_iter() { + if si != 0 { + out.write(b",").expect("Write error"); + } + out.write_fmt(format_args!("\n\t\"0x{}\": \"0x{}\"", key.hex(), client.storage_at(&account, &key, at).unwrap_or_else(Default::default).hex())).expect("Write error"); + si += 1; + last_storage = Some(key); + } + } + out.write(b"\n}").expect("Write error"); + } + } + out.write(b"}").expect("Write error"); + i += 1; + if i % 10000 == 0 { + info!("Account #{}", i); + } + last = Some(account); + } + } + out.write_fmt(format_args!("\n]}}")).expect("Write error"); + Ok("Export completed.".into()) +} + #[cfg(test)] mod test { use super::DataFormat; diff --git a/parity/cli/mod.rs b/parity/cli/mod.rs index 3f67cf1fa..d33c58d9d 100644 --- a/parity/cli/mod.rs +++ b/parity/cli/mod.rs @@ -26,6 +26,8 @@ usage! { cmd_new: bool, cmd_list: bool, cmd_export: bool, + cmd_blocks: bool, + cmd_state: bool, cmd_import: bool, cmd_signer: bool, cmd_new_token: bool, @@ -246,6 +248,10 @@ usage! 
{ flag_to: String = "latest", or |_| None, flag_format: Option = None, or |_| None, flag_no_seal_check: bool = false, or |_| None, + flag_no_storage: bool = false, or |_| None, + flag_no_code: bool = false, or |_| None, + flag_min_balance: Option = None, or |_| None, + flag_max_balance: Option = None, or |_| None, // -- Snapshot Optons flag_at: String = "latest", or |_| None, @@ -484,6 +490,8 @@ mod tests { cmd_new: false, cmd_list: false, cmd_export: false, + cmd_state: false, + cmd_blocks: false, cmd_import: false, cmd_signer: false, cmd_new_token: false, @@ -600,6 +608,10 @@ mod tests { flag_to: "latest".into(), flag_format: None, flag_no_seal_check: false, + flag_no_code: false, + flag_no_storage: false, + flag_min_balance: None, + flag_max_balance: None, // -- Snapshot Optons flag_at: "latest".into(), diff --git a/parity/cli/usage.txt b/parity/cli/usage.txt index fe0824dfe..b67af6110 100644 --- a/parity/cli/usage.txt +++ b/parity/cli/usage.txt @@ -10,7 +10,7 @@ Usage: parity account import ... [options] parity wallet import --password FILE [options] parity import [ ] [options] - parity export [ ] [options] + parity export (blocks | state) [ ] [options] parity signer new-token [options] parity snapshot [options] parity restore [ ] [options] @@ -271,6 +271,16 @@ Import/Export Options: one of 'hex' and 'binary'. (default: {flag_format:?} = Import: auto, Export: binary) --no-seal-check Skip block seal check. (default: {flag_no_seal_check}) + --at BLOCK Export state at the given block, which may be an + index, hash, or 'latest'. Note that taking snapshots at + non-recent blocks will only work with --pruning archive + (default: {flag_at}) + --no-storage Don't export account storge. (default: {flag_no_storage}) + --no-code Don't export account code. (default: {flag_no_code}) + --min-balance WEI Don't export accounts with balance less than specified. + (default: {flag_min_balance:?}) + --max-balance WEI Don't export accounts with balance greater than specified. 
+ (default: {flag_max_balance:?}) Snapshot Options: --at BLOCK Take a snapshot at the given block, which may be an diff --git a/parity/configuration.rs b/parity/configuration.rs index ecf47ddad..c4a54f747 100644 --- a/parity/configuration.rs +++ b/parity/configuration.rs @@ -37,7 +37,7 @@ use dir::Directories; use dapps::Configuration as DappsConfiguration; use signer::{Configuration as SignerConfiguration}; use run::RunCmd; -use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, DataFormat}; +use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, ExportState, DataFormat}; use presale::ImportWallet; use account::{AccountCmd, NewAccount, ImportAccounts, ImportFromGethAccounts}; use snapshot::{self, SnapshotCommand}; @@ -161,23 +161,47 @@ impl Configuration { }; Cmd::Blockchain(BlockchainCmd::Import(import_cmd)) } else if self.args.cmd_export { - let export_cmd = ExportBlockchain { - spec: spec, - cache_config: cache_config, - dirs: dirs, - file_path: self.args.arg_file.clone(), - format: format, - pruning: pruning, - pruning_history: pruning_history, - compaction: compaction, - wal: wal, - tracing: tracing, - fat_db: fat_db, - from_block: try!(to_block_id(&self.args.flag_from)), - to_block: try!(to_block_id(&self.args.flag_to)), - check_seal: !self.args.flag_no_seal_check, - }; - Cmd::Blockchain(BlockchainCmd::Export(export_cmd)) + if self.args.cmd_blocks { + let export_cmd = ExportBlockchain { + spec: spec, + cache_config: cache_config, + dirs: dirs, + file_path: self.args.arg_file.clone(), + format: format, + pruning: pruning, + pruning_history: pruning_history, + compaction: compaction, + wal: wal, + tracing: tracing, + fat_db: fat_db, + from_block: try!(to_block_id(&self.args.flag_from)), + to_block: try!(to_block_id(&self.args.flag_to)), + check_seal: !self.args.flag_no_seal_check, + }; + Cmd::Blockchain(BlockchainCmd::Export(export_cmd)) + } else if self.args.cmd_state { + let export_cmd = ExportState { + spec: spec, + cache_config: cache_config, + dirs: dirs, + file_path: self.args.arg_file.clone(), + format: format, + pruning: pruning, + pruning_history: pruning_history, + compaction: compaction, + wal: wal, + tracing: tracing, + fat_db: fat_db, + at: try!(to_block_id(&self.args.flag_at)), + storage: !self.args.flag_no_storage, + code: !self.args.flag_no_code, + min_balance: self.args.flag_min_balance.and_then(|s| to_u256(&s).ok()), + max_balance: self.args.flag_max_balance.and_then(|s| to_u256(&s).ok()), + }; + Cmd::Blockchain(BlockchainCmd::ExportState(export_cmd)) + } else { + unreachable!(); + } } else if self.args.cmd_snapshot { let snapshot_cmd = SnapshotCommand { cache_config: cache_config, @@ -690,7 +714,7 @@ mod tests { use helpers::{replace_home, default_network_config}; use run::RunCmd; use signer::{Configuration as SignerConfiguration}; - use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, DataFormat}; + use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, DataFormat, ExportState}; use presale::ImportWallet; use account::{AccountCmd, NewAccount, ImportAccounts}; use devtools::{RandomTempPath}; @@ -779,7 +803,7 @@ mod tests { #[test] fn test_command_blockchain_export() { - let args = vec!["parity", "export", "blockchain.json"]; + let args = vec!["parity", "export", "blocks", "blockchain.json"]; let conf = parse(&args); assert_eq!(conf.into_command().unwrap().cmd, Cmd::Blockchain(BlockchainCmd::Export(ExportBlockchain { spec: Default::default(), @@ -799,9 +823,33 @@ mod tests { }))); } + #[test] + fn 
test_command_state_export() { + let args = vec!["parity", "export", "state", "state.json"]; + let conf = parse(&args); + assert_eq!(conf.into_command().unwrap().cmd, Cmd::Blockchain(BlockchainCmd::ExportState(ExportState { + spec: Default::default(), + cache_config: Default::default(), + dirs: Default::default(), + file_path: Some("state.json".into()), + pruning: Default::default(), + pruning_history: 64, + format: Default::default(), + compaction: Default::default(), + wal: true, + tracing: Default::default(), + fat_db: Default::default(), + at: BlockID::Latest, + storage: true, + code: true, + min_balance: None, + max_balance: None, + }))); + } + #[test] fn test_command_blockchain_export_with_custom_format() { - let args = vec!["parity", "export", "--format", "hex", "blockchain.json"]; + let args = vec!["parity", "export", "blocks", "--format", "hex", "blockchain.json"]; let conf = parse(&args); assert_eq!(conf.into_command().unwrap().cmd, Cmd::Blockchain(BlockchainCmd::Export(ExportBlockchain { spec: Default::default(), diff --git a/parity/params.rs b/parity/params.rs index 28233400e..3ce07e889 100644 --- a/parity/params.rs +++ b/parity/params.rs @@ -257,17 +257,13 @@ pub fn tracing_switch_to_bool(switch: Switch, user_defaults: &UserDefaults) -> R } } -pub fn fatdb_switch_to_bool(switch: Switch, user_defaults: &UserDefaults, algorithm: Algorithm) -> Result { +pub fn fatdb_switch_to_bool(switch: Switch, user_defaults: &UserDefaults, _algorithm: Algorithm) -> Result { let result = match (user_defaults.is_first_launch, switch, user_defaults.fat_db) { (false, Switch::On, false) => Err("FatDB resync required".into()), (_, Switch::On, _) => Ok(true), (_, Switch::Off, _) => Ok(false), (_, Switch::Auto, def) => Ok(def), }; - - if result.clone().unwrap_or(false) && algorithm != Algorithm::Archive { - return Err("Fat DB is not supported with the chosen pruning option. Please rerun with `--pruning=archive`".into()); - } result } diff --git a/parity/run.rs b/parity/run.rs index f56ba5b92..f977c450c 100644 --- a/parity/run.rs +++ b/parity/run.rs @@ -156,7 +156,7 @@ pub fn execute(cmd: RunCmd, logger: Arc) -> Result<(), String> { // get the mode let mode = try!(mode_switch_to_bool(cmd.mode, &user_defaults)); trace!(target: "mode", "mode is {:?}", mode); - let network_enabled = match &mode { &Mode::Dark(_) | &Mode::Off => false, _ => true, }; + let network_enabled = match mode { Mode::Dark(_) | Mode::Off => false, _ => true, }; // prepare client and snapshot paths. 
let client_path = db_dirs.client_path(algorithm); @@ -219,7 +219,7 @@ pub fn execute(cmd: RunCmd, logger: Arc) -> Result<(), String> { // create client config let client_config = to_client_config( &cmd.cache_config, - mode, + mode.clone(), tracing, fat_db, cmd.compaction, @@ -354,6 +354,8 @@ pub fn execute(cmd: RunCmd, logger: Arc) -> Result<(), String> { // save user defaults user_defaults.pruning = algorithm; user_defaults.tracing = tracing; + user_defaults.fat_db = fat_db; + user_defaults.mode = mode; try!(user_defaults.save(&user_defaults_path)); let on_mode_change = move |mode: &Mode| { diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index 4a8c4d76a..8598372f8 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -29,7 +29,7 @@ fetch = { path = "../util/fetch" } rustc-serialize = "0.3" transient-hashmap = "0.1" serde_macros = { version = "0.8.0", optional = true } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" } ethcore-ipc = { path = "../ipc/rpc" } time = "0.1" diff --git a/rpc/src/v1/impls/parity.rs b/rpc/src/v1/impls/parity.rs index 1fdcbdef8..74f467e5e 100644 --- a/rpc/src/v1/impls/parity.rs +++ b/rpc/src/v1/impls/parity.rs @@ -28,7 +28,6 @@ use ethstore::random_phrase; use ethsync::{SyncProvider, ManageNetwork}; use ethcore::miner::MinerService; use ethcore::client::{MiningBlockChainClient}; -use ethcore::ids::BlockID; use ethcore::mode::Mode; use ethcore::account_provider::AccountProvider; @@ -38,9 +37,11 @@ use v1::types::{ Bytes, U256, H160, H256, H512, Peers, Transaction, RpcSettings, Histogram, TransactionStats, LocalTransactionStatus, + BlockNumber, }; use v1::helpers::{errors, SigningQueue, SignerService, NetworkSettings}; use v1::helpers::dispatch::DEFAULT_MAC; +use v1::helpers::auto_args::Trailing; /// Parity implementation. 
pub struct ParityClient where @@ -234,19 +235,20 @@ impl Parity for ParityClient where Ok(Brain::new(phrase).generate().unwrap().address().into()) } - fn list_accounts(&self) -> Result>, Error> { + fn list_accounts(&self, count: u64, after: Option, block_number: Trailing) -> Result>, Error> { try!(self.active()); Ok(take_weak!(self.client) - .list_accounts(BlockID::Latest) + .list_accounts(block_number.0.into(), after.map(Into::into).as_ref(), count) .map(|a| a.into_iter().map(Into::into).collect())) } - fn list_storage_keys(&self, _address: H160) -> Result>, Error> { + fn list_storage_keys(&self, address: H160, count: u64, after: Option, block_number: Trailing) -> Result>, Error> { try!(self.active()); - // TODO: implement this - Ok(None) + Ok(take_weak!(self.client) + .list_storage(block_number.0.into(), &address.into(), after.map(Into::into).as_ref(), count) + .map(|a| a.into_iter().map(Into::into).collect())) } fn encrypt_message(&self, key: H512, phrase: Bytes) -> Result { diff --git a/rpc/src/v1/tests/mocked/signer.rs b/rpc/src/v1/tests/mocked/signer.rs index e2ba580e0..447c809cd 100644 --- a/rpc/src/v1/tests/mocked/signer.rs +++ b/rpc/src/v1/tests/mocked/signer.rs @@ -89,7 +89,7 @@ fn should_return_list_of_items_to_confirm() { let request = r#"{"jsonrpc":"2.0","method":"signer_requestsToConfirm","params":[],"id":1}"#; let response = concat!( r#"{"jsonrpc":"2.0","result":["#, - r#"{"id":"0x1","payload":{"transaction":{"data":"0x","from":"0x0000000000000000000000000000000000000001","gas":"0x989680","gasPrice":"0x2710","nonce":null,"to":"0xd46e8dd67c5d32be8058bb8eb970870f07244567","value":"0x1"}}},"#, + r#"{"id":"0x1","payload":{"sendTransaction":{"data":"0x","from":"0x0000000000000000000000000000000000000001","gas":"0x989680","gasPrice":"0x2710","nonce":null,"to":"0xd46e8dd67c5d32be8058bb8eb970870f07244567","value":"0x1"}}},"#, r#"{"id":"0x2","payload":{"sign":{"address":"0x0000000000000000000000000000000000000001","hash":"0x0000000000000000000000000000000000000000000000000000000000000005"}}}"#, r#"],"id":1}"# ); diff --git a/rpc/src/v1/traits/parity.rs b/rpc/src/v1/traits/parity.rs index b4df594e8..18440b654 100644 --- a/rpc/src/v1/traits/parity.rs +++ b/rpc/src/v1/traits/parity.rs @@ -18,11 +18,12 @@ use jsonrpc_core::Error; use std::collections::BTreeMap; -use v1::helpers::auto_args::Wrap; +use v1::helpers::auto_args::{Wrap, Trailing}; use v1::types::{ H160, H256, H512, U256, Bytes, Peers, Transaction, RpcSettings, Histogram, TransactionStats, LocalTransactionStatus, + BlockNumber }; build_rpc_trait! { @@ -103,12 +104,12 @@ build_rpc_trait! { /// Returns all addresses if Fat DB is enabled (`--fat-db`), or null if not. #[rpc(name = "parity_listAccounts")] - fn list_accounts(&self) -> Result>, Error>; + fn list_accounts(&self, u64, Option, Trailing) -> Result>, Error>; /// Returns all storage keys of the given address (first parameter) if Fat DB is enabled (`--fat-db`), /// or null if not. #[rpc(name = "parity_listStorageKeys")] - fn list_storage_keys(&self, H160) -> Result>, Error>; + fn list_storage_keys(&self, H160, u64, Option, Trailing) -> Result>, Error>; /// Encrypt some data with a public key under ECIES. /// First parameter is the 512-byte destination public key, second is the message. 
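For context on the parity.rs and traits/parity.rs hunks above: parity_listAccounts now pages through addresses given a count, an optional address to resume after, and a trailing block number, and parity_listStorageKeys does the same for a single account's storage keys; both return null unless the node runs with Fat DB (--fat-db). A minimal sketch of the request bodies, written in the raw-string style used by the mocked RPC tests in this patch (the concrete count, resume point and address are illustrative assumptions only):

// Sketch only -- not part of this patch. Params are [count, after, blockNumber]
// for accounts and [address, count, after, blockNumber] for storage keys.
let list_accounts_req = r#"{"jsonrpc":"2.0","method":"parity_listAccounts","params":[100,null,"latest"],"id":1}"#;
let list_storage_req = r#"{"jsonrpc":"2.0","method":"parity_listStorageKeys","params":["0xd46e8dd67c5d32be8058bb8eb970870f07244567",100,null,"latest"],"id":2}"#;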
diff --git a/rpc/src/v1/types/confirmations.rs b/rpc/src/v1/types/confirmations.rs index bbbad83f3..d8cfa14d6 100644 --- a/rpc/src/v1/types/confirmations.rs +++ b/rpc/src/v1/types/confirmations.rs @@ -105,10 +105,10 @@ impl Serialize for ConfirmationResponse { #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize)] pub enum ConfirmationPayload { /// Send Transaction - #[serde(rename="transaction")] + #[serde(rename="sendTransaction")] SendTransaction(TransactionRequest), /// Sign Transaction - #[serde(rename="transaction")] + #[serde(rename="signTransaction")] SignTransaction(TransactionRequest), /// Signature #[serde(rename="sign")] @@ -221,7 +221,49 @@ mod tests { // when let res = serde_json::to_string(&ConfirmationRequest::from(request)); - let expected = r#"{"id":"0xf","payload":{"transaction":{"from":"0x0000000000000000000000000000000000000000","to":null,"gasPrice":"0x2710","gas":"0x3a98","value":"0x186a0","data":"0x010203","nonce":"0x1"}}}"#; + let expected = r#"{"id":"0xf","payload":{"sendTransaction":{"from":"0x0000000000000000000000000000000000000000","to":null,"gasPrice":"0x2710","gas":"0x3a98","value":"0x186a0","data":"0x010203","nonce":"0x1"}}}"#; + + // then + assert_eq!(res.unwrap(), expected.to_owned()); + } + + #[test] + fn should_serialize_sign_transaction_confirmation() { + // given + let request = helpers::ConfirmationRequest { + id: 15.into(), + payload: helpers::ConfirmationPayload::SignTransaction(helpers::FilledTransactionRequest { + from: 0.into(), + to: None, + gas: 15_000.into(), + gas_price: 10_000.into(), + value: 100_000.into(), + data: vec![1, 2, 3], + nonce: Some(1.into()), + }), + }; + + // when + let res = serde_json::to_string(&ConfirmationRequest::from(request)); + let expected = r#"{"id":"0xf","payload":{"signTransaction":{"from":"0x0000000000000000000000000000000000000000","to":null,"gasPrice":"0x2710","gas":"0x3a98","value":"0x186a0","data":"0x010203","nonce":"0x1"}}}"#; + + // then + assert_eq!(res.unwrap(), expected.to_owned()); + } + + #[test] + fn should_serialize_decrypt_confirmation() { + // given + let request = helpers::ConfirmationRequest { + id: 15.into(), + payload: helpers::ConfirmationPayload::Decrypt( + 10.into(), vec![1, 2, 3].into(), + ), + }; + + // when + let res = serde_json::to_string(&ConfirmationRequest::from(request)); + let expected = r#"{"id":"0xf","payload":{"decrypt":{"address":"0x000000000000000000000000000000000000000a","msg":"0x010203"}}}"#; // then assert_eq!(res.unwrap(), expected.to_owned()); diff --git a/signer/Cargo.toml b/signer/Cargo.toml index 2a3742ec8..4b56ec474 100644 --- a/signer/Cargo.toml +++ b/signer/Cargo.toml @@ -23,7 +23,7 @@ ethcore-rpc = { path = "../rpc" } ethcore-devtools = { path = "../devtools" } parity-ui = { path = "../dapps/ui", version = "1.4", optional = true } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} [features] dev = ["clippy"] diff --git a/signer/src/authcode_store.rs b/signer/src/authcode_store.rs index cbb78db41..55c0fcb15 100644 --- a/signer/src/authcode_store.rs +++ b/signer/src/authcode_store.rs @@ -80,6 +80,7 @@ pub struct AuthCodes { impl AuthCodes { /// Reads `AuthCodes` from file and creates new instance using `DefaultTimeProvider`. 
+ #[cfg_attr(feature="dev", allow(single_char_pattern))] pub fn from_file(file: &Path) -> io::Result { let content = { if let Ok(mut file) = fs::File::open(file) { @@ -128,7 +129,7 @@ impl AuthCodes { let mut file = try!(fs::File::create(file)); let content = self.codes.iter().map(|code| { let mut data = vec![code.code.clone(), encode_time(code.created_at.clone())]; - if let Some(used_at) = code.last_used_at.clone() { + if let Some(used_at) = code.last_used_at { data.push(encode_time(used_at)); } data.join(SEPARATOR) diff --git a/signer/src/ws_server/session.rs b/signer/src/ws_server/session.rs index 5adc3fa80..13d253587 100644 --- a/signer/src/ws_server/session.rs +++ b/signer/src/ws_server/session.rs @@ -99,7 +99,7 @@ fn auth_is_valid(codes_path: &Path, protocols: ws::Result>) -> bool { let res = codes.is_valid(&auth, time); // make sure to save back authcodes - it might have been modified - if let Err(_) = codes.to_file(codes_path) { + if codes.to_file(codes_path).is_err() { warn!(target: "signer", "Couldn't save authorization codes to file."); } res diff --git a/sync/Cargo.toml b/sync/Cargo.toml index c7e30d6a5..738f5f55c 100644 --- a/sync/Cargo.toml +++ b/sync/Cargo.toml @@ -17,7 +17,7 @@ ethcore-network = { path = "../util/network" } ethcore-io = { path = "../util/io" } ethcore = { path = "../ethcore" } rlp = { path = "../util/rlp" } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} log = "0.3" env_logger = "0.3" time = "0.1.34" diff --git a/sync/src/chain.rs b/sync/src/chain.rs index bfc5f7156..bd312c9ee 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -625,7 +625,7 @@ impl ChainSync { Ok(()) } - #[cfg_attr(feature="dev", allow(cyclomatic_complexity))] + #[cfg_attr(feature="dev", allow(cyclomatic_complexity, needless_borrow))] /// Called by peer once it has new block headers during sync fn on_peer_block_headers(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> { let confirmed = match self.peers.get_mut(&peer_id) { @@ -1174,7 +1174,7 @@ impl ChainSync { } }, SyncState::SnapshotData => { - if let RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } = io.snapshot_service().status() { + if let RestorationStatus::Ongoing { state_chunks_done, block_chunks_done, .. } = io.snapshot_service().status() { if self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize > MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD { trace!(target: "sync", "Snapshot queue full, pausing sync"); self.state = SyncState::SnapshotWaiting; @@ -1426,7 +1426,10 @@ impl ChainSync { packet.append(&chain.best_block_hash); packet.append(&chain.genesis_hash); if warp_protocol { - let manifest = io.snapshot_service().manifest(); + let manifest = match self.old_blocks.is_some() { + true => None, + false => io.snapshot_service().manifest(), + }; let block_number = manifest.as_ref().map_or(0, |m| m.block_number); let manifest_hash = manifest.map_or(H256::new(), |m| m.into_rlp().sha3()); packet.append(&manifest_hash); @@ -1745,7 +1748,7 @@ impl ChainSync { self.restart(io); self.continue_sync(io); }, - RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } => { + RestorationStatus::Ongoing { state_chunks_done, block_chunks_done, .. 
} => { if !self.snapshot.is_complete() && self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize <= MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD { trace!(target:"sync", "Resuming snapshot sync"); self.state = SyncState::SnapshotData; @@ -1999,12 +2002,16 @@ impl ChainSync { #[cfg(test)] mod tests { + use std::collections::{HashSet, VecDeque}; use tests::helpers::*; use tests::snapshot::TestSnapshotService; + use util::{U256, RwLock}; + use util::sha3::Hashable; + use util::hash::{H256, FixedHash}; + use util::bytes::Bytes; + use rlp::{Rlp, RlpStream, UntrustedRlp, View, Stream}; use super::*; use ::SyncConfig; - use util::*; - use rlp::*; use super::{PeerInfo, PeerAsking}; use ethcore::views::BlockView; use ethcore::header::*; diff --git a/sync/src/tests/helpers.rs b/sync/src/tests/helpers.rs index b1c04f84e..10c1277a6 100644 --- a/sync/src/tests/helpers.rs +++ b/sync/src/tests/helpers.rs @@ -158,19 +158,19 @@ impl TestNet { } pub fn peer(&self, i: usize) -> &TestPeer { - self.peers.get(i).unwrap() + &self.peers[i] } pub fn peer_mut(&mut self, i: usize) -> &mut TestPeer { - self.peers.get_mut(i).unwrap() + &mut self.peers[i] } pub fn start(&mut self) { for peer in 0..self.peers.len() { for client in 0..self.peers.len() { if peer != client { - let mut p = self.peers.get_mut(peer).unwrap(); - p.sync.write().update_targets(&mut p.chain); + let mut p = &mut self.peers[peer]; + p.sync.write().update_targets(&p.chain); p.sync.write().on_peer_connected(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId)), client as PeerId); } } @@ -181,7 +181,7 @@ impl TestNet { for peer in 0..self.peers.len() { if let Some(packet) = self.peers[peer].queue.pop_front() { let disconnecting = { - let mut p = self.peers.get_mut(packet.recipient).unwrap(); + let mut p = &mut self.peers[packet.recipient]; trace!("--- {} -> {} ---", peer, packet.recipient); let to_disconnect = { let mut io = TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(peer as PeerId)); @@ -198,7 +198,7 @@ impl TestNet { }; for d in &disconnecting { // notify other peers that this peer is disconnecting - let mut p = self.peers.get_mut(*d).unwrap(); + let mut p = &mut self.peers[*d]; let mut io = TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(peer as PeerId)); p.sync.write().on_peer_aborting(&mut io, peer as PeerId); } diff --git a/test.sh b/test.sh index 44bffa7d9..3e9074478 100755 --- a/test.sh +++ b/test.sh @@ -19,5 +19,5 @@ case $1 in esac . 
./scripts/targets.sh -cargo test $OPTIONS --features "$FEATURES" $TARGETS $1 \ +cargo test -j 8 $OPTIONS --features "$FEATURES" $TARGETS $1 \ diff --git a/util/Cargo.toml b/util/Cargo.toml index cc342eeca..1a439ebf8 100644 --- a/util/Cargo.toml +++ b/util/Cargo.toml @@ -23,7 +23,7 @@ rlp = { path = "rlp" } heapsize = { version = "0.3", features = ["unstable"] } itertools = "0.4" sha3 = { path = "sha3" } -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} ethcore-devtools = { path = "../devtools" } libc = "0.2.7" vergen = "0.1" diff --git a/util/network/Cargo.toml b/util/network/Cargo.toml index fe1938305..37ff825b0 100644 --- a/util/network/Cargo.toml +++ b/util/network/Cargo.toml @@ -15,7 +15,7 @@ time = "0.1.34" tiny-keccak = "1.0" rust-crypto = "0.2.34" slab = "0.2" -clippy = { version = "0.0.96", optional = true} +clippy = { version = "0.0.103", optional = true} igd = "0.5.0" libc = "0.2.7" parking_lot = "0.3" diff --git a/util/network/src/connection.rs b/util/network/src/connection.rs index 05976b5e6..ea0763b09 100644 --- a/util/network/src/connection.rs +++ b/util/network/src/connection.rs @@ -507,7 +507,7 @@ mod tests { use std::io::{Read, Write, Error, Cursor, ErrorKind}; use mio::{Ready}; use std::collections::VecDeque; - use util::bytes::*; + use util::bytes::Bytes; use devtools::*; use io::*; diff --git a/util/network/src/discovery.rs b/util/network/src/discovery.rs index 18bd858eb..996578af5 100644 --- a/util/network/src/discovery.rs +++ b/util/network/src/discovery.rs @@ -555,10 +555,11 @@ impl Discovery { #[cfg(test)] mod tests { use super::*; - use util::hash::*; - use util::sha3::*; - use std::net::*; - use node_table::*; + use std::net::{SocketAddr}; + use util::sha3::Hashable; + use util::FixedHash; + use node_table::{Node, NodeId, NodeEndpoint}; + use std::str::FromStr; use rustc_serialize::hex::FromHex; use ethkey::{Random, Generator}; diff --git a/util/network/src/handshake.rs b/util/network/src/handshake.rs index d7950b383..0eeda3b54 100644 --- a/util/network/src/handshake.rs +++ b/util/network/src/handshake.rs @@ -333,7 +333,7 @@ mod test { use std::sync::Arc; use rustc_serialize::hex::FromHex; use super::*; - use util::hash::*; + use util::hash::{H256, FixedHash}; use io::*; use mio::tcp::TcpStream; use stats::NetworkStats; diff --git a/util/network/src/node_table.rs b/util/network/src/node_table.rs index be70bd9a1..ab7d17fa9 100644 --- a/util/network/src/node_table.rs +++ b/util/network/src/node_table.rs @@ -357,9 +357,9 @@ pub fn is_valid_node_url(url: &str) -> bool { #[cfg(test)] mod tests { use super::*; + use std::net::{SocketAddr, SocketAddrV4, Ipv4Addr}; + use util::H512; use std::str::FromStr; - use std::net::*; - use util::hash::*; use devtools::*; use AllowIP; diff --git a/util/src/common.rs b/util/src/common.rs index ea2a0f5ea..681b0baef 100644 --- a/util/src/common.rs +++ b/util/src/common.rs @@ -33,6 +33,13 @@ macro_rules! vec_into { } } +#[macro_export] +macro_rules! slice_into { + ( $( $x:expr ),* ) => { + &[ $( $x.into() ),* ] + } +} + #[macro_export] macro_rules! hash_map { () => { HashMap::new() }; diff --git a/util/src/hashdb.rs b/util/src/hashdb.rs index 671b32ed5..092d40d8a 100644 --- a/util/src/hashdb.rs +++ b/util/src/hashdb.rs @@ -107,21 +107,6 @@ pub trait HashDB: AsHashDB + Send + Sync { /// } /// ``` fn remove(&mut self, key: &H256); - - /// Insert auxiliary data into hashdb. - fn insert_aux(&mut self, _hash: Vec, _value: Vec) { - unimplemented!(); - } - - /// Get auxiliary data from hashdb. 
- fn get_aux(&self, _hash: &[u8]) -> Option { - unimplemented!(); - } - - /// Removes auxiliary data from hashdb. - fn remove_aux(&mut self, _hash: &[u8]) { - unimplemented!(); - } } /// Upcast trait. diff --git a/util/src/journaldb/archivedb.rs b/util/src/journaldb/archivedb.rs index a8800045b..fb087d7b2 100644 --- a/util/src/journaldb/archivedb.rs +++ b/util/src/journaldb/archivedb.rs @@ -26,10 +26,6 @@ use kvdb::{Database, DBTransaction}; #[cfg(test)] use std::env; -/// Suffix appended to auxiliary keys to distinguish them from normal keys. -/// Would be nich to use rocksdb columns for this eventually. -const AUX_FLAG: u8 = 255; - /// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay /// and latent-removal semantics. /// @@ -108,26 +104,6 @@ impl HashDB for ArchiveDB { fn remove(&mut self, key: &H256) { self.overlay.remove(key); } - - fn insert_aux(&mut self, hash: Vec, value: Vec) { - self.overlay.insert_aux(hash, value); - } - - fn get_aux(&self, hash: &[u8]) -> Option { - if let Some(res) = self.overlay.get_aux(hash) { - return Some(res) - } - - let mut db_hash = hash.to_vec(); - db_hash.push(AUX_FLAG); - - self.backing.get(self.column, &db_hash) - .expect("Low-level database error. Some issue with your hard disk?") - } - - fn remove_aux(&mut self, hash: &[u8]) { - self.overlay.remove_aux(hash); - } } impl JournalDB for ArchiveDB { @@ -164,11 +140,6 @@ impl JournalDB for ArchiveDB { } } - for (mut key, value) in self.overlay.drain_aux() { - key.push(AUX_FLAG); - batch.put(self.column, &key, &value); - } - if self.latest_era.map_or(true, |e| now > e) { batch.put(self.column, &LATEST_ERA_KEY, &encode(&now)); self.latest_era = Some(now); @@ -204,11 +175,6 @@ impl JournalDB for ArchiveDB { } } - for (mut key, value) in self.overlay.drain_aux() { - key.push(AUX_FLAG); - batch.put(self.column, &key, &value); - } - Ok((inserts + deletes) as u32) } @@ -235,8 +201,8 @@ mod tests { #![cfg_attr(feature="dev", allow(similar_names))] use common::*; + use hashdb::{HashDB, DBValue}; use super::*; - use hashdb::*; use journaldb::traits::JournalDB; use kvdb::Database; diff --git a/util/src/journaldb/earlymergedb.rs b/util/src/journaldb/earlymergedb.rs index d17c0ef1e..60263a2cd 100644 --- a/util/src/journaldb/earlymergedb.rs +++ b/util/src/journaldb/earlymergedb.rs @@ -554,9 +554,9 @@ mod tests { #![cfg_attr(feature="dev", allow(similar_names))] use common::*; + use hashdb::{HashDB, DBValue}; use super::*; use super::super::traits::JournalDB; - use hashdb::*; use log::init_log; use kvdb::{Database, DatabaseConfig}; diff --git a/util/src/journaldb/overlayrecentdb.rs b/util/src/journaldb/overlayrecentdb.rs index 42fe84557..34f942a0b 100644 --- a/util/src/journaldb/overlayrecentdb.rs +++ b/util/src/journaldb/overlayrecentdb.rs @@ -422,7 +422,7 @@ mod tests { use common::*; use super::*; - use hashdb::*; + use hashdb::{HashDB, DBValue}; use log::init_log; use journaldb::JournalDB; use kvdb::Database; diff --git a/util/src/journaldb/refcounteddb.rs b/util/src/journaldb/refcounteddb.rs index d63f8837d..82261965c 100644 --- a/util/src/journaldb/refcounteddb.rs +++ b/util/src/journaldb/refcounteddb.rs @@ -215,9 +215,9 @@ mod tests { #![cfg_attr(feature="dev", allow(similar_names))] use common::*; + use hashdb::{HashDB, DBValue}; use super::*; use super::super::traits::JournalDB; - use hashdb::*; #[test] fn long_history() { diff --git a/util/src/kvdb.rs b/util/src/kvdb.rs index 8be22e0aa..de3b033ec 100644 --- a/util/src/kvdb.rs +++ b/util/src/kvdb.rs @@ -628,7 +628,7 @@ 
impl Drop for Database { #[cfg(test)] mod tests { - use hash::*; + use hash::H256; use super::*; use devtools::*; use std::str::FromStr; diff --git a/util/src/memorydb.rs b/util/src/memorydb.rs index 338f12b1e..20dd3a41f 100644 --- a/util/src/memorydb.rs +++ b/util/src/memorydb.rs @@ -17,7 +17,6 @@ //! Reference-counted memory-based `HashDB` implementation. use hash::*; -use bytes::*; use rlp::*; use sha3::*; use hashdb::*; @@ -72,7 +71,6 @@ use std::collections::hash_map::Entry; #[derive(Default, Clone, PartialEq)] pub struct MemoryDB { data: H256FastMap<(DBValue, i32)>, - aux: HashMap, } impl MemoryDB { @@ -80,7 +78,6 @@ impl MemoryDB { pub fn new() -> MemoryDB { MemoryDB { data: H256FastMap::default(), - aux: HashMap::new(), } } @@ -118,11 +115,6 @@ impl MemoryDB { mem::replace(&mut self.data, H256FastMap::default()) } - /// Return the internal map of auxiliary data, clearing the current state. - pub fn drain_aux(&mut self) -> HashMap { - mem::replace(&mut self.aux, HashMap::new()) - } - /// Grab the raw information associated with a key. Returns None if the key /// doesn't exist. /// @@ -138,7 +130,6 @@ impl MemoryDB { /// Returns the size of allocated heap memory pub fn mem_used(&self) -> usize { self.data.heap_size_of_children() - + self.aux.heap_size_of_children() } /// Remove an element and delete it from storage if reference count reaches zero. @@ -256,18 +247,6 @@ impl HashDB for MemoryDB { self.data.insert(key.clone(), (DBValue::new(), -1)); } } - - fn insert_aux(&mut self, hash: Vec, value: Vec) { - self.aux.insert(hash, DBValue::from_vec(value)); - } - - fn get_aux(&self, hash: &[u8]) -> Option { - self.aux.get(hash).cloned() - } - - fn remove_aux(&mut self, hash: &[u8]) { - self.aux.remove(hash); - } } #[test] diff --git a/util/src/stats.rs b/util/src/stats.rs index a106ba29c..2a950ff4f 100644 --- a/util/src/stats.rs +++ b/util/src/stats.rs @@ -67,7 +67,7 @@ mod tests { #[test] fn check_histogram() { - let hist = Histogram::new(&vec_into![643,689,1408,2000,2296,2512,4250,4320,4842,4958,5804,6065,6098,6354,7002,7145,7845,8589,8593,8895], 5).unwrap(); + let hist = Histogram::new(slice_into![643,689,1408,2000,2296,2512,4250,4320,4842,4958,5804,6065,6098,6354,7002,7145,7845,8589,8593,8895], 5).unwrap(); let correct_bounds: Vec = vec_into![643, 2294, 3945, 5596, 7247, 8898]; assert_eq!(Histogram { bucket_bounds: correct_bounds, counts: vec![4,2,4,6,4] }, hist); } @@ -75,7 +75,7 @@ mod tests { #[test] fn smaller_data_range_than_bucket_range() { assert_eq!( - Histogram::new(&vec_into![1, 2, 2], 3), + Histogram::new(slice_into![1, 2, 2], 3), Some(Histogram { bucket_bounds: vec_into![1, 2, 3, 4], counts: vec![1, 2, 0] }) ); } @@ -83,7 +83,7 @@ mod tests { #[test] fn data_range_is_not_multiple_of_bucket_range() { assert_eq!( - Histogram::new(&vec_into![1, 2, 5], 2), + Histogram::new(slice_into![1, 2, 5], 2), Some(Histogram { bucket_bounds: vec_into![1, 4, 7], counts: vec![2, 1] }) ); } @@ -91,13 +91,13 @@ mod tests { #[test] fn data_range_is_multiple_of_bucket_range() { assert_eq!( - Histogram::new(&vec_into![1, 2, 6], 2), + Histogram::new(slice_into![1, 2, 6], 2), Some(Histogram { bucket_bounds: vec_into![1, 4, 7], counts: vec![2, 1] }) ); } #[test] fn none_when_too_few_data() { - assert!(Histogram::new(&vec_into![], 1).is_none()); + assert!(Histogram::new(slice_into![], 1).is_none()); } } diff --git a/util/src/trie/fatdb.rs b/util/src/trie/fatdb.rs index 700156429..ca3f4ca79 100644 --- a/util/src/trie/fatdb.rs +++ b/util/src/trie/fatdb.rs @@ -17,7 +17,7 @@ use hash::H256; use 
 use sha3::Hashable;
 use hashdb::{HashDB, DBValue};
-use super::{TrieDB, Trie, TrieDBIterator, TrieItem, Recorder};
+use super::{TrieDB, Trie, TrieDBIterator, TrieItem, Recorder, TrieIterator};
 
 /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 /// Additionaly it stores inserted hash-key mappings for later retrieval.
@@ -46,7 +46,7 @@ impl<'db> FatDB<'db> {
 	}
 }
 
 impl<'db> Trie for FatDB<'db> {
-	fn iter<'a>(&'a self) -> super::Result<Box<Iterator<Item = TrieItem> + 'a>> {
+	fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
 		FatDBIterator::new(&self.raw).map(|iter| Box::new(iter) as Box<_>)
 	}
 
@@ -81,6 +81,12 @@ impl<'db> FatDBIterator<'db> {
 	}
 }
 
+impl<'db> TrieIterator for FatDBIterator<'db> {
+	fn seek(&mut self, key: &[u8]) -> super::Result<()> {
+		self.trie_iterator.seek(&key.sha3())
+	}
+}
+
 impl<'db> Iterator for FatDBIterator<'db> {
 	type Item = TrieItem<'db>;
 
@@ -88,7 +94,8 @@
 	fn next(&mut self) -> Option<Self::Item> {
 		self.trie_iterator.next()
 			.map(|res|
 				res.map(|(hash, value)| {
-					(self.trie.db().get_aux(&hash).expect("Missing fatdb hash").to_vec(), value)
+					let aux_hash = hash.sha3();
+					(self.trie.db().get(&aux_hash).expect("Missing fatdb hash").to_vec(), value)
 				})
 			)
 	}
diff --git a/util/src/trie/fatdbmut.rs b/util/src/trie/fatdbmut.rs
index fa1c168e8..c81c62f71 100644
--- a/util/src/trie/fatdbmut.rs
+++ b/util/src/trie/fatdbmut.rs
@@ -51,6 +51,10 @@ impl<'db> FatDBMut<'db> {
 	pub fn db_mut(&mut self) -> &mut HashDB {
 		self.raw.db_mut()
 	}
+
+	fn to_aux_key(key: &[u8]) -> H256 {
+		key.sha3()
+	}
 }
 
 impl<'db> TrieMut for FatDBMut<'db> {
@@ -76,12 +80,14 @@ impl<'db> TrieMut for FatDBMut<'db> {
 		let hash = key.sha3();
 		try!(self.raw.insert(&hash, value));
 		let db = self.raw.db_mut();
-		db.insert_aux(hash.to_vec(), key.to_vec());
+		db.emplace(Self::to_aux_key(&hash), DBValue::from_slice(key));
 		Ok(())
 	}
 
 	fn remove(&mut self, key: &[u8]) -> super::Result<()> {
-		self.raw.remove(&key.sha3())
+		let hash = key.sha3();
+		self.raw.db_mut().remove(&Self::to_aux_key(&hash));
+		self.raw.remove(&hash)
 	}
 }
 
diff --git a/util/src/trie/mod.rs b/util/src/trie/mod.rs
index d4cc04962..9c4284b89 100644
--- a/util/src/trie/mod.rs
+++ b/util/src/trie/mod.rs
@@ -102,7 +102,7 @@ pub trait Trie {
 		where 'a: 'b, R: Recorder;
 
 	/// Returns an iterator over elements of trie.
-	fn iter<'a>(&'a self) -> Result<Box<Iterator<Item = TrieItem> + 'a>>;
+	fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>>;
 }
 
 /// A key-value datastore implemented as a database-backed modified Merkle tree.
@@ -130,6 +130,12 @@ pub trait TrieMut {
 	fn remove(&mut self, key: &[u8]) -> Result<()>;
 }
 
+/// A trie iterator that also supports random access.
+pub trait TrieIterator : Iterator {
+	/// Position the iterator on the first element with key > `key`
+	fn seek(&mut self, key: &[u8]) -> Result<()>;
+}
+
 /// Trie types
 #[derive(Debug, PartialEq, Clone)]
 pub enum TrieSpec {
@@ -193,7 +199,7 @@ impl<'db> Trie for TrieKinds<'db> {
 		wrapper!(self, get_recorded, key, r)
 	}
 
-	fn iter<'a>(&'a self) -> Result<Box<Iterator<Item = TrieItem> + 'a>> {
+	fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
 		wrapper!(self, iter,)
 	}
 }
diff --git a/util/src/trie/sectriedb.rs b/util/src/trie/sectriedb.rs
index b1d7bbc0c..0861f53f3 100644
--- a/util/src/trie/sectriedb.rs
+++ b/util/src/trie/sectriedb.rs
@@ -18,7 +18,7 @@ use hash::H256;
 use sha3::Hashable;
 use hashdb::{HashDB, DBValue};
 use super::triedb::TrieDB;
-use super::{Trie, TrieItem, Recorder};
+use super::{Trie, TrieItem, Recorder, TrieIterator};
 
 /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 ///
@@ -49,7 +49,7 @@ impl<'db> SecTrieDB<'db> {
 }
 
 impl<'db> Trie for SecTrieDB<'db> {
-	fn iter<'a>(&'a self) -> super::Result<Box<Iterator<Item = TrieItem> + 'a>> {
+	fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
 		TrieDB::iter(&self.raw)
 	}
 
diff --git a/util/src/trie/triedb.rs b/util/src/trie/triedb.rs
index d929c9d68..ecd8bdded 100644
--- a/util/src/trie/triedb.rs
+++ b/util/src/trie/triedb.rs
@@ -20,7 +20,7 @@ use nibbleslice::*;
 use rlp::*;
 use super::node::Node;
 use super::recorder::{Recorder, NoOp};
-use super::{Trie, TrieItem, TrieError};
+use super::{Trie, TrieItem, TrieError, TrieIterator};
 
 /// A `Trie` implementation using a generic `HashDB` backing database.
 ///
@@ -295,6 +295,64 @@ impl<'a> TrieDBIterator<'a> {
 		Ok(r)
 	}
 
+	fn seek_descend<'key> ( &mut self, node: &[u8], key: &NibbleSlice<'key>, d: u32) -> super::Result<()> {
+		match Node::decoded(node) {
+			Node::Leaf(ref slice, _) => {
+				let slice = &NibbleSlice::from_encoded(slice).0;
+				if slice == key {
+					self.trail.push(Crumb {
+						status: Status::At,
+						node: Node::decoded(node),
+					});
+				} else {
+					self.trail.push(Crumb {
+						status: Status::Exiting,
+						node: Node::decoded(node),
+					});
+				}
+				self.key_nibbles.extend(slice.iter());
+				Ok(())
+			},
+			Node::Extension(ref slice, ref item) => {
+				let slice = &NibbleSlice::from_encoded(slice).0;
+				if key.starts_with(slice) {
+					let mut r = NoOp;
+					self.trail.push(Crumb {
+						status: Status::At,
+						node: Node::decoded(node),
+					});
+					self.key_nibbles.extend(slice.iter());
+					let data = try!(self.db.get_raw_or_lookup(&*item, &mut r, d));
+					self.seek_descend(&data, &key.mid(slice.len()), d + 1)
+				} else {
+					try!(self.descend(node));
+					Ok(())
+				}
+			},
+			Node::Branch(ref nodes, _) => match key.is_empty() {
+				true => {
+					self.trail.push(Crumb {
+						status: Status::At,
+						node: Node::decoded(node),
+					});
+					Ok(())
+				},
+				false => {
+					let mut r = NoOp;
+					let i = key.at(0);
+					self.trail.push(Crumb {
+						status: Status::AtChild(i as usize),
+						node: Node::decoded(node),
+					});
+					self.key_nibbles.push(i);
+					let child = try!(self.db.get_raw_or_lookup(&*nodes[i as usize], &mut r, d));
+					self.seek_descend(&child, &key.mid(1), d + 1)
+				}
+			},
+			_ => Ok(())
+		}
+	}
+
 	/// Descend into a payload.
 	fn descend(&mut self, d: &[u8]) -> super::Result<()> {
 		self.trail.push(Crumb {
@@ -316,6 +374,17 @@ impl<'a> TrieDBIterator<'a> {
 	}
 }
 
+impl<'a> TrieIterator for TrieDBIterator<'a> {
+	/// Position the iterator on the first element with key >= `key`
+	fn seek(&mut self, key: &[u8]) -> super::Result<()> {
+		self.trail.clear();
+		self.key_nibbles.clear();
+		let mut r = NoOp;
+		let root_rlp = try!(self.db.root_data(&mut r));
+		self.seek_descend(&root_rlp, &NibbleSlice::new(key), 1)
+	}
+}
+
 impl<'a> Iterator for TrieDBIterator<'a> {
 	type Item = TrieItem<'a>;
 
@@ -372,7 +441,7 @@ impl<'a> Iterator for TrieDBIterator<'a> {
 }
 
 impl<'db> Trie for TrieDB<'db> {
-	fn iter<'a>(&'a self) -> super::Result<Box<Iterator<Item = TrieItem> + 'a>> {
+	fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
 		TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
 	}
 
@@ -415,3 +484,48 @@ fn iterator() {
 	assert_eq!(d.iter().map(|i| i.clone().to_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
 	assert_eq!(d, t.iter().unwrap().map(|x| x.unwrap().1).collect::<Vec<_>>());
 }
+
+#[test]
+fn iterator_seek() {
+	use memorydb::*;
+	use super::TrieMut;
+	use super::triedbmut::*;
+
+	let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];
+
+	let mut memdb = MemoryDB::new();
+	let mut root = H256::new();
+	{
+		let mut t = TrieDBMut::new(&mut memdb, &mut root);
+		for x in &d {
+			t.insert(x, x).unwrap();
+		}
+	}
+
+	let t = TrieDB::new(&memdb, &root).unwrap();
+	let mut iter = t.iter().unwrap();
+	assert_eq!(iter.next(), Some(Ok((b"A".to_vec(), DBValue::from_slice(b"A")))));
+	iter.seek(b"!").unwrap();
+	assert_eq!(d, iter.map(|x| x.unwrap().1).collect::<Vec<_>>());
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"A").unwrap();
+	assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"AA").unwrap();
+	assert_eq!(&d[2..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"A!").unwrap();
+	assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"AB").unwrap();
+	assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"AB!").unwrap();
+	assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"B").unwrap();
+	assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+	let mut iter = t.iter().unwrap();
+	iter.seek(b"C").unwrap();
+	assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
+}
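For reference, a minimal usage sketch of the `TrieIterator::seek` entry point added above, mirroring the `iterator_seek` test. It assumes the same `util` crate context as the tests (`MemoryDB`, `TrieDB`, `DBValue`, `H256`, `Trie`, `TrieIterator` in scope); the helper name `collect_from` is hypothetical, and the exact boundary behaviour at the sought key is the one exercised by the assertions in `iterator_seek`.

// Illustrative sketch only, not part of the patch: collect the values a trie
// iterator yields after seeking to `from`, exactly as the test above does.
fn collect_from(memdb: &MemoryDB, root: &H256, from: &[u8]) -> Vec<DBValue> {
	// Open a read-only trie over the backing HashDB, as in the tests.
	let t = TrieDB::new(memdb, root).unwrap();
	// `iter()` now returns a boxed TrieIterator, so it can be repositioned.
	let mut iter = t.iter().unwrap();
	iter.seek(from).unwrap();
	// Each item is a Result<(key, value)>; keep only the values here.
	iter.map(|item| item.unwrap().1).collect()
}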