diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7b05c1e01..2833a46c3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -38,7 +38,8 @@ linux-stable: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5" - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu tags: - rust - rust-stable @@ -106,7 +107,8 @@ linux-centos: - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5 - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-centos @@ -144,7 +146,8 @@ linux-i686: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5" - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-i686 @@ -189,7 +192,8 @@ linux-armv7: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data 
"commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -235,6 +239,7 @@ linux-arm: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -272,7 +277,8 @@ linux-armv6: - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -316,7 +322,8 @@ linux-aarch64: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -352,7 +359,8 @@ darwin: - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5" - - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" 
http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - osx artifacts: @@ -413,7 +421,8 @@ windows: - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5 - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5 - - curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://icarus.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM% + - curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM% + - curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1338/push-build/%CI_BUILD_REF_NAME%/%PLATFORM% tags: - rust-windows artifacts: @@ -526,6 +535,7 @@ push-release: - triggers image: ethcore/rust:stable script: - - curl --data "secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF + - curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF + - curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF tags: - curl diff --git a/Cargo.lock b/Cargo.lock index 86f7cef57..923418a2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1501,7 +1501,7 @@ dependencies = [ [[package]] name = "parity-ui-precompiled" version = "1.4.0" -source = "git+https://github.com/ethcore/js-precompiled.git#a74caf6d8fe4b3371b291fb47f15c043504ef738" +source = "git+https://github.com/ethcore/js-precompiled.git#fbc7864393ebbc78ea8f7bc4729f2ac3bdcb9a0e" dependencies = [ "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/dapps/src/apps/fs.rs b/dapps/src/apps/fs.rs index 12bad2e3d..9984a112a 100644 --- a/dapps/src/apps/fs.rs +++ b/dapps/src/apps/fs.rs @@ -17,7 +17,7 @@ use std::io; use std::io::Read; use std::fs; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use page::{LocalPageEndpoint, PageCache}; use endpoint::{Endpoints, EndpointInfo}; use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest}; @@ -28,10 +28,79 @@ struct LocalDapp { info: EndpointInfo, } -fn local_dapps(dapps_path: String) -> Vec { - let files = fs::read_dir(dapps_path.as_str()); +/// Tries to find and read manifest file in given `path` to extract `EndpointInfo` +/// If manifest is not found sensible default `EndpointInfo` is returned based on given `name`. +fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo { + path.push(MANIFEST_FILENAME); + + fs::File::open(path.clone()) + .map_err(|e| format!("{:?}", e)) + .and_then(|mut f| { + // Reat file + let mut s = String::new(); + f.read_to_string(&mut s).map_err(|e| format!("{:?}", e))?; + // Try to deserialize manifest + deserialize_manifest(s) + }) + .map(Into::into) + .unwrap_or_else(|e| { + warn!(target: "dapps", "Cannot read manifest file at: {:?}. 
Error: {:?}", path, e);
+
+			EndpointInfo {
+				name: name.into(),
+				description: name.into(),
+				version: "0.0.0".into(),
+				author: "?".into(),
+				icon_url: "icon.png".into(),
+			}
+		})
+}
+
+/// Returns Dapp Id and Local Dapp Endpoint for given filesystem path.
+/// Parses the path to extract last component (for name).
+/// `None` is returned when path is invalid or non-existent.
+pub fn local_endpoint<P: AsRef<Path>>(path: P, signer_address: Option<(String, u16)>) -> Option<(String, Box<LocalPageEndpoint>)> {
+	let path = path.as_ref().to_owned();
+	path.canonicalize().ok().and_then(|path| {
+		let name = path.file_name().and_then(|name| name.to_str());
+		name.map(|name| {
+			let dapp = local_dapp(name.into(), path.clone());
+			(dapp.id, Box::new(LocalPageEndpoint::new(
+				dapp.path, dapp.info, PageCache::Disabled, signer_address.clone())
+			))
+		})
+	})
+}
+
+
+fn local_dapp(name: String, path: PathBuf) -> LocalDapp {
+	// try to get manifest file
+	let info = read_manifest(&name, path.clone());
+	LocalDapp {
+		id: name,
+		path: path,
+		info: info,
+	}
+}
+
+/// Returns endpoints for Local Dapps found for given filesystem path.
+/// Scans the directory and collects `LocalPageEndpoints`.
+pub fn local_endpoints<P: AsRef<Path>>(dapps_path: P, signer_address: Option<(String, u16)>) -> Endpoints {
+	let mut pages = Endpoints::new();
+	for dapp in local_dapps(dapps_path.as_ref()) {
+		pages.insert(
+			dapp.id,
+			Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, signer_address.clone()))
+		);
+	}
+	pages
+}
+
+
+fn local_dapps(dapps_path: &Path) -> Vec<LocalDapp> {
+	let files = fs::read_dir(dapps_path);
 	if let Err(e) = files {
-		warn!(target: "dapps", "Unable to load local dapps from: {}. Reason: {:?}", dapps_path, e);
+		warn!(target: "dapps", "Unable to load local dapps from: {}. Reason: {:?}", dapps_path.display(), e);
 		return vec![];
 	}
 
@@ -59,51 +128,6 @@ fn local_dapps(dapps_path: String) -> Vec<LocalDapp> {
 		}
 		m.ok()
 	})
-	.map(|(name, path)| {
-		// try to get manifest file
-		let info = read_manifest(&name, path.clone());
-		LocalDapp {
-			id: name,
-			path: path,
-			info: info,
-		}
-	})
+	.map(|(name, path)| local_dapp(name, path))
 	.collect()
 }
-
-fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
-	path.push(MANIFEST_FILENAME);
-
-	fs::File::open(path.clone())
-		.map_err(|e| format!("{:?}", e))
-		.and_then(|mut f| {
-			// Reat file
-			let mut s = String::new();
-			f.read_to_string(&mut s).map_err(|e| format!("{:?}", e))?;
-			// Try to deserialize manifest
-			deserialize_manifest(s)
-		})
-		.map(Into::into)
-		.unwrap_or_else(|e| {
-			warn!(target: "dapps", "Cannot read manifest file at: {:?}. Error: {:?}", path, e);
-
-			EndpointInfo {
-				name: name.into(),
-				description: name.into(),
-				version: "0.0.0".into(),
-				author: "?".into(),
-				icon_url: "icon.png".into(),
-			}
-		})
-}
-
-pub fn local_endpoints(dapps_path: String, signer_address: Option<(String, u16)>) -> Endpoints {
-	let mut pages = Endpoints::new();
-	for dapp in local_dapps(dapps_path) {
-		pages.insert(
-			dapp.id,
-			Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, signer_address.clone()))
-		);
-	}
-	pages
-}
diff --git a/dapps/src/apps/mod.rs b/dapps/src/apps/mod.rs
index 51f8f5572..f32cf9042 100644
--- a/dapps/src/apps/mod.rs
+++ b/dapps/src/apps/mod.rs
@@ -14,6 +14,7 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+use std::path::PathBuf;
 use std::sync::Arc;
 use endpoint::{Endpoints, Endpoint};
 use page::PageEndpoint;
@@ -43,7 +44,8 @@ pub fn utils() -> Box<Endpoint> {
 }
 
 pub fn all_endpoints(
-	dapps_path: String,
+	dapps_path: PathBuf,
+	extra_dapps: Vec<PathBuf>,
 	signer_address: Option<(String, u16)>,
 	web_proxy_tokens: Arc<WebProxyTokens>,
 	remote: Remote,
@@ -51,6 +53,13 @@
 ) -> Endpoints {
 	// fetch fs dapps at first to avoid overwriting builtins
 	let mut pages = fs::local_endpoints(dapps_path, signer_address.clone());
+	for path in extra_dapps {
+		if let Some((id, endpoint)) = fs::local_endpoint(path.clone(), signer_address.clone()) {
+			pages.insert(id, endpoint);
+		} else {
+			warn!(target: "dapps", "Ignoring invalid dapp at {}", path.display());
+		}
+	}
 
 	// NOTE [ToDr] Dapps will be currently embeded on 8180
 	insert::(&mut pages, "ui", Embeddable::Yes(signer_address.clone()));
diff --git a/dapps/src/lib.rs b/dapps/src/lib.rs
index effa29fcc..64ee0c341 100644
--- a/dapps/src/lib.rs
+++ b/dapps/src/lib.rs
@@ -88,6 +88,7 @@ mod web;
 #[cfg(test)]
 mod tests;
 
+use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};
 use std::net::SocketAddr;
 use std::collections::HashMap;
@@ -123,7 +124,8 @@ impl WebProxyTokens for F where F: Fn(String) -> bool + Send + Sync {
 
 /// Webapps HTTP+RPC server build.
 pub struct ServerBuilder {
-	dapps_path: String,
+	dapps_path: PathBuf,
+	extra_dapps: Vec<PathBuf>,
 	handler: Arc<IoHandler>,
 	registrar: Arc<ContractClient>,
 	sync_status: Arc<SyncStatus>,
@@ -141,9 +143,10 @@ impl Extendable for ServerBuilder {
 
 impl ServerBuilder {
 	/// Construct new dapps server
-	pub fn new(dapps_path: String, registrar: Arc<ContractClient>, remote: Remote) -> Self {
+	pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
 		ServerBuilder {
-			dapps_path: dapps_path,
+			dapps_path: dapps_path.as_ref().to_owned(),
+			extra_dapps: vec![],
 			handler: Arc::new(IoHandler::new()),
 			registrar: registrar,
 			sync_status: Arc::new(|| false),
@@ -160,6 +163,7 @@ impl ServerBuilder {
 	pub fn fetch(self, fetch: X) -> ServerBuilder {
 		ServerBuilder {
 			dapps_path: self.dapps_path,
+			extra_dapps: vec![],
 			handler: self.handler,
 			registrar: self.registrar,
 			sync_status: self.sync_status,
@@ -188,6 +192,12 @@ impl ServerBuilder {
 		self
 	}
 
+	/// Change extra dapps paths (apart from `dapps_path`)
+	pub fn extra_dapps<P: AsRef<Path>>(mut self, extra_dapps: &[P]) -> Self {
+		self.extra_dapps = extra_dapps.iter().map(|p| p.as_ref().to_owned()).collect();
+		self
+	}
+
 	/// Asynchronously start server with no authentication,
 	/// returns result with `Server` handle on success or an error.
 	pub fn start_unsecured_http(self, addr: &SocketAddr, hosts: Option<Vec<String>>) -> Result<Server, ServerError> {
@@ -197,6 +207,7 @@ impl ServerBuilder {
 			NoAuth,
 			self.handler.clone(),
 			self.dapps_path.clone(),
+			self.extra_dapps.clone(),
 			self.signer_address.clone(),
 			self.registrar.clone(),
 			self.sync_status.clone(),
@@ -215,6 +226,7 @@
 			HttpBasicAuth::single_user(username, password),
 			self.handler.clone(),
 			self.dapps_path.clone(),
+			self.extra_dapps.clone(),
 			self.signer_address.clone(),
 			self.registrar.clone(),
 			self.sync_status.clone(),
@@ -270,7 +282,8 @@ impl Server {
 		hosts: Option<Vec<String>>,
 		authorization: A,
 		handler: Arc<IoHandler>,
-		dapps_path: String,
+		dapps_path: PathBuf,
+		extra_dapps: Vec<PathBuf>,
 		signer_address: Option<(String, u16)>,
 		registrar: Arc<ContractClient>,
 		sync_status: Arc<SyncStatus>,
@@ -287,7 +300,14 @@ impl Server {
 			remote.clone(),
 			fetch.clone(),
 		));
-		let endpoints = Arc::new(apps::all_endpoints(dapps_path, signer_address.clone(), web_proxy_tokens, remote.clone(), fetch.clone()));
+		let endpoints = Arc::new(apps::all_endpoints(
+			dapps_path,
+			extra_dapps,
+			signer_address.clone(),
+			web_proxy_tokens,
+			remote.clone(),
+			fetch.clone(),
+		));
 		let cors_domains = Self::cors_domains(signer_address.clone());
 
 		let special = Arc::new({
diff --git a/dapps/src/tests/helpers/mod.rs b/dapps/src/tests/helpers/mod.rs
index d3f97b35b..5cc367fcc 100644
--- a/dapps/src/tests/helpers/mod.rs
+++ b/dapps/src/tests/helpers/mod.rs
@@ -51,7 +51,7 @@ pub fn init_server(hosts: Option<Vec<String>>, process: F, remote: Remote)
 	let mut dapps_path = env::temp_dir();
 	dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
 	let server = process(ServerBuilder::new(
-		dapps_path.to_str().unwrap().into(), registrar.clone(), remote,
+		&dapps_path, registrar.clone(), remote,
 	))
 		.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
 		.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap();
@@ -66,7 +66,7 @@ pub fn serve_with_auth(user: &str, pass: &str) -> Server {
 	let registrar = Arc::new(FakeRegistrar::new());
 	let mut dapps_path = env::temp_dir();
 	dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
-	ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone(), Remote::new_sync())
+	ServerBuilder::new(&dapps_path, registrar.clone(), Remote::new_sync())
 		.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
 		.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap()
 }
diff --git a/ethcore/src/state/account.rs b/ethcore/src/state/account.rs
index 63e8ff9de..87b52a894 100644
--- a/ethcore/src/state/account.rs
+++ b/ethcore/src/state/account.rs
@@ -178,8 +178,8 @@ impl Account {
 				SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
 				using it will not fail.");
-		let item: U256 = match db.get(key){
-			Ok(x) => x.map_or_else(U256::zero, |v| decode(&*v)),
+		let item: U256 = match db.get_with(key, ::rlp::decode) {
+			Ok(x) => x.unwrap_or_else(U256::zero),
 			Err(e) => panic!("Encountered potential DB corruption: {}", e),
 		};
 		let value: H256 = item.into();
@@ -453,12 +453,12 @@ impl Account {
 	/// omitted.
 	pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
 		use util::trie::{Trie, TrieDB};
-		use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder};
+		use util::trie::recorder::Recorder;
 
-		let mut recorder = TrieRecorder::with_depth(from_level);
+		let mut recorder = Recorder::with_depth(from_level);
 
 		let trie = TrieDB::new(db, &self.storage_root)?;
-		let _ = trie.get_recorded(&storage_key, &mut recorder)?;
+		let _ = trie.get_with(&storage_key, &mut recorder)?;
 
 		Ok(recorder.drain().into_iter().map(|r| r.data).collect())
 	}
diff --git a/ethcore/src/state/mod.rs b/ethcore/src/state/mod.rs
index c9730c1c3..cfd53053e 100644
--- a/ethcore/src/state/mod.rs
+++ b/ethcore/src/state/mod.rs
@@ -32,7 +32,7 @@
 use state_db::StateDB;
 
 use util::*;
-use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder};
+use util::trie::recorder::Recorder;
 
 mod account;
 mod substate;
@@ -425,8 +425,8 @@ impl State {
 		// account is not found in the global cache, get from the DB and insert into local
 		let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-		let maybe_acc = match db.get(address) {
-			Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
+		let maybe_acc = match db.get_with(address, Account::from_rlp) {
+			Ok(acc) => acc,
 			Err(e) => panic!("Potential DB corruption encountered: {}", e),
 		};
 		let r = maybe_acc.as_ref().map_or(H256::new(), |a| {
@@ -690,8 +690,8 @@ impl State {
 		// not found in the global cache, get from the DB and insert into local
 		let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-		let mut maybe_acc = match db.get(a) {
-			Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
+		let mut maybe_acc = match db.get_with(a, Account::from_rlp) {
+			Ok(acc) => acc,
 			Err(e) => panic!("Potential DB corruption encountered: {}", e),
 		};
 		if let Some(ref mut account) = maybe_acc.as_mut() {
@@ -722,9 +722,8 @@ impl State {
 			None => {
 				let maybe_acc = if self.db.check_non_null_bloom(a) {
 					let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-					match db.get(a) {
-						Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))),
-						Ok(None) => AccountEntry::new_clean(None),
+					match db.get_with(a, Account::from_rlp) {
+						Ok(acc) => AccountEntry::new_clean(acc),
 						Err(e) => panic!("Potential DB corruption encountered: {}", e),
 					}
 				} else {
@@ -770,9 +769,9 @@ impl State {
 	/// Requires a secure trie to be used for accurate results.
 	/// `account_key` == sha3(address)
 	pub fn prove_account(&self, account_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
-		let mut recorder = TrieRecorder::with_depth(from_level);
+		let mut recorder = Recorder::with_depth(from_level);
 		let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
-		let _ = trie.get_recorded(&account_key, &mut recorder)?;
+		trie.get_with(&account_key, &mut recorder)?;
 
 		Ok(recorder.drain().into_iter().map(|r| r.data).collect())
 	}
@@ -786,8 +785,8 @@ impl State {
 		// TODO: probably could look into cache somehow but it's keyed by
 		// address, not sha3(address).
 		let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
-		let acc = match trie.get(&account_key)? {
-			Some(rlp) => Account::from_rlp(&rlp),
+		let acc = match trie.get_with(&account_key, Account::from_rlp)? {
+			Some(acc) => acc,
 			None => return Ok(Vec::new()),
 		};
 
@@ -799,8 +798,8 @@ impl State {
 	/// Only works when backed by a secure trie.
pub fn code_by_address_hash(&self, account_key: H256) -> Result, Box> { let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; - let mut acc = match trie.get(&account_key)? { - Some(rlp) => Account::from_rlp(&rlp), + let mut acc = match trie.get_with(&account_key, Account::from_rlp)? { + Some(acc) => acc, None => return Ok(None), }; diff --git a/js/package.json b/js/package.json index 13e03f0c7..d5a35ad89 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "parity.js", - "version": "0.2.173", + "version": "0.2.178", "main": "release/index.js", "jsnext:main": "src/index.js", "author": "Parity Team ", diff --git a/js/src/api/contract/contract.js b/js/src/api/contract/contract.js index 70853749d..9c3b02b72 100644 --- a/js/src/api/contract/contract.js +++ b/js/src/api/contract/contract.js @@ -75,6 +75,10 @@ export default class Contract { return this._functions; } + get receipt () { + return this._receipt; + } + get instance () { this._instance.address = this._address; return this._instance; @@ -139,6 +143,7 @@ export default class Contract { } setState({ state: 'hasReceipt', receipt }); + this._receipt = receipt; this._address = receipt.contractAddress; return this._address; }); diff --git a/js/src/modals/CreateAccount/NewAccount/newAccount.js b/js/src/modals/CreateAccount/NewAccount/newAccount.js index c0bcca91a..ed2c24612 100644 --- a/js/src/modals/CreateAccount/NewAccount/newAccount.js +++ b/js/src/modals/CreateAccount/NewAccount/newAccount.js @@ -40,7 +40,7 @@ export default class CreateAccount extends Component { accountNameError: ERRORS.noName, accounts: null, isValidName: false, - isValidPass: false, + isValidPass: true, passwordHint: '', password1: '', password1Error: null, diff --git a/js/src/modals/CreateAccount/NewImport/newImport.js b/js/src/modals/CreateAccount/NewImport/newImport.js index 17a1cfd44..91ef39d95 100644 --- a/js/src/modals/CreateAccount/NewImport/newImport.js +++ b/js/src/modals/CreateAccount/NewImport/newImport.js @@ -37,7 +37,7 @@ export default class NewImport extends Component { accountName: '', accountNameError: ERRORS.noName, isValidFile: false, - isValidPass: false, + isValidPass: true, isValidName: false, password: '', passwordError: null, diff --git a/js/src/modals/CreateAccount/RawKey/rawKey.js b/js/src/modals/CreateAccount/RawKey/rawKey.js index f284cf323..d0b3a4c71 100644 --- a/js/src/modals/CreateAccount/RawKey/rawKey.js +++ b/js/src/modals/CreateAccount/RawKey/rawKey.js @@ -36,7 +36,7 @@ export default class RawKey extends Component { accountNameError: ERRORS.noName, isValidKey: false, isValidName: false, - isValidPass: false, + isValidPass: true, passwordHint: '', password1: '', password1Error: null, @@ -119,8 +119,6 @@ export default class RawKey extends Component { const rawKey = event.target.value; let rawKeyError = null; - console.log(rawKey.length, rawKey); - if (!rawKey || !rawKey.trim().length) { rawKeyError = ERRORS.noKey; } else if (rawKey.substr(0, 2) !== '0x' || rawKey.substr(2).length !== 64 || !api.util.isHex(rawKey)) { diff --git a/js/src/modals/CreateAccount/RecoveryPhrase/recoveryPhrase.js b/js/src/modals/CreateAccount/RecoveryPhrase/recoveryPhrase.js index fd5043024..dc80e27ae 100644 --- a/js/src/modals/CreateAccount/RecoveryPhrase/recoveryPhrase.js +++ b/js/src/modals/CreateAccount/RecoveryPhrase/recoveryPhrase.js @@ -31,9 +31,9 @@ export default class RecoveryPhrase extends Component { state = { accountName: '', accountNameError: ERRORS.noName, - isValidPass: false, + isValidPass: true, isValidName: 
false, - isValidPhrase: false, + isValidPhrase: true, passwordHint: '', password1: '', password1Error: null, diff --git a/js/src/modals/CreateAccount/createAccount.js b/js/src/modals/CreateAccount/createAccount.js index 569d374cc..53be1f918 100644 --- a/js/src/modals/CreateAccount/createAccount.js +++ b/js/src/modals/CreateAccount/createAccount.js @@ -240,6 +240,7 @@ export default class CreateAccount extends Component { if (createType === 'fromNew' || createType === 'fromPhrase') { let phrase = this.state.phrase; + if (createType === 'fromPhrase' && windowsPhrase) { phrase = phrase .split(' ') // get the words @@ -271,7 +272,9 @@ export default class CreateAccount extends Component { this.newError(error); }); - } else if (createType === 'fromRaw') { + } + + if (createType === 'fromRaw') { return api.parity .newAccountFromSecret(this.state.rawKey, this.state.password) .then((address) => { @@ -296,7 +299,9 @@ export default class CreateAccount extends Component { this.newError(error); }); - } else if (createType === 'fromGeth') { + } + + if (createType === 'fromGeth') { return api.parity .importGethAccounts(this.state.gethAddresses) .then((result) => { diff --git a/js/src/modals/DeployContract/deployContract.js b/js/src/modals/DeployContract/deployContract.js index 6b09986ad..1bda0dddf 100644 --- a/js/src/modals/DeployContract/deployContract.js +++ b/js/src/modals/DeployContract/deployContract.js @@ -455,10 +455,15 @@ class DeployContract extends Component { this.setState({ step: 'DEPLOYMENT' }); - api - .newContract(abiParsed) + const contract = api.newContract(abiParsed); + + contract .deploy(options, params, this.onDeploymentState) .then((address) => { + const blockNumber = contract._receipt + ? contract.receipt.blockNumber.toNumber() + : null; + return Promise.all([ api.parity.setAccountName(address, name), api.parity.setAccountMeta(address, { @@ -466,8 +471,9 @@ class DeployContract extends Component { contract: true, timestamp: Date.now(), deleted: false, - source, - description + blockNumber, + description, + source }) ]) .then(() => { diff --git a/js/src/modals/PasswordManager/store.js b/js/src/modals/PasswordManager/store.js index c60576e0f..659543c28 100644 --- a/js/src/modals/PasswordManager/store.js +++ b/js/src/modals/PasswordManager/store.js @@ -133,7 +133,7 @@ export default class Store { } testPassword = (password) => { - this.setBusy(false); + this.setBusy(true); return this._api.parity .testPassword(this.address, password || this.validatePassword) diff --git a/js/src/redux/providers/balances.js b/js/src/redux/providers/balances.js index 0a73ef4f9..8d46e42d2 100644 --- a/js/src/redux/providers/balances.js +++ b/js/src/redux/providers/balances.js @@ -86,7 +86,7 @@ export default class Balances { // If syncing, only retrieve balances once every // few seconds if (syncing) { - this.shortThrottledFetch(); + this.shortThrottledFetch.cancel(); return this.longThrottledFetch(); } diff --git a/js/src/redux/providers/balancesActions.js b/js/src/redux/providers/balancesActions.js index d90cab678..56a2ebafd 100644 --- a/js/src/redux/providers/balancesActions.js +++ b/js/src/redux/providers/balancesActions.js @@ -173,18 +173,15 @@ export function fetchTokens (_tokenIds) { export function fetchBalances (_addresses) { return (dispatch, getState) => { const { api, personal } = getState(); - const { visibleAccounts, accountsInfo } = personal; + const { visibleAccounts, accounts } = personal; - const addresses = uniq((_addresses || visibleAccounts || []).concat(Object.keys(accountsInfo))); 
- - if (addresses.length === 0) { - return Promise.resolve(); - } + const addresses = uniq(_addresses || visibleAccounts || []); // With only a single account, more info will be displayed. const fullFetch = addresses.length === 1; - const addressesToFetch = uniq(addresses); + // Add accounts addresses (for notifications, accounts selection, etc.) + const addressesToFetch = uniq(addresses.concat(Object.keys(accounts))); return Promise .all(addressesToFetch.map((addr) => fetchAccount(addr, api, fullFetch))) diff --git a/js/src/redux/providers/certifications/middleware.js b/js/src/redux/providers/certifications/middleware.js index c81aa7e67..498c1cb93 100644 --- a/js/src/redux/providers/certifications/middleware.js +++ b/js/src/redux/providers/certifications/middleware.js @@ -218,6 +218,7 @@ export default class CertificationsMiddleware { const _addresses = action.addresses || []; addresses = uniq(addresses.concat(_addresses)); fetchConfirmedEvents(); + next(action); break; default: diff --git a/js/src/redux/providers/compilerActions.js b/js/src/redux/providers/compilerActions.js deleted file mode 100644 index d638c03a2..000000000 --- a/js/src/redux/providers/compilerActions.js +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2015, 2016 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . 
- -import PromiseWorker from 'promise-worker'; -import runtime from 'serviceworker-webpack-plugin/lib/runtime'; - -let workerRegistration; - -// Setup the Service Worker -if ('serviceWorker' in navigator) { - workerRegistration = runtime - .register() - .then(() => navigator.serviceWorker.ready) - .then((registration) => { - const _worker = registration.active; - _worker.controller = registration.active; - const worker = new PromiseWorker(_worker); - - return worker; - }); -} else { - workerRegistration = Promise.reject('Service Worker is not available in your browser.'); -} - -export function setWorker (worker) { - return { - type: 'setWorker', - worker - }; -} - -export function setError (error) { - return { - type: 'setError', - error - }; -} - -export function setupWorker () { - return (dispatch, getState) => { - const state = getState(); - - if (state.compiler.worker) { - return; - } - - workerRegistration - .then((worker) => { - dispatch(setWorker(worker)); - }) - .catch((error) => { - console.error('sw', error); - dispatch(setWorker(null)); - }); - }; -} diff --git a/js/src/redux/providers/index.js b/js/src/redux/providers/index.js index 6a000bdac..e1441c479 100644 --- a/js/src/redux/providers/index.js +++ b/js/src/redux/providers/index.js @@ -22,7 +22,7 @@ export Status from './status'; export apiReducer from './apiReducer'; export balancesReducer from './balancesReducer'; export blockchainReducer from './blockchainReducer'; -export compilerReducer from './compilerReducer'; +export workerReducer from './workerReducer'; export imagesReducer from './imagesReducer'; export personalReducer from './personalReducer'; export signerReducer from './signerReducer'; diff --git a/js/src/redux/providers/personalActions.js b/js/src/redux/providers/personalActions.js index 1ed39c05a..5d91aeef8 100644 --- a/js/src/redux/providers/personalActions.js +++ b/js/src/redux/providers/personalActions.js @@ -122,7 +122,7 @@ export function setVisibleAccounts (addresses) { return; } - dispatch(fetchBalances(addresses)); dispatch(_setVisibleAccounts(addresses)); + dispatch(fetchBalances(addresses)); }; } diff --git a/js/src/redux/providers/personalReducer.js b/js/src/redux/providers/personalReducer.js index daadd54b3..b6cd051a4 100644 --- a/js/src/redux/providers/personalReducer.js +++ b/js/src/redux/providers/personalReducer.js @@ -47,7 +47,7 @@ export default handleActions({ setVisibleAccounts (state, action) { const addresses = (action.addresses || []).sort(); - if (isEqual(addresses, state.addresses)) { + if (isEqual(addresses, state.visibleAccounts)) { return state; } diff --git a/js/src/redux/providers/signerMiddleware.js b/js/src/redux/providers/signerMiddleware.js index 018e01e59..ba51d3426 100644 --- a/js/src/redux/providers/signerMiddleware.js +++ b/js/src/redux/providers/signerMiddleware.js @@ -17,7 +17,7 @@ import * as actions from './signerActions'; import { inHex } from '~/api/format/input'; -import { Wallet } from '../../util/wallet'; +import { Signer } from '../../util/signer'; export default class SignerMiddleware { constructor (api) { @@ -58,6 +58,7 @@ export default class SignerMiddleware { promise .then((txHash) => { console.log('confirmRequest', id, txHash); + if (!txHash) { store.dispatch(actions.errorConfirmRequest({ id, err: 'Unable to confirm.' 
})); return; @@ -73,33 +74,49 @@ export default class SignerMiddleware { // Sign request in-browser const transaction = payload.sendTransaction || payload.signTransaction; + if (wallet && transaction) { - (transaction.nonce.isZero() + const noncePromise = transaction.nonce.isZero() ? this._api.parity.nextNonce(transaction.from) - : Promise.resolve(transaction.nonce) - ).then(nonce => { - let txData = { - to: inHex(transaction.to), - nonce: inHex(transaction.nonce.isZero() ? nonce : transaction.nonce), - gasPrice: inHex(transaction.gasPrice), - gasLimit: inHex(transaction.gas), - value: inHex(transaction.value), - data: inHex(transaction.data) - }; + : Promise.resolve(transaction.nonce); - try { - // NOTE: Derving the key takes significant amount of time, - // make sure to display some kind of "in-progress" state. - const signer = Wallet.fromJson(wallet, password); - const rawTx = signer.signTransaction(txData); + const { worker } = store.getState().worker; - handlePromise(this._api.signer.confirmRequestRaw(id, rawTx)); - } catch (error) { - console.error(error); + const signerPromise = worker && worker._worker.state === 'activated' + ? worker + .postMessage({ + action: 'getSignerSeed', + data: { wallet, password } + }) + .then((result) => { + const seed = Buffer.from(result.data); + return new Signer(seed); + }) + : Signer.fromJson(wallet, password); + + // NOTE: Derving the key takes significant amount of time, + // make sure to display some kind of "in-progress" state. + return Promise + .all([ signerPromise, noncePromise ]) + .then(([ signer, nonce ]) => { + const txData = { + to: inHex(transaction.to), + nonce: inHex(transaction.nonce.isZero() ? nonce : transaction.nonce), + gasPrice: inHex(transaction.gasPrice), + gasLimit: inHex(transaction.gas), + value: inHex(transaction.value), + data: inHex(transaction.data) + }; + + return signer.signTransaction(txData); + }) + .then((rawTx) => { + return handlePromise(this._api.signer.confirmRequestRaw(id, rawTx)); + }) + .catch((error) => { + console.error(error.message); store.dispatch(actions.errorConfirmRequest({ id, err: error.message })); - } - }); - return; + }); } handlePromise(this._api.signer.confirmRequest(id, { gas, gasPrice }, password)); diff --git a/js/src/redux/providers/status.js b/js/src/redux/providers/status.js index ef4c09224..6d0e24c6b 100644 --- a/js/src/redux/providers/status.js +++ b/js/src/redux/providers/status.js @@ -125,12 +125,13 @@ export default class Status { this._store.dispatch(statusCollection(status)); this._status = status; } + + nextTimeout(); }) .catch((error) => { console.error('_pollStatus', error); + nextTimeout(); }); - - nextTimeout(); } /** diff --git a/js/src/redux/providers/worker.js b/js/src/redux/providers/worker.js new file mode 100644 index 000000000..35ca0f173 --- /dev/null +++ b/js/src/redux/providers/worker.js @@ -0,0 +1,68 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+
+// You should have received a copy of the GNU General Public License
+// along with Parity. If not, see <http://www.gnu.org/licenses/>.
+
+import PromiseWorker from 'promise-worker';
+import runtime from 'serviceworker-webpack-plugin/lib/runtime';
+
+import { setWorker } from './workerActions';
+
+function getWorker () {
+  // Setup the Service Worker
+  if ('serviceWorker' in navigator) {
+    return runtime
+      .register()
+      .then(() => navigator.serviceWorker.ready)
+      .then((registration) => {
+        const worker = registration.active;
+        worker.controller = registration.active;
+
+        return new PromiseWorker(worker);
+      });
+  }
+
+  return Promise.reject('Service Worker is not available in your browser.');
+}
+
+export const setupWorker = (store) => {
+  const { dispatch, getState } = store;
+
+  const state = getState();
+  const stateWorker = state.worker.worker;
+
+  if (stateWorker !== undefined && !(stateWorker && stateWorker._worker.state === 'redundant')) {
+    return;
+  }
+
+  getWorker()
+    .then((worker) => {
+      if (worker) {
+        worker._worker.addEventListener('statechange', (event) => {
+          console.warn('worker state changed to', worker._worker.state);
+
+          // Re-install the new Worker
+          if (worker._worker.state === 'redundant') {
+            setupWorker(store);
+          }
+        });
+      }
+
+      dispatch(setWorker(worker));
+    })
+    .catch((error) => {
+      console.error('sw', error);
+      dispatch(setWorker(null));
+    });
+};
diff --git a/js/src/views/Contracts/Summary/index.js b/js/src/redux/providers/workerActions.js
similarity index 80%
rename from js/src/views/Contracts/Summary/index.js
rename to js/src/redux/providers/workerActions.js
index 980ecff9a..50a877508 100644
--- a/js/src/views/Contracts/Summary/index.js
+++ b/js/src/redux/providers/workerActions.js
@@ -14,4 +14,16 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
-export default from './summary'; +export function setWorker (worker) { + return { + type: 'setWorker', + worker + }; +} + +export function setError (error) { + return { + type: 'setError', + error + }; +} diff --git a/js/src/redux/providers/compilerReducer.js b/js/src/redux/providers/workerReducer.js similarity index 94% rename from js/src/redux/providers/compilerReducer.js rename to js/src/redux/providers/workerReducer.js index e23bf3b16..27144e11b 100644 --- a/js/src/redux/providers/compilerReducer.js +++ b/js/src/redux/providers/workerReducer.js @@ -24,7 +24,7 @@ const initialState = { export default handleActions({ setWorker (state, action) { const { worker } = action; - return Object.assign({}, state, { worker }); + return Object.assign({}, state, { worker: worker || null }); }, setError (state, action) { diff --git a/js/src/redux/reducers.js b/js/src/redux/reducers.js index 577fca11f..45408de92 100644 --- a/js/src/redux/reducers.js +++ b/js/src/redux/reducers.js @@ -19,7 +19,7 @@ import { routerReducer } from 'react-router-redux'; import { apiReducer, balancesReducer, blockchainReducer, - compilerReducer, imagesReducer, personalReducer, + workerReducer, imagesReducer, personalReducer, signerReducer, statusReducer as nodeStatusReducer, snackbarReducer, walletReducer } from './providers'; @@ -41,13 +41,13 @@ export default function () { balances: balancesReducer, certifications: certificationsReducer, blockchain: blockchainReducer, - compiler: compilerReducer, images: imagesReducer, nodeStatus: nodeStatusReducer, personal: personalReducer, + registry: registryReducer, signer: signerReducer, snackbar: snackbarReducer, wallet: walletReducer, - registry: registryReducer + worker: workerReducer }); } diff --git a/js/src/redux/store.js b/js/src/redux/store.js index dc043e242..9924aa461 100644 --- a/js/src/redux/store.js +++ b/js/src/redux/store.js @@ -20,6 +20,7 @@ import initMiddleware from './middleware'; import initReducers from './reducers'; import { load as loadWallet } from './providers/walletActions'; +import { setupWorker } from './providers/worker'; import { Balances as BalancesProvider, @@ -43,6 +44,7 @@ export default function (api, browserHistory) { new StatusProvider(store, api).start(); store.dispatch(loadWallet(api)); + setupWorker(store); return store; } diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js index 136e6a6b7..3fdfc02ac 100644 --- a/js/src/serviceWorker.js +++ b/js/src/serviceWorker.js @@ -15,6 +15,7 @@ // along with Parity. If not, see . import registerPromiseWorker from 'promise-worker/register'; +import { Signer } from '~/util/signer'; import SolidityUtils from '~/util/solidity'; const CACHE_NAME = 'parity-cache-v1'; @@ -93,12 +94,21 @@ function handleMessage (message) { case 'setFiles': return setFiles(message.data); + case 'getSignerSeed': + return getSignerSeed(message.data); + default: console.warn(`unknown action "${message.action}"`); return null; } } +function getSignerSeed (data) { + console.log('deriving seed from service-worker'); + const { wallet, password } = data; + return Signer.getSeed(wallet, password); +} + function compile (data) { const { build } = data; diff --git a/js/src/ui/Actionbar/Search/search.js b/js/src/ui/Actionbar/Search/search.js index 30d8117bd..31541f082 100644 --- a/js/src/ui/Actionbar/Search/search.js +++ b/js/src/ui/Actionbar/Search/search.js @@ -71,15 +71,15 @@ export default class ActionbarSearch extends Component { key='searchAccount'>
@@ -118,6 +118,10 @@ export default class ActionbarSearch extends Component { handleSearchClick = () => { const { showSearch } = this.state; + if (!showSearch) { + this.refs.inputChip.focus(); + } + this.handleOpenSearch(!showSearch); } diff --git a/js/src/ui/Actionbar/actionbar.css b/js/src/ui/Actionbar/actionbar.css index aacc3fc39..311e2b546 100644 --- a/js/src/ui/Actionbar/actionbar.css +++ b/js/src/ui/Actionbar/actionbar.css @@ -27,13 +27,14 @@ } .toolbuttons { -} + overflow: hidden; -.toolbuttons button { - margin: 10px 0 10px 16px !important; - color: white !important; -} + button { + margin: 10px 0 10px 16px !important; + color: white !important; + } -.toolbuttons svg { - fill: white !important; + svg { + fill: white !important; + } } diff --git a/js/src/ui/Balance/balance.css b/js/src/ui/Balance/balance.css index a27904832..9fe7cc8ac 100644 --- a/js/src/ui/Balance/balance.css +++ b/js/src/ui/Balance/balance.css @@ -23,7 +23,7 @@ .empty { line-height: 24px; - margin: 0.75em 0.5em 0 0; + margin: 0 0.5em 0 0; opacity: 0.25; } diff --git a/js/src/ui/Container/Title/title.css b/js/src/ui/Container/Title/title.css index 341c25a7f..9b636c034 100644 --- a/js/src/ui/Container/Title/title.css +++ b/js/src/ui/Container/Title/title.css @@ -14,7 +14,7 @@ /* You should have received a copy of the GNU General Public License /* along with Parity. If not, see . */ -.byline { +.byline, .description { overflow: hidden; position: relative; line-height: 1.2em; @@ -31,6 +31,11 @@ } } +.description { + font-size: 0.75em; + margin: 0.5em 0 0; +} + .title { text-transform: uppercase; margin: 0; diff --git a/js/src/ui/Container/Title/title.js b/js/src/ui/Container/Title/title.js index de25b818c..ccd3f9d0e 100644 --- a/js/src/ui/Container/Title/title.js +++ b/js/src/ui/Container/Title/title.js @@ -22,13 +22,14 @@ import styles from './title.css'; export default class Title extends Component { static propTypes = { + byline: nodeOrStringProptype(), className: PropTypes.string, - title: nodeOrStringProptype(), - byline: nodeOrStringProptype() + description: nodeOrStringProptype(), + title: nodeOrStringProptype() } render () { - const { className, title, byline } = this.props; + const { byline, className, title } = this.props; const byLine = typeof byline === 'string' ? ( @@ -46,6 +47,29 @@ export default class Title extends Component {
{ byLine }
+ { this.renderDescription() } + + ); + } + + renderDescription () { + const { description } = this.props; + + if (!description) { + return null; + } + + const desc = typeof description === 'string' + ? ( + + { description } + + ) + : description; + + return ( +
+ { desc }
); } diff --git a/js/src/ui/Form/InputChip/inputChip.js b/js/src/ui/Form/InputChip/inputChip.js index a12825d71..479ca4b09 100644 --- a/js/src/ui/Form/InputChip/inputChip.js +++ b/js/src/ui/Form/InputChip/inputChip.js @@ -170,6 +170,10 @@ export default class InputChip extends Component { .filter(v => v !== value)); this.handleTokensChange(newTokens); + this.focus(); + } + + focus = () => { this.refs.chipInput.focus(); } diff --git a/js/src/ui/MethodDecoding/methodDecoding.css b/js/src/ui/MethodDecoding/methodDecoding.css index adb899e1c..c782d6ce7 100644 --- a/js/src/ui/MethodDecoding/methodDecoding.css +++ b/js/src/ui/MethodDecoding/methodDecoding.css @@ -38,6 +38,10 @@ justify-content: center; } +.details { + line-height: 1.75em; +} + .details, .gasDetails { color: #aaa; diff --git a/js/src/ui/MethodDecoding/methodDecoding.js b/js/src/ui/MethodDecoding/methodDecoding.js index 59704a731..693ae60b5 100644 --- a/js/src/ui/MethodDecoding/methodDecoding.js +++ b/js/src/ui/MethodDecoding/methodDecoding.js @@ -196,7 +196,7 @@ class MethodDecoding extends Component { : text.slice(0, 50) + '...'; return ( -
+
with the { + return new Signer(seed); + }); + } + + static getSeed (json, password) { + try { + const seed = Signer.getSyncSeed(json, password); + return Promise.resolve(seed); + } catch (error) { + return Promise.reject(error); + } + } + + static getSyncSeed (json, password) { if (json.version !== 3) { throw new Error('Only V3 wallets are supported'); } @@ -43,15 +60,17 @@ export class Wallet { if (kdfparams.prf !== 'hmac-sha256') { throw new Error('Unsupported parameters to PBKDF2'); } + derivedKey = pbkdf2Sync(pwd, salt, kdfparams.c, kdfparams.dklen, 'sha256'); } else { throw new Error('Unsupported key derivation scheme'); } const ciphertext = Buffer.from(json.crypto.ciphertext, 'hex'); - let mac = sha3(Buffer.concat([derivedKey.slice(16, 32), ciphertext])); + const mac = sha3(Buffer.concat([derivedKey.slice(16, 32), ciphertext])); + if (mac !== inHex(json.crypto.mac)) { - throw new Error('Key derivation failed - possibly wrong passphrase'); + throw new Error('Key derivation failed - possibly wrong password'); } const decipher = createDecipheriv( @@ -59,6 +78,7 @@ export class Wallet { derivedKey.slice(0, 16), Buffer.from(json.crypto.cipherparams.iv, 'hex') ); + let seed = Buffer.concat([decipher.update(ciphertext), decipher.final()]); while (seed.length < 32) { @@ -66,7 +86,7 @@ export class Wallet { seed = Buffer.concat([nullBuff, seed]); } - return new Wallet(seed); + return seed; } constructor (seed) { diff --git a/js/src/views/Accounts/List/list.js b/js/src/views/Accounts/List/list.js index 9cebdda6e..ce7a2ae99 100644 --- a/js/src/views/Accounts/List/list.js +++ b/js/src/views/Accounts/List/list.js @@ -57,7 +57,7 @@ class List extends Component { } renderAccounts () { - const { accounts, balances, empty, link, handleAddSearchToken } = this.props; + const { accounts, balances, empty } = this.props; if (empty) { return ( @@ -80,20 +80,29 @@ class List extends Component { return (
- + key={ address } + > + { this.renderSummary(account, balance, owners) }
); }); } + renderSummary (account, balance, owners) { + const { handleAddSearchToken, link } = this.props; + + return ( + + ); + } + getAddresses () { const filteredAddresses = this.getFilteredAddresses(); return this.sortAddresses(filteredAddresses); @@ -122,7 +131,15 @@ class List extends Component { }); } - compareAccounts (accountA, accountB, key) { + compareAccounts (accountA, accountB, key, _reverse = null) { + if (key && key.split(':')[1] === '-1') { + return this.compareAccounts(accountA, accountB, key.split(':')[0], true); + } + + if (key === 'timestamp' && _reverse === null) { + return this.compareAccounts(accountA, accountB, key, true); + } + if (key === 'name') { return accountA.name.localeCompare(accountB.name); } @@ -177,7 +194,9 @@ class List extends Component { return tagsA.localeCompare(tagsB); } - const reverse = key === 'timestamp' ? -1 : 1; + const reverse = _reverse + ? -1 + : 1; const metaA = accountA.meta[key]; const metaB = accountB.meta[key]; @@ -220,8 +239,8 @@ class List extends Component { const tags = account.meta.tags || []; const name = account.name || ''; - const values = [] - .concat(tags, name) + const values = tags + .concat(name) .map(v => v.toLowerCase()); return searchValues diff --git a/js/src/views/Accounts/Summary/summary.js b/js/src/views/Accounts/Summary/summary.js index 8658077a5..20924dc4a 100644 --- a/js/src/views/Accounts/Summary/summary.js +++ b/js/src/views/Accounts/Summary/summary.js @@ -19,6 +19,7 @@ import React, { Component, PropTypes } from 'react'; import { Link } from 'react-router'; import { isEqual } from 'lodash'; import ReactTooltip from 'react-tooltip'; +import { FormattedMessage } from 'react-intl'; import { Balance, Container, ContainerTitle, IdentityIcon, IdentityName, Tags, Input } from '~/ui'; import Certifications from '~/ui/Certifications'; @@ -107,14 +108,22 @@ export default class Summary extends Component { /> ); + const description = this.getDescription(account.meta); + return ( - - +
+ + +
{ this.renderOwners() } { this.renderBalance() } @@ -123,6 +132,26 @@ export default class Summary extends Component { ); } + getDescription (meta = {}) { + const { blockNumber } = meta; + + if (!blockNumber) { + return null; + } + + const formattedBlockNumber = (new BigNumber(blockNumber)).toFormat(); + + return ( + + ); + } + renderOwners () { const { owners } = this.props; const ownersValid = (owners || []).filter((owner) => owner.address && new BigNumber(owner.address).gt(0)); diff --git a/js/src/views/Accounts/accounts.css b/js/src/views/Accounts/accounts.css index 2a7cdcec9..25dfdcab4 100644 --- a/js/src/views/Accounts/accounts.css +++ b/js/src/views/Accounts/accounts.css @@ -56,3 +56,12 @@ } } } + +.heading { + display: flex; + flex-direction: row; + + .main { + flex: 1; + } +} diff --git a/js/src/views/Contract/Events/events.js b/js/src/views/Contract/Events/events.js index dba05dfd9..1367624e8 100644 --- a/js/src/views/Contract/Events/events.js +++ b/js/src/views/Contract/Events/events.js @@ -74,6 +74,14 @@ export default class Events extends Component { return ( + + + + + { list }
+ + origin +
diff --git a/js/src/views/Contract/contract.css b/js/src/views/Contract/contract.css index 4752fd04a..924818ebd 100644 --- a/js/src/views/Contract/contract.css +++ b/js/src/views/Contract/contract.css @@ -29,14 +29,36 @@ .event { td { vertical-align: top; - padding: 1em 0.5em; + padding: 0 0.5em 1.5em; div { white-space: nowrap; } + + &.timestamp { + padding-right: 1.5em; + text-align: right; + line-height: 1.5em; + opacity: 0.5; + white-space: nowrap; + } } } +.blockNumber { + color: rgba(255, 255, 255, 0.25); + margin-top: 1.5em; +} + +.origin { + text-align: left; + padding-left: 32px; + text-indent: 1em; + color: rgba(255, 255, 255, 0.5); + text-transform: uppercase; + font-size: 0.9em; +} + .txhash { text-overflow: ellipsis; width: 20%; @@ -54,14 +76,6 @@ opacity: 0.5; } -.timestamp { - padding-top: 1.5em; - text-align: right; - line-height: 1.5em; - opacity: 0.5; - white-space: nowrap; -} - .eventDetails { } diff --git a/js/src/views/Contract/contract.js b/js/src/views/Contract/contract.js index d06c22b92..fc299f7cb 100644 --- a/js/src/views/Contract/contract.js +++ b/js/src/views/Contract/contract.js @@ -17,6 +17,9 @@ import React, { Component, PropTypes } from 'react'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; +import { FormattedMessage } from 'react-intl'; +import BigNumber from 'bignumber.js'; + import ActionDelete from 'material-ui/svg-icons/action/delete'; import AvPlayArrow from 'material-ui/svg-icons/av/play-arrow'; import ContentCreate from 'material-ui/svg-icons/content/create'; @@ -136,7 +139,9 @@ class Contract extends Component { account={ account } balance={ balance } isContract - /> + > + { this.renderBlockNumber(account.meta) } + + +
+ ); + } + renderDetails (contract) { const { showDetailsDialog } = this.state; diff --git a/js/src/views/Contracts/Summary/summary.js b/js/src/views/Contracts/Summary/summary.js deleted file mode 100644 index 36e88f039..000000000 --- a/js/src/views/Contracts/Summary/summary.js +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2015, 2016 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -import React, { Component, PropTypes } from 'react'; -import { Link } from 'react-router'; - -import { Container, ContainerTitle, IdentityIcon, IdentityName } from '~/ui'; - -export default class Summary extends Component { - static contextTypes = { - api: React.PropTypes.object.isRequired - } - - static propTypes = { - contract: PropTypes.object.isRequired, - children: PropTypes.node - } - - render () { - const contract = this.props.contract; - - if (!contract) { - return null; - } - - const viewLink = `/app/${contract.address}`; - - return ( - - - { } } - byline={ contract.address } /> - { this.props.children } - - ); - } -} diff --git a/js/src/views/Contracts/contracts.js b/js/src/views/Contracts/contracts.js index cc292275a..532c2ffb4 100644 --- a/js/src/views/Contracts/contracts.js +++ b/js/src/views/Contracts/contracts.js @@ -45,7 +45,7 @@ class Contracts extends Component { state = { addContract: false, deployContract: false, - sortOrder: 'timestamp', + sortOrder: 'blockNumber', searchValues: [], searchTokens: [] } @@ -92,7 +92,8 @@ class Contracts extends Component { empty={ !hasContracts } order={ sortOrder } orderFallback='name' - handleAddSearchToken={ this.onAddSearchToken } /> + handleAddSearchToken={ this.onAddSearchToken } + />
); @@ -109,7 +110,8 @@ class Contracts extends Component { id='sortContracts' order={ this.state.sortOrder } metas={ [ - { key: 'timestamp', label: 'date' } + { key: 'timestamp', label: 'date' }, + { key: 'blockNumber:-1', label: 'mined block' } ] } showDefault={ false } onChange={ onChange } /> diff --git a/js/src/views/Signer/components/TransactionPendingForm/TransactionPendingFormConfirm/transactionPendingFormConfirm.js b/js/src/views/Signer/components/TransactionPendingForm/TransactionPendingFormConfirm/transactionPendingFormConfirm.js index 99bd1c5f3..45eb3e5dd 100644 --- a/js/src/views/Signer/components/TransactionPendingForm/TransactionPendingFormConfirm/transactionPendingFormConfirm.js +++ b/js/src/views/Signer/components/TransactionPendingForm/TransactionPendingFormConfirm/transactionPendingFormConfirm.js @@ -77,13 +77,28 @@ class TransactionPendingFormConfirm extends Component { } } + getPasswordHint () { + const { account } = this.props; + const accountHint = account && account.meta && account.meta.passwordHint; + + if (accountHint) { + return accountHint; + } + + const { wallet } = this.state; + const walletHint = wallet && wallet.meta && wallet.meta.passwordHint; + + return walletHint || null; + } + render () { const { account, address, isSending } = this.props; const { password, wallet, walletError } = this.state; const isExternal = !account.uuid; - const passwordHint = account.meta && account.meta.passwordHint - ? (
(hint) { account.meta.passwordHint }
) + const passwordHintText = this.getPasswordHint(); + const passwordHint = passwordHintText + ? (
(hint) { passwordHintText }
) : null; const isWalletOk = !isExternal || (walletError === null && wallet !== null); @@ -170,12 +185,26 @@ class TransactionPendingFormConfirm extends Component { } onKeySelect = (event) => { + // Check that file have been selected + if (event.target.files.length === 0) { + return this.setState({ + wallet: null, + walletError: null + }); + } + const fileReader = new FileReader(); fileReader.onload = (e) => { try { const wallet = JSON.parse(e.target.result); + try { + if (wallet && typeof wallet.meta === 'string') { + wallet.meta = JSON.parse(wallet.meta); + } + } catch (e) {} + this.setState({ wallet, walletError: null diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index c013775a1..8a3ddf3d1 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -18,7 +18,6 @@ import React, { PropTypes, Component } from 'react'; import { observer } from 'mobx-react'; import { MenuItem, Toggle } from 'material-ui'; import { connect } from 'react-redux'; -import { bindActionCreators } from 'redux'; import CircularProgress from 'material-ui/CircularProgress'; import moment from 'moment'; import { throttle } from 'lodash'; @@ -32,8 +31,6 @@ import SendIcon from 'material-ui/svg-icons/content/send'; import { Actionbar, ActionbarExport, ActionbarImport, Button, Editor, Page, Select, Input } from '~/ui'; import { DeployContract, SaveContract, LoadContract } from '~/modals'; -import { setupWorker } from '~/redux/providers/compilerActions'; - import WriteContractStore from './writeContractStore'; import styles from './writeContract.css'; @@ -42,7 +39,6 @@ class WriteContract extends Component { static propTypes = { accounts: PropTypes.object.isRequired, - setupWorker: PropTypes.func.isRequired, worker: PropTypes.object, workerError: PropTypes.any }; @@ -55,8 +51,7 @@ class WriteContract extends Component { }; componentWillMount () { - const { setupWorker, worker } = this.props; - setupWorker(); + const { worker } = this.props; if (worker !== undefined) { this.store.setWorker(worker); @@ -575,17 +570,10 @@ class WriteContract extends Component { function mapStateToProps (state) { const { accounts } = state.personal; - const { worker, error } = state.compiler; + const { worker, error } = state.worker; return { accounts, worker, workerError: error }; } -function mapDispatchToProps (dispatch) { - return bindActionCreators({ - setupWorker - }, dispatch); -} - export default connect( - mapStateToProps, - mapDispatchToProps + mapStateToProps )(WriteContract); diff --git a/parity/cli/mod.rs b/parity/cli/mod.rs index 3515d0d35..7e5e4d1d8 100644 --- a/parity/cli/mod.rs +++ b/parity/cli/mod.rs @@ -37,6 +37,7 @@ usage! { cmd_snapshot: bool, cmd_restore: bool, cmd_ui: bool, + cmd_dapp: bool, cmd_tools: bool, cmd_hash: bool, cmd_kill: bool, @@ -525,6 +526,7 @@ mod tests { cmd_snapshot: false, cmd_restore: false, cmd_ui: false, + cmd_dapp: false, cmd_tools: false, cmd_hash: false, cmd_db: false, diff --git a/parity/cli/usage.txt b/parity/cli/usage.txt index 9705c7359..f75dda524 100644 --- a/parity/cli/usage.txt +++ b/parity/cli/usage.txt @@ -5,6 +5,7 @@ Parity. Ethereum Client. Usage: parity [options] parity ui [options] + parity dapp [options] parity daemon [options] parity account (new | list ) [options] parity account import ... 
[options] diff --git a/parity/configuration.rs b/parity/configuration.rs index f3135e0ee..671c78206 100644 --- a/parity/configuration.rs +++ b/parity/configuration.rs @@ -17,7 +17,7 @@ use std::time::Duration; use std::io::Read; use std::net::SocketAddr; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::cmp::max; use cli::{Args, ArgsError}; use util::{Hashable, U256, Uint, Bytes, version_data, Secret, Address}; @@ -335,6 +335,7 @@ impl Configuration { net_settings: self.network_settings(), dapps_conf: dapps_conf, signer_conf: signer_conf, + dapp: self.dapp_to_open()?, ui: self.args.cmd_ui, name: self.args.flag_identity, custom_bootnodes: self.args.flag_bootnodes.is_some(), @@ -507,10 +508,28 @@ impl Configuration { hosts: self.dapps_hosts(), user: self.args.flag_dapps_user.clone(), pass: self.args.flag_dapps_pass.clone(), - dapps_path: self.directories().dapps, + dapps_path: PathBuf::from(self.directories().dapps), + extra_dapps: if self.args.cmd_dapp { + self.args.arg_path.iter().map(|path| PathBuf::from(path)).collect() + } else { + vec![] + }, } } + fn dapp_to_open(&self) -> Result, String> { + if !self.args.cmd_dapp { + return Ok(None); + } + let path = self.args.arg_path.get(0).map(String::as_str).unwrap_or("."); + let path = Path::new(path).canonicalize() + .map_err(|e| format!("Invalid path: {}. Error: {:?}", path, e))?; + let name = path.file_name() + .and_then(|name| name.to_str()) + .ok_or_else(|| "Root path is not supported.".to_owned())?; + Ok(Some(name.into())) + } + fn gas_pricer_config(&self) -> Result { if let Some(d) = self.args.flag_gasprice.as_ref() { return Ok(GasPricerConfig::Fixed(to_u256(d)?)); @@ -1030,6 +1049,7 @@ mod tests { dapps_conf: Default::default(), signer_conf: Default::default(), ui: false, + dapp: None, name: "".into(), custom_bootnodes: false, fat_db: Default::default(), @@ -1224,6 +1244,22 @@ mod tests { }); } + #[test] + fn should_parse_dapp_opening() { + // given + let temp = RandomTempPath::new(); + let name = temp.file_name().unwrap().to_str().unwrap(); + create_dir(temp.as_str().to_owned()).unwrap(); + + // when + let conf0 = parse(&["parity", "dapp", temp.to_str().unwrap()]); + + // then + assert_eq!(conf0.dapp_to_open(), Ok(Some(name.into()))); + let extra_dapps = conf0.dapps_config().extra_dapps; + assert_eq!(extra_dapps, vec![temp.to_owned()]); + } + #[test] fn should_not_bail_on_empty_line_in_reserved_peers() { let temp = RandomTempPath::new(); diff --git a/parity/dapps.rs b/parity/dapps.rs index 8ec526a05..591c17593 100644 --- a/parity/dapps.rs +++ b/parity/dapps.rs @@ -14,6 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . 
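The new `parity dapp <path>` subcommand resolves which dapp to open from the supplied path: `dapp_to_open` canonicalizes the argument (defaulting to the current directory) and takes its final component as the dapp name, rejecting the filesystem root. A minimal standalone sketch of that resolution using only `std::path` (the helper name `dapp_name` is illustrative, not the crate's):

```rust
use std::path::Path;

// Resolve the dapp name the same way `dapp_to_open` does: canonicalize the
// supplied path (default ".") and take its last path component.
fn dapp_name(arg: Option<&str>) -> Result<String, String> {
    let path = arg.unwrap_or(".");
    let canonical = Path::new(path)
        .canonicalize()
        .map_err(|e| format!("Invalid path: {}. Error: {:?}", path, e))?;
    canonical
        .file_name()
        .and_then(|name| name.to_str())
        .map(|name| name.to_owned())
        .ok_or_else(|| "Root path is not supported.".to_owned())
}

fn main() {
    // e.g. `parity dapp ./my-dapp` would resolve to Ok("my-dapp"), provided
    // the directory exists; `parity dapp /` is rejected.
    println!("{:?}", dapp_name(Some(".")));
}
```

The same path is also pushed into `extra_dapps`, so the directory is served by the dapps server as well as opened in the browser.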
+use std::path::PathBuf; use std::sync::Arc; use io::PanicHandler; use rpc_apis; @@ -33,7 +34,8 @@ pub struct Configuration { pub hosts: Option>, pub user: Option, pub pass: Option, - pub dapps_path: String, + pub dapps_path: PathBuf, + pub extra_dapps: Vec, } impl Default for Configuration { @@ -46,7 +48,8 @@ impl Default for Configuration { hosts: Some(Vec::new()), user: None, pass: None, - dapps_path: replace_home(&data_dir, "$BASE/dapps"), + dapps_path: replace_home(&data_dir, "$BASE/dapps").into(), + extra_dapps: vec![], } } } @@ -80,7 +83,14 @@ pub fn new(configuration: Configuration, deps: Dependencies) -> Result, _url: &SocketAddr, _allowed_hosts: Option>, _auth: Option<(String, String)>, @@ -106,6 +118,7 @@ mod server { #[cfg(feature = "dapps")] mod server { use super::Dependencies; + use std::path::PathBuf; use std::sync::Arc; use std::net::SocketAddr; use std::io; @@ -122,7 +135,8 @@ mod server { pub fn setup_dapps_server( deps: Dependencies, - dapps_path: String, + dapps_path: PathBuf, + extra_dapps: Vec, url: &SocketAddr, allowed_hosts: Option>, auth: Option<(String, String)>, @@ -130,7 +144,7 @@ mod server { use ethcore_dapps as dapps; let server = dapps::ServerBuilder::new( - dapps_path, + &dapps_path, Arc::new(Registrar { client: deps.client.clone() }), deps.remote.clone(), ); @@ -141,6 +155,7 @@ mod server { .fetch(deps.fetch.clone()) .sync_status(Arc::new(move || is_major_importing(Some(sync.status().state), client.queue_info()))) .web_proxy_tokens(Arc::new(move |token| signer.is_valid_web_proxy_access_token(&token))) + .extra_dapps(&extra_dapps) .signer_address(deps.signer.address()); let server = rpc_apis::setup_rpc(server, deps.apis.clone(), rpc_apis::ApiSet::UnsafeContext); diff --git a/parity/run.rs b/parity/run.rs index a878c2aae..e4c5fca6f 100644 --- a/parity/run.rs +++ b/parity/run.rs @@ -92,6 +92,7 @@ pub struct RunCmd { pub net_settings: NetworkSettings, pub dapps_conf: dapps::Configuration, pub signer_conf: signer::Configuration, + pub dapp: Option, pub ui: bool, pub name: String, pub custom_bootnodes: bool, @@ -118,6 +119,17 @@ pub fn open_ui(dapps_conf: &dapps::Configuration, signer_conf: &signer::Configur Ok(()) } +pub fn open_dapp(dapps_conf: &dapps::Configuration, dapp: &str) -> Result<(), String> { + if !dapps_conf.enabled { + return Err("Cannot use DAPP command with Dapps turned off.".into()) + } + + let url = format!("http://{}:{}/{}/", dapps_conf.interface, dapps_conf.port, dapp); + url::open(&url); + Ok(()) +} + + pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc) -> Result { if cmd.ui && cmd.dapps_conf.enabled { // Check if Parity is already running @@ -441,6 +453,10 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc) -> R open_ui(&cmd.dapps_conf, &cmd.signer_conf)?; } + if let Some(dapp) = cmd.dapp { + open_dapp(&cmd.dapps_conf, &dapp)?; + } + // Handle exit let restart = wait_for_exit(panic_handler, Some(updater), can_restart); diff --git a/sync/src/chain.rs b/sync/src/chain.rs index df69dbb08..a87c7d1be 100644 --- a/sync/src/chain.rs +++ b/sync/src/chain.rs @@ -125,6 +125,8 @@ const MAX_NEW_HASHES: usize = 64; const MAX_TX_TO_IMPORT: usize = 512; const MAX_NEW_BLOCK_AGE: BlockNumber = 20; const MAX_TRANSACTION_SIZE: usize = 300*1024; +// Maximal number of transactions in sent in single packet. 
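On the run side, the new `open_dapp` helper refuses to do anything when the dapps server is disabled and otherwise opens `http://<interface>:<port>/<name>/` in the browser. A simplified standalone sketch of that check and URL construction (the `DappsConfig` struct and the 8080 default below are assumptions for illustration, not the crate's actual types):

```rust
// Simplified stand-in for the dapps configuration used by `open_dapp`.
struct DappsConfig {
    enabled: bool,
    interface: String,
    port: u16,
}

fn dapp_url(conf: &DappsConfig, dapp: &str) -> Result<String, String> {
    if !conf.enabled {
        return Err("Cannot use DAPP command with Dapps turned off.".into());
    }
    // Same shape as the URL built in parity/run.rs.
    Ok(format!("http://{}:{}/{}/", conf.interface, conf.port, dapp))
}

fn main() {
    let conf = DappsConfig { enabled: true, interface: "127.0.0.1".into(), port: 8080 };
    // With the defaults assumed here, `parity dapp ./my-dapp` would open:
    assert_eq!(dapp_url(&conf, "my-dapp").unwrap(), "http://127.0.0.1:8080/my-dapp/");
}
```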
+const MAX_TRANSACTIONS_TO_PROPAGATE: usize = 64; // Min number of blocks to be behind for a snapshot sync const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000; const SNAPSHOT_MIN_PEERS: usize = 3; @@ -1447,7 +1449,7 @@ impl ChainSync { } let mut item_count = r.item_count(); - trace!(target: "sync", "{} -> Transactions ({} entries)", peer_id, item_count); + trace!(target: "sync", "{:02} -> Transactions ({} entries)", peer_id, item_count); item_count = min(item_count, MAX_TX_TO_IMPORT); let mut transactions = Vec::with_capacity(item_count); for i in 0 .. item_count { @@ -1987,11 +1989,14 @@ impl ChainSync { stats.propagated(*hash, id, block_number); } peer_info.last_sent_transactions = all_transactions_hashes.clone(); - return Some((*peer_id, all_transactions_rlp.clone())); + return Some((*peer_id, all_transactions_hashes.len(), all_transactions_rlp.clone())); } // Get hashes of all transactions to send to this peer - let to_send = all_transactions_hashes.difference(&peer_info.last_sent_transactions).cloned().collect::>(); + let to_send = all_transactions_hashes.difference(&peer_info.last_sent_transactions) + .take(MAX_TRANSACTIONS_TO_PROPAGATE) + .cloned() + .collect::>(); if to_send.is_empty() { return None; } @@ -2007,22 +2012,28 @@ impl ChainSync { } } - peer_info.last_sent_transactions = all_transactions_hashes.clone(); - Some((*peer_id, packet.out())) + peer_info.last_sent_transactions = all_transactions_hashes + .intersection(&peer_info.last_sent_transactions) + .chain(&to_send) + .cloned() + .collect(); + Some((*peer_id, to_send.len(), packet.out())) }) .collect::>() }; // Send RLPs - let sent = lucky_peers.len(); - if sent > 0 { - for (peer_id, rlp) in lucky_peers { + let peers = lucky_peers.len(); + if peers > 0 { + let mut max_sent = 0; + for (peer_id, sent, rlp) in lucky_peers { self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp); + trace!(target: "sync", "{:02} <- Transactions ({} entries)", peer_id, sent); + max_sent = max(max_sent, sent); } - - trace!(target: "sync", "Sent up to {} transactions to {} peers.", transactions.len(), sent); + debug!(target: "sync", "Sent up to {} transactions to {} peers.", max_sent, peers); } - sent + peers } fn propagate_latest_blocks(&mut self, io: &mut SyncIo, sealed: &[H256]) { @@ -2042,7 +2053,6 @@ impl ChainSync { trace!(target: "sync", "Sent sealed block to all peers"); }; } - self.propagate_new_transactions(io); self.last_sent_block_number = chain_info.best_block_number; } @@ -2070,7 +2080,9 @@ impl ChainSync { /// called when block is imported to chain - propagates the blocks and updates transactions sent to peers pub fn chain_new_blocks(&mut self, io: &mut SyncIo, _imported: &[H256], invalid: &[H256], enacted: &[H256], _retracted: &[H256], sealed: &[H256], proposed: &[Bytes]) { let queue_info = io.chain().queue_info(); - if !self.status().is_syncing(queue_info) || !sealed.is_empty() { + let is_syncing = self.status().is_syncing(queue_info); + + if !is_syncing || !sealed.is_empty() { trace!(target: "sync", "Propagating blocks, state={:?}", self.state); self.propagate_latest_blocks(io, sealed); self.propagate_proposed_blocks(io, proposed); @@ -2080,7 +2092,7 @@ impl ChainSync { self.restart(io); } - if !enacted.is_empty() { + if !is_syncing && !enacted.is_empty() { // Select random peers to re-broadcast transactions to. 
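The sync changes above cap transaction propagation at `MAX_TRANSACTIONS_TO_PROPAGATE` hashes per packet per peer and rebuild `last_sent_transactions` as "hashes still pending that we already sent" plus the batch sent now, so unsent hashes stay eligible for the next round while dropped transactions stop being tracked. A standalone sketch of that bookkeeping (simplified, using `u64` in place of `H256` hashes):

```rust
use std::collections::HashSet;

const MAX_TRANSACTIONS_TO_PROPAGATE: usize = 64;

// Pick at most MAX_TRANSACTIONS_TO_PROPAGATE hashes the peer has not seen yet
// and update the per-peer `last_sent` set accordingly.
fn select_and_update(all_pending: &HashSet<u64>, last_sent: &mut HashSet<u64>) -> Vec<u64> {
    let to_send: Vec<u64> = all_pending
        .difference(last_sent)
        .take(MAX_TRANSACTIONS_TO_PROPAGATE)
        .cloned()
        .collect();

    // Keep only hashes that are both already sent and still pending...
    let retained: HashSet<u64> = all_pending.intersection(last_sent).cloned().collect();
    // ...and add the batch sent in this round.
    *last_sent = retained.into_iter().chain(to_send.iter().cloned()).collect();

    to_send
}

fn main() {
    let all_pending: HashSet<u64> = (0..100).collect();
    let mut last_sent: HashSet<u64> = (0..10).collect();

    let sent = select_and_update(&all_pending, &mut last_sent);
    assert_eq!(sent.len(), MAX_TRANSACTIONS_TO_PROPAGATE);
    assert_eq!(last_sent.len(), 10 + MAX_TRANSACTIONS_TO_PROPAGATE);
}
```

Note also the behavioural change visible below: importing a new block no longer triggers transaction propagation while syncing; propagation now happens on the timer, which is what the renamed test asserts.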
let mut random = random::new(); let len = self.peers.len(); @@ -2531,7 +2543,7 @@ mod tests { } #[test] - fn propagates_new_transactions_after_new_block() { + fn does_not_propagate_new_transactions_after_new_block() { let mut client = TestBlockChainClient::new(); client.add_blocks(100, EachBlockWith::Uncle); client.insert_transaction_to_queue(); @@ -2541,16 +2553,16 @@ mod tests { let mut io = TestIo::new(&mut client, &ss, &queue, None); let peer_count = sync.propagate_new_transactions(&mut io); io.chain.insert_transaction_to_queue(); - // New block import should trigger propagation. + // New block import should not trigger propagation. + // (we only propagate on timeout) sync.chain_new_blocks(&mut io, &[], &[], &[], &[], &[], &[]); // 2 message should be send - assert_eq!(2, io.packets.len()); + assert_eq!(1, io.packets.len()); // 1 peer should receive the message assert_eq!(1, peer_count); // TRANSACTIONS_PACKET assert_eq!(0x02, io.packets[0].packet_id); - assert_eq!(0x02, io.packets[1].packet_id); } #[test] diff --git a/util/benches/bigint.rs b/util/benches/bigint.rs index 5f35f52ae..4fa9c180e 100644 --- a/util/benches/bigint.rs +++ b/util/benches/bigint.rs @@ -14,7 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . -//! benchmarking for rlp +//! benchmarking for bigint //! should be started with: //! ```bash //! multirust run nightly cargo bench @@ -24,10 +24,10 @@ #![feature(asm)] extern crate test; -extern crate ethcore_bigint as bigint; +extern crate ethcore_util; use test::{Bencher, black_box}; -use bigint::uint::{U256, U512, Uint, U128}; +use ethcore_util::{U256, U512, Uint, U128}; #[bench] fn u256_add(b: &mut Bencher) { diff --git a/util/benches/rlp.rs b/util/benches/rlp.rs index d446f22cf..9a7889ef2 100644 --- a/util/benches/rlp.rs +++ b/util/benches/rlp.rs @@ -24,12 +24,12 @@ extern crate test; extern crate rlp; -extern crate ethcore_bigint as bigint; +extern crate ethcore_util as util; use test::Bencher; use std::str::FromStr; use rlp::*; -use bigint::uint::U256; +use util::U256; #[bench] fn bench_stream_u64_value(b: &mut Bencher) { diff --git a/util/src/nibblevec.rs b/util/src/nibblevec.rs index 75925f52b..57dc28e1d 100644 --- a/util/src/nibblevec.rs +++ b/util/src/nibblevec.rs @@ -18,27 +18,26 @@ //! An owning, nibble-oriented byte vector. use ::NibbleSlice; +use elastic_array::ElasticArray36; -#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Debug)] /// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`. +#[derive(Clone, PartialEq, Eq, Debug)] pub struct NibbleVec { - inner: Vec, + inner: ElasticArray36, len: usize, } +impl Default for NibbleVec { + fn default() -> Self { + NibbleVec::new() + } +} + impl NibbleVec { /// Make a new `NibbleVec` pub fn new() -> Self { NibbleVec { - inner: Vec::new(), - len: 0 - } - } - - /// Make a `NibbleVec` with capacity for `n` nibbles. - pub fn with_capacity(n: usize) -> Self { - NibbleVec { - inner: Vec::with_capacity((n / 2) + (n % 2)), + inner: ElasticArray36::new(), len: 0 } } @@ -49,9 +48,6 @@ impl NibbleVec { /// Retrurns true if `NibbleVec` has zero length pub fn is_empty(&self) -> bool { self.len == 0 } - /// Capacity of the `NibbleVec`. - pub fn capacity(&self) -> usize { self.inner.capacity() * 2 } - /// Try to get the nibble at the given offset. 
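For reference on the `NibbleVec` rewrite here: the type packs two 4-bit nibbles per byte, with `len` counting nibbles and `at` selecting the high or low half by index parity; the change above only swaps the heap `Vec<u8>` backing for a stack-backed `ElasticArray36`, avoiding allocations for the short partial keys tries produce. A simplified standalone sketch of that packing (the `Nibbles` type is illustrative, and the high-nibble-first ordering is an assumption inferred from the `at` accessor shown in this hunk):

```rust
// Two nibbles share one byte; `len` counts nibbles, not bytes.
struct Nibbles {
    inner: Vec<u8>, // the real NibbleVec now uses ElasticArray36 here
    len: usize,
}

impl Nibbles {
    fn new() -> Self {
        Nibbles { inner: Vec::new(), len: 0 }
    }

    fn push(&mut self, nibble: u8) {
        let nibble = nibble & 0x0f;
        if self.len % 2 == 0 {
            self.inner.push(nibble << 4); // start a new byte, high half first
        } else {
            *self.inner.last_mut().expect("len is odd, so a byte exists") |= nibble;
        }
        self.len += 1;
    }

    fn at(&self, idx: usize) -> u8 {
        if idx % 2 == 0 {
            self.inner[idx / 2] >> 4
        } else {
            self.inner[idx / 2] & 0x0f
        }
    }
}

fn main() {
    let mut n = Nibbles::new();
    for nibble in [0xa, 0xb, 0xc].iter() {
        n.push(*nibble);
    }
    assert_eq!((n.at(0), n.at(1), n.at(2)), (0xa, 0xb, 0xc));
}
```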
pub fn at(&self, idx: usize) -> u8 { if idx % 2 == 0 { @@ -109,7 +105,7 @@ impl NibbleVec { impl<'a> From> for NibbleVec { fn from(s: NibbleSlice<'a>) -> Self { - let mut v = NibbleVec::with_capacity(s.len()); + let mut v = NibbleVec::new(); for i in 0..s.len() { v.push(s.at(i)); } diff --git a/util/src/trie/fatdb.rs b/util/src/trie/fatdb.rs index f8d47bb38..942bca6e8 100644 --- a/util/src/trie/fatdb.rs +++ b/util/src/trie/fatdb.rs @@ -16,8 +16,8 @@ use hash::H256; use sha3::Hashable; -use hashdb::{HashDB, DBValue}; -use super::{TrieDB, Trie, TrieDBIterator, TrieItem, Recorder, TrieIterator}; +use hashdb::HashDB; +use super::{TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query}; /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. /// Additionaly it stores inserted hash-key mappings for later retrieval. @@ -58,10 +58,10 @@ impl<'db> Trie for FatDB<'db> { self.raw.contains(&key.sha3()) } - fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result> - where 'a: 'b, R: Recorder + fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result> + where 'a: 'key { - self.raw.get_recorded(&key.sha3(), rec) + self.raw.get_with(&key.sha3(), query) } } @@ -104,6 +104,7 @@ impl<'db> Iterator for FatDBIterator<'db> { #[test] fn fatdb_to_trie() { use memorydb::MemoryDB; + use hashdb::DBValue; use trie::{FatDBMut, TrieMut}; let mut memdb = MemoryDB::new(); diff --git a/util/src/trie/lookup.rs b/util/src/trie/lookup.rs new file mode 100644 index 000000000..8772ac15e --- /dev/null +++ b/util/src/trie/lookup.rs @@ -0,0 +1,94 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +//! Trie lookup via HashDB. + +use hashdb::HashDB; +use nibbleslice::NibbleSlice; +use rlp::{Rlp, View}; +use ::{H256}; + +use super::{TrieError, Query}; +use super::node::Node; + +/// Trie lookup helper object. +pub struct Lookup<'a, Q: Query> { + /// database to query from. + pub db: &'a HashDB, + /// Query object to record nodes and transform data. + pub query: Q, + /// Hash to start at + pub hash: H256, +} + +impl<'a, Q: Query> Lookup<'a, Q> { + /// Look up the given key. If the value is found, it will be passed to the given + /// function to decode or copy. + pub fn look_up(mut self, mut key: NibbleSlice) -> super::Result> { + let mut hash = self.hash; + + // this loop iterates through non-inline nodes. + for depth in 0.. { + let node_data = match self.db.get(&hash) { + Some(value) => value, + None => return Err(Box::new(match depth { + 0 => TrieError::InvalidStateRoot(hash), + _ => TrieError::IncompleteDatabase(hash), + })), + }; + + self.query.record(&hash, &node_data, depth); + + // this loop iterates through all inline children (usually max 1) + // without incrementing the depth. 
+ let mut node_data = &node_data[..]; + loop { + match Node::decoded(node_data) { + Node::Leaf(slice, value) => { + return Ok(match slice == key { + true => Some(self.query.decode(value)), + false => None, + }) + } + Node::Extension(slice, item) => { + if key.starts_with(&slice) { + node_data = item; + key = key.mid(slice.len()); + } else { + return Ok(None) + } + } + Node::Branch(children, value) => match key.is_empty() { + true => return Ok(value.map(move |val| self.query.decode(val))), + false => { + node_data = children[key.at(0) as usize]; + key = key.mid(1); + } + }, + _ => return Ok(None), + } + + // check if new node data is inline or hash. + let r = Rlp::new(node_data); + if r.is_data() && r.size() == 32 { + hash = r.as_val(); + break + } + } + } + Ok(None) + } +} diff --git a/util/src/trie/mod.rs b/util/src/trie/mod.rs index 01c351fc1..3b16f70b0 100644 --- a/util/src/trie/mod.rs +++ b/util/src/trie/mod.rs @@ -38,6 +38,7 @@ pub mod recorder; mod fatdb; mod fatdbmut; +mod lookup; pub use self::standardmap::{Alphabet, StandardMap, ValueMode}; pub use self::triedbmut::TrieDBMut; @@ -76,6 +77,46 @@ pub type Result = ::std::result::Result>; /// Trie-Item type. pub type TrieItem<'a> = Result<(Vec, DBValue)>; +/// Description of what kind of query will be made to the trie. +/// +/// This is implemented for any &mut recorder (where the query will return +/// a DBValue), any function taking raw bytes (where no recording will be made), +/// or any tuple of (&mut Recorder, FnOnce(&[u8])) +pub trait Query { + /// Output item. + type Item; + + /// Decode a byte-slice into the desired item. + fn decode(self, &[u8]) -> Self::Item; + + /// Record that a node has been passed through. + fn record(&mut self, &H256, &[u8], u32) { } +} + +impl<'a> Query for &'a mut Recorder { + type Item = DBValue; + + fn decode(self, value: &[u8]) -> DBValue { DBValue::from_slice(value) } + fn record(&mut self, hash: &H256, data: &[u8], depth: u32) { + (&mut **self).record(hash, data, depth); + } +} + +impl Query for F where F: for<'a> FnOnce(&'a [u8]) -> T { + type Item = T; + + fn decode(self, value: &[u8]) -> T { (self)(value) } +} + +impl<'a, F, T> Query for (&'a mut Recorder, F) where F: FnOnce(&[u8]) -> T { + type Item = T; + + fn decode(self, value: &[u8]) -> T { (self.1)(value) } + fn record(&mut self, hash: &H256, data: &[u8], depth: u32) { + self.0.record(hash, data, depth) + } +} + /// A key-value datastore implemented as a database-backed modified Merkle tree. pub trait Trie { /// Return the root of the trie. @@ -91,13 +132,13 @@ pub trait Trie { /// What is the value of the given key in this trie? fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result> where 'a: 'key { - self.get_recorded(key, &mut recorder::NoOp) + self.get_with(key, DBValue::from_slice) } - /// Query the value of the given key in this trie while recording visited nodes - /// to the given recorder. If the query encounters an error, the nodes passed to the recorder are unspecified. - fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> Result> - where 'a: 'b, R: Recorder; + /// Search for the key with the given query parameter. See the docs of the `Query` + /// trait for more details. + fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) + -> Result> where 'a: 'key; /// Returns a depth-first iterator over the elements of trie. 
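The new `Query` trait is what lets a single `get_with` subsume both the old `get` (copy the value into a `DBValue`) and the old `get_recorded` (record visited nodes), and additionally allows decoding a found value in place without copying it. A condensed, self-contained illustration of the closure case (simplified trait and `lookup_value` helper, not the crate's exact definitions):

```rust
// The caller decides how a found value is decoded.
trait Query {
    type Item;
    fn decode(self, value: &[u8]) -> Self::Item;
}

// Any `FnOnce(&[u8]) -> T` is a query: it decodes directly on the borrowed slice.
impl<F, T> Query for F
where
    F: for<'a> FnOnce(&'a [u8]) -> T,
{
    type Item = T;
    fn decode(self, value: &[u8]) -> T {
        (self)(value)
    }
}

// Stand-in for the trie lookup: hand the stored bytes to the query.
fn lookup_value<Q: Query>(stored: &[u8], query: Q) -> Q::Item {
    query.decode(stored)
}

fn main() {
    // Copy the value out...
    let owned: Vec<u8> = lookup_value(b"bar", |v: &[u8]| v.to_vec());
    // ...or compute on it without copying.
    let len = lookup_value(b"bar", |v: &[u8]| v.len());
    assert_eq!((owned.as_slice(), len), (&b"bar"[..], 3));
}
```

The real trait also has a `record` hook with a default no-op body, which is how `&mut Recorder` and the `(recorder, closure)` tuple plug in while plain closures record nothing.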
fn iter<'a>(&'a self) -> Result + 'a>>; @@ -192,9 +233,10 @@ impl<'db> Trie for TrieKinds<'db> { wrapper!(self, contains, key) } - fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], r: &'b mut R) -> Result> - where 'a: 'b, R: Recorder { - wrapper!(self, get_recorded, key, r) + fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result> + where 'a: 'key + { + wrapper!(self, get_with, key, query) } fn iter<'a>(&'a self) -> Result + 'a>> { diff --git a/util/src/trie/node.rs b/util/src/trie/node.rs index 44f1f3bfa..24f2815ec 100644 --- a/util/src/trie/node.rs +++ b/util/src/trie/node.rs @@ -16,6 +16,7 @@ use elastic_array::ElasticArray36; use nibbleslice::*; +use nibblevec::NibbleVec; use bytes::*; use rlp::*; use hashdb::DBValue; @@ -24,40 +25,21 @@ use hashdb::DBValue; pub type NodeKey = ElasticArray36; /// Type of node in the trie and essential information thereof. -#[derive(Eq, PartialEq, Debug)] -pub enum Node { +#[derive(Eq, PartialEq, Debug, Clone)] +pub enum Node<'a> { /// Null trie node; could be an empty root or an empty branch entry. Empty, /// Leaf node; has key slice and value. Value may not be empty. - Leaf(NodeKey, DBValue), + Leaf(NibbleSlice<'a>, &'a [u8]), /// Extension node; has key slice and node data. Data may not be null. - Extension(NodeKey, DBValue), + Extension(NibbleSlice<'a>, &'a [u8]), /// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data. - Branch([NodeKey; 16], Option) + Branch([&'a [u8]; 16], Option<&'a [u8]>) } -impl Clone for Node { - fn clone(&self) -> Node { - match *self { - Node::Empty => Node::Empty, - Node::Leaf(ref k, ref v) => Node::Leaf(k.clone(), v.clone()), - Node::Extension(ref k, ref v) => Node::Extension(k.clone(), v.clone()), - Node::Branch(ref k, ref v) => { - let mut branch = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), - NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), - NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()]; - for i in 0 .. 16 { - branch[i] = k[i].clone(); - } - Node::Branch(branch, v.clone()) - } - } - } -} - -impl Node { +impl<'a> Node<'a> { /// Decode the `node_rlp` and return the Node. - pub fn decoded(node_rlp: &[u8]) -> Node { + pub fn decoded(node_rlp: &'a [u8]) -> Self { let r = Rlp::new(node_rlp); match r.prototype() { // either leaf or extension - decode first item with NibbleSlice::??? @@ -66,18 +48,16 @@ impl Node { // if extension, second item is a node (either SHA3 to be looked up and // fed back into this function or inline RLP which can be fed back into this function). Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) { - (slice, true) => Node::Leaf(slice.encoded(true), DBValue::from_slice(r.at(1).data())), - (slice, false) => Node::Extension(slice.encoded(false), DBValue::from_slice(r.at(1).as_raw())), + (slice, true) => Node::Leaf(slice, r.at(1).data()), + (slice, false) => Node::Extension(slice, r.at(1).as_raw()), }, // branch - first 16 are nodes, 17th is a value (or empty). 
Prototype::List(17) => { - let mut nodes: [NodeKey; 16] = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), - NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), - NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()]; + let mut nodes = [&[] as &[u8]; 16]; for i in 0..16 { - nodes[i] = NodeKey::from_slice(r.at(i).as_raw()); + nodes[i] = r.at(i).as_raw(); } - Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(DBValue::from_slice(r.at(16).data())) }) + Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) }) }, // an empty branch index. Prototype::Data(0) => Node::Empty, @@ -94,23 +74,23 @@ impl Node { match *self { Node::Leaf(ref slice, ref value) => { let mut stream = RlpStream::new_list(2); - stream.append(&&**slice); - stream.append(&&**value); + stream.append(&&*slice.encoded(true)); + stream.append(value); stream.out() }, Node::Extension(ref slice, ref raw_rlp) => { let mut stream = RlpStream::new_list(2); - stream.append(&&**slice); - stream.append_raw(&&*raw_rlp, 1); + stream.append(&&*slice.encoded(false)); + stream.append_raw(raw_rlp, 1); stream.out() }, Node::Branch(ref nodes, ref value) => { let mut stream = RlpStream::new_list(17); for i in 0..16 { - stream.append_raw(&*nodes[i], 1); + stream.append_raw(nodes[i], 1); } match *value { - Some(ref n) => { stream.append(&&**n); }, + Some(ref n) => { stream.append(n); }, None => { stream.append_empty_data(); }, } stream.out() @@ -123,3 +103,64 @@ impl Node { } } } + +/// An owning node type. Useful for trie iterators. +#[derive(Debug, PartialEq, Eq)] +pub enum OwnedNode { + /// Empty trie node. + Empty, + /// Leaf node: partial key and value. + Leaf(NibbleVec, DBValue), + /// Extension node: partial key and child node. + Extension(NibbleVec, DBValue), + /// Branch node: 16 children and an optional value. 
+ Branch([NodeKey; 16], Option), +} + +impl Clone for OwnedNode { + fn clone(&self) -> Self { + match *self { + OwnedNode::Empty => OwnedNode::Empty, + OwnedNode::Leaf(ref k, ref v) => OwnedNode::Leaf(k.clone(), v.clone()), + OwnedNode::Extension(ref k, ref c) => OwnedNode::Extension(k.clone(), c.clone()), + OwnedNode::Branch(ref c, ref v) => { + let mut children = [ + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + ]; + + for (owned, borrowed) in children.iter_mut().zip(c.iter()) { + *owned = borrowed.clone() + } + + OwnedNode::Branch(children, v.as_ref().cloned()) + } + } + } +} + +impl<'a> From> for OwnedNode { + fn from(node: Node<'a>) -> Self { + match node { + Node::Empty => OwnedNode::Empty, + Node::Leaf(k, v) => OwnedNode::Leaf(k.into(), DBValue::from_slice(v)), + Node::Extension(k, child) => OwnedNode::Extension(k.into(), DBValue::from_slice(child)), + Node::Branch(c, val) => { + let mut children = [ + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), + ]; + + for (owned, borrowed) in children.iter_mut().zip(c.iter()) { + *owned = NodeKey::from_slice(borrowed) + } + + OwnedNode::Branch(children, val.map(DBValue::from_slice)) + } + } + } +} diff --git a/util/src/trie/recorder.rs b/util/src/trie/recorder.rs index 7f98c20e5..868f4d27d 100644 --- a/util/src/trie/recorder.rs +++ b/util/src/trie/recorder.rs @@ -14,6 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . +//! Trie query recorder. + use sha3::Hashable; use {Bytes, H256}; @@ -30,63 +32,36 @@ pub struct Record { pub hash: H256, } -/// Trie node recorder. -/// -/// These are used to record which nodes are visited during a trie query. -/// Inline nodes are not to be recorded, as they are contained within their parent. -pub trait Recorder { - /// Record that the given node has been visited. - /// - /// The depth parameter is the depth of the visited node, with the root node having depth 0. - fn record(&mut self, hash: &H256, data: &[u8], depth: u32); - - /// Drain all accepted records from the recorder in ascending order by depth. - fn drain(&mut self) -> Vec where Self: Sized; -} - -/// A no-op trie recorder. This ignores everything which is thrown at it. -pub struct NoOp; - -impl Recorder for NoOp { - #[inline] - fn record(&mut self, _hash: &H256, _data: &[u8], _depth: u32) {} - - #[inline] - fn drain(&mut self) -> Vec { Vec::new() } -} - -/// A simple recorder. Does nothing fancy but fulfills the `Recorder` interface -/// properly. +/// Records trie nodes as they pass it. #[derive(Debug)] -pub struct BasicRecorder { +pub struct Recorder { nodes: Vec, min_depth: u32, } -impl Default for BasicRecorder { +impl Default for Recorder { fn default() -> Self { - BasicRecorder::new() + Recorder::new() } } -impl BasicRecorder { - /// Create a new `BasicRecorder` which records all given nodes. +impl Recorder { + /// Create a new `Recorder` which records all given nodes. #[inline] pub fn new() -> Self { - BasicRecorder::with_depth(0) + Recorder::with_depth(0) } - /// Create a `BasicRecorder` which only records nodes beyond a given depth. 
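With the borrowed `Node<'a>` / owned `OwnedNode` split above, decoding borrows straight from the RLP buffer and a copy is only made when a node has to outlive that buffer, e.g. inside the iterator's trail. A minimal standalone sketch of the pattern (a simplified two-variant enum, not the crate's types):

```rust
// Borrowed view: cheap to produce while the source buffer is alive.
#[derive(Debug, Clone, Copy)]
enum Node<'a> {
    Empty,
    Leaf(&'a [u8], &'a [u8]), // partial key, value
}

// Owning counterpart: safe to keep after the buffer is gone.
#[derive(Debug, Clone)]
enum OwnedNode {
    Empty,
    Leaf(Vec<u8>, Vec<u8>),
}

impl<'a> From<Node<'a>> for OwnedNode {
    fn from(node: Node<'a>) -> Self {
        match node {
            Node::Empty => OwnedNode::Empty,
            Node::Leaf(k, v) => OwnedNode::Leaf(k.to_vec(), v.to_vec()),
        }
    }
}

fn main() {
    let buffer = vec![0x20u8, 0xde, 0xad];
    let borrowed = Node::Leaf(&buffer[..1], &buffer[1..]);
    let owned = OwnedNode::from(borrowed); // copy only when the node must outlive `buffer`
    drop(buffer);
    println!("{:?} / {:?}", owned, OwnedNode::from(Node::Empty));
}
```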
+ /// Create a `Recorder` which only records nodes beyond a given depth. pub fn with_depth(depth: u32) -> Self { - BasicRecorder { + Recorder { nodes: Vec::new(), min_depth: depth, } } -} -impl Recorder for BasicRecorder { - fn record(&mut self, hash: &H256, data: &[u8], depth: u32) { + /// Record a visited node, given its hash, data, and depth. + pub fn record(&mut self, hash: &H256, data: &[u8], depth: u32) { debug_assert_eq!(data.sha3(), *hash); if depth >= self.min_depth { @@ -98,7 +73,8 @@ impl Recorder for BasicRecorder { } } - fn drain(&mut self) -> Vec { + /// Drain all visited records. + pub fn drain(&mut self) -> Vec { ::std::mem::replace(&mut self.nodes, Vec::new()) } } @@ -109,20 +85,9 @@ mod tests { use sha3::Hashable; use ::H256; - #[test] - fn no_op_does_nothing() { - let mut no_op = NoOp; - let (node1, node2) = (&[1], &[2]); - let (hash1, hash2) = (node1.sha3(), node2.sha3()); - no_op.record(&hash1, node1, 1); - no_op.record(&hash2, node2, 2); - - assert_eq!(no_op.drain(), Vec::new()); - } - #[test] fn basic_recorder() { - let mut basic = BasicRecorder::new(); + let mut basic = Recorder::new(); let node1 = vec![1, 2, 3, 4]; let node2 = vec![4, 5, 6, 7, 8, 9, 10]; @@ -148,7 +113,7 @@ mod tests { #[test] fn basic_recorder_min_depth() { - let mut basic = BasicRecorder::with_depth(400); + let mut basic = Recorder::with_depth(400); let node1 = vec![1, 2, 3, 4]; let node2 = vec![4, 5, 6, 7, 8, 9, 10]; @@ -192,9 +157,9 @@ mod tests { } let trie = TrieDB::new(&db, &root).unwrap(); - let mut recorder = BasicRecorder::new(); + let mut recorder = Recorder::new(); - trie.get_recorded(b"pirate", &mut recorder).unwrap().unwrap(); + trie.get_with(b"pirate", &mut recorder).unwrap().unwrap(); let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); assert_eq!(nodes, vec![ @@ -213,7 +178,7 @@ mod tests { ] ]); - trie.get_recorded(b"letter", &mut recorder).unwrap().unwrap(); + trie.get_with(b"letter", &mut recorder).unwrap().unwrap(); let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); assert_eq!(nodes, vec![ diff --git a/util/src/trie/sectriedb.rs b/util/src/trie/sectriedb.rs index e3e6bf90d..9b4f68e73 100644 --- a/util/src/trie/sectriedb.rs +++ b/util/src/trie/sectriedb.rs @@ -16,9 +16,9 @@ use hash::H256; use sha3::Hashable; -use hashdb::{HashDB, DBValue}; +use hashdb::HashDB; use super::triedb::TrieDB; -use super::{Trie, TrieItem, Recorder, TrieIterator}; +use super::{Trie, TrieItem, TrieIterator, Query}; /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. 
/// @@ -59,16 +59,17 @@ impl<'db> Trie for SecTrieDB<'db> { self.raw.contains(&key.sha3()) } - fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result> - where 'a: 'b, R: Recorder + fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result> + where 'a: 'key { - self.raw.get_recorded(&key.sha3(), rec) + self.raw.get_with(&key.sha3(), query) } } #[test] fn trie_to_sectrie() { use memorydb::MemoryDB; + use hashdb::DBValue; use super::triedbmut::TrieDBMut; use super::super::TrieMut; diff --git a/util/src/trie/triedb.rs b/util/src/trie/triedb.rs index 3508908e7..e23eb1788 100644 --- a/util/src/trie/triedb.rs +++ b/util/src/trie/triedb.rs @@ -18,16 +18,14 @@ use common::*; use hashdb::*; use nibbleslice::*; use rlp::*; -use super::node::Node; -use super::recorder::{Recorder, NoOp}; -use super::{Trie, TrieItem, TrieError, TrieIterator}; +use super::node::{Node, OwnedNode}; +use super::lookup::Lookup; +use super::{Trie, TrieItem, TrieError, TrieIterator, Query}; /// A `Trie` implementation using a generic `HashDB` backing database. /// -/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object, `keys` -/// to get the keys belonging to the trie in the backing database, and `db_items_remaining()` to get -/// which items in the backing database do not belong to this trie. If this is the only trie in the -/// backing database, then `db_items_remaining()` should be empty. +/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object. +/// Use `get` and `contains` to query values associated with keys in the trie. /// /// # Example /// ``` @@ -45,7 +43,6 @@ use super::{Trie, TrieItem, TrieError, TrieIterator}; /// let t = TrieDB::new(&memdb, &root).unwrap(); /// assert!(t.contains(b"foo").unwrap()); /// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar")); -/// assert!(t.db_items_remaining().unwrap().is_empty()); /// } /// ``` pub struct TrieDB<'db> { @@ -76,74 +73,12 @@ impl<'db> TrieDB<'db> { self.db } - /// Determine all the keys in the backing database that belong to the trie. - pub fn keys(&self) -> super::Result> { - let mut ret: Vec = Vec::new(); - ret.push(self.root.clone()); - self.accumulate_keys(self.root_node(&mut NoOp)?, &mut ret)?; - Ok(ret) - } - - /// Convert a vector of hashes to a hashmap of hash to occurrences. - pub fn to_map(hashes: Vec) -> HashMap { - let mut r: HashMap = HashMap::new(); - for h in hashes { - *r.entry(h).or_insert(0) += 1; - } - r - } - - /// Determine occurrences of items in the backing database which are not related to this - /// trie. - pub fn db_items_remaining(&self) -> super::Result> { - let mut ret = self.db.keys(); - for (k, v) in Self::to_map(self.keys()?) { - let keycount = *ret.get(&k).unwrap_or(&0); - match keycount <= v as i32 { - true => ret.remove(&k), - _ => ret.insert(k, keycount - v as i32), - }; - } - Ok(ret) - } - - /// Recursion helper for `keys`. - fn accumulate_keys(&self, node: Node, acc: &mut Vec) -> super::Result<()> { - let mut handle_payload = |payload| { - let p = Rlp::new(payload); - if p.is_data() && p.size() == 32 { - acc.push(p.as_val()); - } - - self.accumulate_keys(self.get_node(payload, &mut NoOp, 0)?, acc) - }; - - match node { - Node::Extension(_, ref payload) => handle_payload(payload)?, - Node::Branch(ref payloads, _) => for payload in payloads { handle_payload(payload)? }, - _ => {}, - } - - Ok(()) - } - - /// Get the root node's RLP. 
- fn root_node(&self, r: &mut R) -> super::Result { - self.root_data(r).map(|d| Node::decoded(&d)) - } - /// Get the data of the root node. - fn root_data(&self, r: &mut R) -> super::Result { + fn root_data(&self) -> super::Result { self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root))) - .map(|node| { r.record(self.root, &*node, 0); node }) } - /// Get the root node as a `Node`. - fn get_node<'a, R: 'a + Recorder>(&'db self, node: &'db [u8], r: &'a mut R, depth: u32) -> super::Result { - self.get_raw_or_lookup(node, r, depth).map(|n| Node::decoded(&n)) - } - - /// Indentation helper for `formal_all`. + /// Indentation helper for `format_all`. fn fmt_indent(&self, f: &mut fmt::Formatter, size: usize) -> fmt::Result { for _ in 0..size { write!(f, " ")?; @@ -157,8 +92,8 @@ impl<'db> TrieDB<'db> { Node::Leaf(slice, value) => writeln!(f, "'{:?}: {:?}.", slice, value.pretty())?, Node::Extension(ref slice, ref item) => { write!(f, "'{:?} ", slice)?; - if let Ok(node) = self.get_node(&*item, &mut NoOp, 0) { - self.fmt_all(node, f, deepness)?; + if let Ok(node) = self.get_raw_or_lookup(&*item) { + self.fmt_all(Node::decoded(&node), f, deepness)?; } }, Node::Branch(ref nodes, ref value) => { @@ -168,7 +103,8 @@ impl<'db> TrieDB<'db> { writeln!(f, "=: {:?}", v.pretty())? } for i in 0..16 { - match self.get_node(&*nodes[i], &mut NoOp, 0) { + let node = self.get_raw_or_lookup(&*nodes[i]); + match node.as_ref().map(|n| Node::decoded(&*n)) { Ok(Node::Empty) => {}, Ok(n) => { self.fmt_indent(f, deepness + 1)?; @@ -189,64 +125,49 @@ impl<'db> TrieDB<'db> { Ok(()) } - /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists. - fn do_lookup<'key, R: 'key>(&'db self, key: &NibbleSlice<'key>, r: &'key mut R) -> super::Result> - where 'db: 'key, R: Recorder - { - let root_rlp = self.root_data(r)?; - self.get_from_node(&root_rlp, key, r, 1) - } - - /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no - /// value exists for the key. - /// - /// Note: Not a public API; use Trie trait functions. - fn get_from_node<'key, R: 'key>( - &'db self, - node: &'db [u8], - key: &NibbleSlice<'key>, - r: &'key mut R, - d: u32 - ) -> super::Result> where 'db: 'key, R: Recorder { - match Node::decoded(node) { - Node::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == *key => Ok(Some(value.clone())), - Node::Extension(ref slice, ref item) => { - let slice = &NibbleSlice::from_encoded(slice).0; - if key.starts_with(slice) { - let data = self.get_raw_or_lookup(&*item, r, d)?; - self.get_from_node(&data, &key.mid(slice.len()), r, d + 1) - } else { - Ok(None) - } - }, - Node::Branch(ref nodes, ref value) => match key.is_empty() { - true => Ok(value.clone()), - false => { - let node = self.get_raw_or_lookup(&*nodes[key.at(0) as usize], r, d)?; - self.get_from_node(&node, &key.mid(1), r, d + 1) - } - }, - _ => Ok(None) - } - } - /// Given some node-describing data `node`, return the actual node RLP. /// This could be a simple identity operation in the case that the node is sufficiently small, but /// may require a database lookup. 
- fn get_raw_or_lookup(&'db self, node: &'db [u8], rec: &mut R, d: u32) -> super::Result { + fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> super::Result { // check if its sha3 + len let r = Rlp::new(node); match r.is_data() && r.size() == 32 { true => { let key = r.as_val::(); self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key))) - .map(|raw| { rec.record(&key, &raw, d); raw }) } false => Ok(DBValue::from_slice(node)) } } } +impl<'db> Trie for TrieDB<'db> { + fn iter<'a>(&'a self) -> super::Result + 'a>> { + TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>) + } + + fn root(&self) -> &H256 { self.root } + + fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result> + where 'a: 'key + { + Lookup { + db: self.db, + query: query, + hash: self.root.clone(), + }.look_up(NibbleSlice::new(key)) + } +} + +impl<'db> fmt::Debug for TrieDB<'db> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f, "c={:?} [", self.hash_count)?; + let root_rlp = self.db.get(self.root).expect("Trie root not found!"); + self.fmt_all(Node::decoded(&root_rlp), f, 0)?; + writeln!(f, "]") + } +} + #[derive(Clone, Eq, PartialEq)] enum Status { Entering, @@ -257,7 +178,7 @@ enum Status { #[derive(Clone, Eq, PartialEq)] struct Crumb { - node: Node, + node: OwnedNode, status: Status, } @@ -265,10 +186,10 @@ impl Crumb { /// Move on to next status in the node's sequence. fn increment(&mut self) { self.status = match (&self.status, &self.node) { - (_, &Node::Empty) => Status::Exiting, + (_, &OwnedNode::Empty) => Status::Exiting, (&Status::Entering, _) => Status::At, - (&Status::At, &Node::Branch(_, _)) => Status::AtChild(0), - (&Status::AtChild(x), &Node::Branch(_, _)) if x < 15 => Status::AtChild(x + 1), + (&Status::At, &OwnedNode::Branch(_, _)) => Status::AtChild(0), + (&Status::AtChild(x), &OwnedNode::Branch(_, _)) if x < 15 => Status::AtChild(x + 1), _ => Status::Exiting, } } @@ -291,41 +212,40 @@ impl<'a> TrieDBIterator<'a> { key_nibbles: Vec::new(), }; - db.root_data(&mut NoOp).and_then(|root| r.descend(&root))?; + db.root_data().and_then(|root| r.descend(&root))?; Ok(r) } - fn seek_descend<'key> ( &mut self, node: &[u8], key: &NibbleSlice<'key>, d: u32) -> super::Result<()> { - match Node::decoded(node) { + fn seek_descend<'key>(&mut self, node_data: DBValue, key: &NibbleSlice<'key>) -> super::Result<()> { + let node = Node::decoded(&node_data); + match node { Node::Leaf(ref slice, _) => { - let slice = &NibbleSlice::from_encoded(slice).0; if slice == key { self.trail.push(Crumb { status: Status::At, - node: Node::decoded(node), + node: node.clone().into(), }); } else { self.trail.push(Crumb { status: Status::Exiting, - node: Node::decoded(node), + node: node.clone().into(), }); } + self.key_nibbles.extend(slice.iter()); Ok(()) }, Node::Extension(ref slice, ref item) => { - let slice = &NibbleSlice::from_encoded(slice).0; if key.starts_with(slice) { - let mut r = NoOp; self.trail.push(Crumb { status: Status::At, - node: Node::decoded(node), + node: node.clone().into(), }); self.key_nibbles.extend(slice.iter()); - let data = self.db.get_raw_or_lookup(&*item, &mut r, d)?; - self.seek_descend(&data, &key.mid(slice.len()), d + 1) + let data = self.db.get_raw_or_lookup(&*item)?; + self.seek_descend(data, &key.mid(slice.len())) } else { - self.descend(node)?; + self.descend(&node_data)?; Ok(()) } }, @@ -333,20 +253,19 @@ impl<'a> TrieDBIterator<'a> { true => { self.trail.push(Crumb { status: Status::At, - node: Node::decoded(node), + node: 
node.clone().into(), }); Ok(()) }, false => { - let mut r = NoOp; let i = key.at(0); self.trail.push(Crumb { status: Status::AtChild(i as usize), - node: Node::decoded(node), + node: node.clone().into(), }); self.key_nibbles.push(i); - let child = self.db.get_raw_or_lookup(&*nodes[i as usize], &mut r, d)?; - self.seek_descend(&child, &key.mid(1), d + 1) + let child = self.db.get_raw_or_lookup(&*nodes[i as usize])?; + self.seek_descend(child, &key.mid(1)) } }, _ => Ok(()) @@ -357,10 +276,12 @@ impl<'a> TrieDBIterator<'a> { fn descend(&mut self, d: &[u8]) -> super::Result<()> { self.trail.push(Crumb { status: Status::Entering, - node: self.db.get_node(d, &mut NoOp, 0)?, + node: Node::decoded(&self.db.get_raw_or_lookup(d)?).into(), }); - match self.trail.last().expect("just pushed item; qed").node { - Node::Leaf(ref n, _) | Node::Extension(ref n, _) => { self.key_nibbles.extend(NibbleSlice::from_encoded(n).0.iter()); }, + match &self.trail.last().expect("just pushed item; qed").node { + &OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => { + self.key_nibbles.extend((0..n.len()).map(|i| n.at(i))); + }, _ => {} } @@ -379,9 +300,8 @@ impl<'a> TrieIterator for TrieDBIterator<'a> { fn seek(&mut self, key: &[u8]) -> super::Result<()> { self.trail.clear(); self.key_nibbles.clear(); - let mut r = NoOp; - let root_rlp = self.db.root_data(&mut r)?; - self.seek_descend(&root_rlp, &NibbleSlice::new(key), 1) + let root_rlp = self.db.root_data()?; + self.seek_descend(root_rlp, &NibbleSlice::new(key)) } } @@ -397,27 +317,27 @@ impl<'a> Iterator for TrieDBIterator<'a> { match (b.status, b.node) { (Status::Exiting, n) => { match n { - Node::Leaf(n, _) | Node::Extension(n, _) => { + OwnedNode::Leaf(n, _) | OwnedNode::Extension(n, _) => { let l = self.key_nibbles.len(); - self.key_nibbles.truncate(l - NibbleSlice::from_encoded(&*n).0.len()); + self.key_nibbles.truncate(l - n.len()); }, - Node::Branch(_, _) => { self.key_nibbles.pop(); }, + OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); }, _ => {} } self.trail.pop(); // continue }, - (Status::At, Node::Leaf(_, v)) | (Status::At, Node::Branch(_, Some(v))) => { + (Status::At, OwnedNode::Leaf(_, v)) | (Status::At, OwnedNode::Branch(_, Some(v))) => { return Some(Ok((self.key(), v))); }, - (Status::At, Node::Extension(_, d)) => { + (Status::At, OwnedNode::Extension(_, d)) => { if let Err(e) = self.descend(&*d) { return Some(Err(e)); } // continue }, - (Status::At, Node::Branch(_, _)) => {}, - (Status::AtChild(i), Node::Branch(ref children, _)) if children[i].len() > 0 => { + (Status::At, OwnedNode::Branch(_, _)) => {}, + (Status::AtChild(i), OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => { match i { 0 => self.key_nibbles.push(0), i => *self.key_nibbles.last_mut() @@ -428,7 +348,7 @@ impl<'a> Iterator for TrieDBIterator<'a> { } // continue }, - (Status::AtChild(i), Node::Branch(_, _)) => { + (Status::AtChild(i), OwnedNode::Branch(_, _)) => { if i == 0 { self.key_nibbles.push(0); } @@ -440,29 +360,6 @@ impl<'a> Iterator for TrieDBIterator<'a> { } } -impl<'db> Trie for TrieDB<'db> { - fn iter<'a>(&'a self) -> super::Result + 'a>> { - TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>) - } - - fn root(&self) -> &H256 { self.root } - - fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result> - where 'a: 'b, R: Recorder - { - self.do_lookup(&NibbleSlice::new(key), rec) - } -} - -impl<'db> fmt::Debug for TrieDB<'db> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - writeln!(f, "c={:?} 
[", self.hash_count)?; - let root_rlp = self.db.get(self.root).expect("Trie root not found!"); - self.fmt_all(Node::decoded(&root_rlp), f, 0)?; - writeln!(f, "]") - } -} - #[test] fn iterator() { use memorydb::*; @@ -529,3 +426,23 @@ fn iterator_seek() { iter.seek(b"C").unwrap(); assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::>()[..]); } + +#[test] +fn get_len() { + use memorydb::*; + use super::TrieMut; + use super::triedbmut::*; + + let mut memdb = MemoryDB::new(); + let mut root = H256::new(); + { + let mut t = TrieDBMut::new(&mut memdb, &mut root); + t.insert(b"A", b"ABC").unwrap(); + t.insert(b"B", b"ABCBA").unwrap(); + } + + let t = TrieDB::new(&memdb, &root).unwrap(); + assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()), Ok(Some(3))); + assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()), Ok(Some(5))); + assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()), Ok(None)); +} diff --git a/util/src/trie/triedbmut.rs b/util/src/trie/triedbmut.rs index 90d5867dd..0563adc1f 100644 --- a/util/src/trie/triedbmut.rs +++ b/util/src/trie/triedbmut.rs @@ -17,6 +17,7 @@ //! In-memory trie representation. use super::{TrieError, TrieMut}; +use super::lookup::Lookup; use super::node::Node as RlpNode; use super::node::NodeKey; @@ -100,22 +101,22 @@ impl Node { fn from_rlp(rlp: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self { match RlpNode::decoded(rlp) { RlpNode::Empty => Node::Empty, - RlpNode::Leaf(k, v) => Node::Leaf(k, v), + RlpNode::Leaf(k, v) => Node::Leaf(k.encoded(true), DBValue::from_slice(&v)), RlpNode::Extension(key, cb) => { - Node::Extension(key, Self::inline_or_hash(&*cb, db, storage)) + Node::Extension(key.encoded(false), Self::inline_or_hash(cb, db, storage)) } RlpNode::Branch(children_rlp, val) => { let mut children = empty_children(); for i in 0..16 { - let raw = &children_rlp[i]; - let child_rlp = Rlp::new(&*raw); + let raw = children_rlp[i]; + let child_rlp = Rlp::new(raw); if !child_rlp.is_empty() { - children[i] = Some(Self::inline_or_hash(&*raw, db, storage)); + children[i] = Some(Self::inline_or_hash(raw, db, storage)); } } - Node::Branch(children, val) + Node::Branch(children, val.map(DBValue::from_slice)) } } } @@ -370,7 +371,11 @@ impl<'a> TrieDBMut<'a> { where 'x: 'key { match *handle { - NodeHandle::Hash(ref hash) => self.do_db_lookup(hash, partial), + NodeHandle::Hash(ref hash) => Lookup { + db: &*self.db, + query: DBValue::from_slice, + hash: hash.clone(), + }.look_up(partial), NodeHandle::InMemory(ref handle) => match self.storage[handle] { Node::Empty => Ok(None), Node::Leaf(ref key, ref value) => { @@ -403,54 +408,6 @@ impl<'a> TrieDBMut<'a> { } } - /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists. - fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> super::Result> - where 'x: 'key - { - self.db.get(hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(*hash))) - .and_then(|node_rlp| self.get_from_db_node(&node_rlp, key)) - } - - /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no - /// value exists for the key. - /// - /// Note: Not a public API; use Trie trait functions. 
- fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> super::Result> - where 'x: 'key - { - match RlpNode::decoded(node) { - RlpNode::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == key => Ok(Some(value.clone())), - RlpNode::Extension(ref slice, ref item) => { - let slice = &NibbleSlice::from_encoded(slice).0; - if key.starts_with(slice) { - self.get_from_db_node(&self.get_raw_or_lookup(&*item)?, key.mid(slice.len())) - } else { - Ok(None) - } - }, - RlpNode::Branch(ref nodes, ref value) => match key.is_empty() { - true => Ok(value.clone()), - false => self.get_from_db_node(&self.get_raw_or_lookup(&*nodes[key.at(0) as usize])?, key.mid(1)) - }, - _ => Ok(None), - } - } - - /// Given some node-describing data `node`, return the actual node RLP. - /// This could be a simple identity operation in the case that the node is sufficiently small, but - /// may require a database lookup. - fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> super::Result { - // check if its sha3 + len - let r = Rlp::new(node); - match r.is_data() && r.size() == 32 { - true => { - let key = r.as_val::(); - self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key))) - } - false => Ok(DBValue::from_slice(node)) - } - } - /// insert a key, value pair into the trie, creating new nodes if necessary. fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option) -> super::Result<(StorageHandle, bool)>