merge master into jr-reverse-caching

This commit is contained in:
Jannis R 2017-01-09 12:50:26 +01:00
commit 92742e663a
No known key found for this signature in database
GPG Key ID: 0FE83946296A88A5
68 changed files with 1067 additions and 721 deletions

View File

@ -38,7 +38,8 @@ linux-stable:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
tags: tags:
- rust - rust
- rust-stable - rust-stable
@ -106,7 +107,8 @@ linux-centos:
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-centos - rust-centos
@ -144,7 +146,8 @@ linux-i686:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-i686 - rust-i686
@ -189,7 +192,8 @@ linux-armv7:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-arm - rust-arm
@ -235,6 +239,7 @@ linux-arm:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-arm - rust-arm
@ -272,7 +277,8 @@ linux-armv6:
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-arm - rust-arm
@ -316,7 +322,8 @@ linux-aarch64:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- rust - rust
- rust-arm - rust-arm
@ -352,7 +359,8 @@ darwin:
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg"
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5" - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5" --body "parity-"$VER"-osx-installer-EXPERIMENTAL.pkg.md5"
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
tags: tags:
- osx - osx
artifacts: artifacts:
@ -413,7 +421,8 @@ windows:
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5 - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5 - aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://icarus.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM% - curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1338/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
tags: tags:
- rust-windows - rust-windows
artifacts: artifacts:
@ -526,6 +535,7 @@ push-release:
- triggers - triggers
image: ethcore/rust:stable image: ethcore/rust:stable
script: script:
- curl --data "secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF - curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
- curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
tags: tags:
- curl - curl

2
Cargo.lock generated
View File

@ -1501,7 +1501,7 @@ dependencies = [
[[package]] [[package]]
name = "parity-ui-precompiled" name = "parity-ui-precompiled"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#a74caf6d8fe4b3371b291fb47f15c043504ef738" source = "git+https://github.com/ethcore/js-precompiled.git#fbc7864393ebbc78ea8f7bc4729f2ac3bdcb9a0e"
dependencies = [ dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]

View File

@ -17,7 +17,7 @@
use std::io; use std::io;
use std::io::Read; use std::io::Read;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::{Path, PathBuf};
use page::{LocalPageEndpoint, PageCache}; use page::{LocalPageEndpoint, PageCache};
use endpoint::{Endpoints, EndpointInfo}; use endpoint::{Endpoints, EndpointInfo};
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest}; use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest};
@ -28,10 +28,79 @@ struct LocalDapp {
info: EndpointInfo, info: EndpointInfo,
} }
fn local_dapps(dapps_path: String) -> Vec<LocalDapp> { /// Tries to find and read manifest file in given `path` to extract `EndpointInfo`
let files = fs::read_dir(dapps_path.as_str()); /// If manifest is not found sensible default `EndpointInfo` is returned based on given `name`.
fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
path.push(MANIFEST_FILENAME);
fs::File::open(path.clone())
.map_err(|e| format!("{:?}", e))
.and_then(|mut f| {
// Reat file
let mut s = String::new();
f.read_to_string(&mut s).map_err(|e| format!("{:?}", e))?;
// Try to deserialize manifest
deserialize_manifest(s)
})
.map(Into::into)
.unwrap_or_else(|e| {
warn!(target: "dapps", "Cannot read manifest file at: {:?}. Error: {:?}", path, e);
EndpointInfo {
name: name.into(),
description: name.into(),
version: "0.0.0".into(),
author: "?".into(),
icon_url: "icon.png".into(),
}
})
}
/// Returns Dapp Id and Local Dapp Endpoint for given filesystem path.
/// Parses the path to extract last component (for name).
/// `None` is returned when path is invalid or non-existent.
pub fn local_endpoint<P: AsRef<Path>>(path: P, signer_address: Option<(String, u16)>) -> Option<(String, Box<LocalPageEndpoint>)> {
let path = path.as_ref().to_owned();
path.canonicalize().ok().and_then(|path| {
let name = path.file_name().and_then(|name| name.to_str());
name.map(|name| {
let dapp = local_dapp(name.into(), path.clone());
(dapp.id, Box::new(LocalPageEndpoint::new(
dapp.path, dapp.info, PageCache::Disabled, signer_address.clone())
))
})
})
}
fn local_dapp(name: String, path: PathBuf) -> LocalDapp {
// try to get manifest file
let info = read_manifest(&name, path.clone());
LocalDapp {
id: name,
path: path,
info: info,
}
}
/// Returns endpoints for Local Dapps found for given filesystem path.
/// Scans the directory and collects `LocalPageEndpoints`.
pub fn local_endpoints<P: AsRef<Path>>(dapps_path: P, signer_address: Option<(String, u16)>) -> Endpoints {
let mut pages = Endpoints::new();
for dapp in local_dapps(dapps_path.as_ref()) {
pages.insert(
dapp.id,
Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, signer_address.clone()))
);
}
pages
}
fn local_dapps(dapps_path: &Path) -> Vec<LocalDapp> {
let files = fs::read_dir(dapps_path);
if let Err(e) = files { if let Err(e) = files {
warn!(target: "dapps", "Unable to load local dapps from: {}. Reason: {:?}", dapps_path, e); warn!(target: "dapps", "Unable to load local dapps from: {}. Reason: {:?}", dapps_path.display(), e);
return vec![]; return vec![];
} }
@ -59,51 +128,6 @@ fn local_dapps(dapps_path: String) -> Vec<LocalDapp> {
} }
m.ok() m.ok()
}) })
.map(|(name, path)| { .map(|(name, path)| local_dapp(name, path))
// try to get manifest file
let info = read_manifest(&name, path.clone());
LocalDapp {
id: name,
path: path,
info: info,
}
})
.collect() .collect()
} }
fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
path.push(MANIFEST_FILENAME);
fs::File::open(path.clone())
.map_err(|e| format!("{:?}", e))
.and_then(|mut f| {
// Reat file
let mut s = String::new();
f.read_to_string(&mut s).map_err(|e| format!("{:?}", e))?;
// Try to deserialize manifest
deserialize_manifest(s)
})
.map(Into::into)
.unwrap_or_else(|e| {
warn!(target: "dapps", "Cannot read manifest file at: {:?}. Error: {:?}", path, e);
EndpointInfo {
name: name.into(),
description: name.into(),
version: "0.0.0".into(),
author: "?".into(),
icon_url: "icon.png".into(),
}
})
}
pub fn local_endpoints(dapps_path: String, signer_address: Option<(String, u16)>) -> Endpoints {
let mut pages = Endpoints::new();
for dapp in local_dapps(dapps_path) {
pages.insert(
dapp.id,
Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, signer_address.clone()))
);
}
pages
}

View File

@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use endpoint::{Endpoints, Endpoint}; use endpoint::{Endpoints, Endpoint};
use page::PageEndpoint; use page::PageEndpoint;
@ -43,7 +44,8 @@ pub fn utils() -> Box<Endpoint> {
} }
pub fn all_endpoints<F: Fetch>( pub fn all_endpoints<F: Fetch>(
dapps_path: String, dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
web_proxy_tokens: Arc<WebProxyTokens>, web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote, remote: Remote,
@ -51,6 +53,13 @@ pub fn all_endpoints<F: Fetch>(
) -> Endpoints { ) -> Endpoints {
// fetch fs dapps at first to avoid overwriting builtins // fetch fs dapps at first to avoid overwriting builtins
let mut pages = fs::local_endpoints(dapps_path, signer_address.clone()); let mut pages = fs::local_endpoints(dapps_path, signer_address.clone());
for path in extra_dapps {
if let Some((id, endpoint)) = fs::local_endpoint(path.clone(), signer_address.clone()) {
pages.insert(id, endpoint);
} else {
warn!(target: "dapps", "Ignoring invalid dapp at {}", path.display());
}
}
// NOTE [ToDr] Dapps will be currently embeded on 8180 // NOTE [ToDr] Dapps will be currently embeded on 8180
insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(signer_address.clone())); insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(signer_address.clone()));

View File

@ -88,6 +88,7 @@ mod web;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::net::SocketAddr; use std::net::SocketAddr;
use std::collections::HashMap; use std::collections::HashMap;
@ -123,7 +124,8 @@ impl<F> WebProxyTokens for F where F: Fn(String) -> bool + Send + Sync {
/// Webapps HTTP+RPC server build. /// Webapps HTTP+RPC server build.
pub struct ServerBuilder<T: Fetch = FetchClient> { pub struct ServerBuilder<T: Fetch = FetchClient> {
dapps_path: String, dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
handler: Arc<IoHandler>, handler: Arc<IoHandler>,
registrar: Arc<ContractClient>, registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
@ -141,9 +143,10 @@ impl<T: Fetch> Extendable for ServerBuilder<T> {
impl ServerBuilder { impl ServerBuilder {
/// Construct new dapps server /// Construct new dapps server
pub fn new(dapps_path: String, registrar: Arc<ContractClient>, remote: Remote) -> Self { pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
ServerBuilder { ServerBuilder {
dapps_path: dapps_path, dapps_path: dapps_path.as_ref().to_owned(),
extra_dapps: vec![],
handler: Arc::new(IoHandler::new()), handler: Arc::new(IoHandler::new()),
registrar: registrar, registrar: registrar,
sync_status: Arc::new(|| false), sync_status: Arc::new(|| false),
@ -160,6 +163,7 @@ impl<T: Fetch> ServerBuilder<T> {
pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> { pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> {
ServerBuilder { ServerBuilder {
dapps_path: self.dapps_path, dapps_path: self.dapps_path,
extra_dapps: vec![],
handler: self.handler, handler: self.handler,
registrar: self.registrar, registrar: self.registrar,
sync_status: self.sync_status, sync_status: self.sync_status,
@ -188,6 +192,12 @@ impl<T: Fetch> ServerBuilder<T> {
self self
} }
/// Change extra dapps paths (apart from `dapps_path`)
pub fn extra_dapps<P: AsRef<Path>>(mut self, extra_dapps: &[P]) -> Self {
self.extra_dapps = extra_dapps.iter().map(|p| p.as_ref().to_owned()).collect();
self
}
/// Asynchronously start server with no authentication, /// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error. /// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(self, addr: &SocketAddr, hosts: Option<Vec<String>>) -> Result<Server, ServerError> { pub fn start_unsecured_http(self, addr: &SocketAddr, hosts: Option<Vec<String>>) -> Result<Server, ServerError> {
@ -197,6 +207,7 @@ impl<T: Fetch> ServerBuilder<T> {
NoAuth, NoAuth,
self.handler.clone(), self.handler.clone(),
self.dapps_path.clone(), self.dapps_path.clone(),
self.extra_dapps.clone(),
self.signer_address.clone(), self.signer_address.clone(),
self.registrar.clone(), self.registrar.clone(),
self.sync_status.clone(), self.sync_status.clone(),
@ -215,6 +226,7 @@ impl<T: Fetch> ServerBuilder<T> {
HttpBasicAuth::single_user(username, password), HttpBasicAuth::single_user(username, password),
self.handler.clone(), self.handler.clone(),
self.dapps_path.clone(), self.dapps_path.clone(),
self.extra_dapps.clone(),
self.signer_address.clone(), self.signer_address.clone(),
self.registrar.clone(), self.registrar.clone(),
self.sync_status.clone(), self.sync_status.clone(),
@ -270,7 +282,8 @@ impl Server {
hosts: Option<Vec<String>>, hosts: Option<Vec<String>>,
authorization: A, authorization: A,
handler: Arc<IoHandler>, handler: Arc<IoHandler>,
dapps_path: String, dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
registrar: Arc<ContractClient>, registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
@ -287,7 +300,14 @@ impl Server {
remote.clone(), remote.clone(),
fetch.clone(), fetch.clone(),
)); ));
let endpoints = Arc::new(apps::all_endpoints(dapps_path, signer_address.clone(), web_proxy_tokens, remote.clone(), fetch.clone())); let endpoints = Arc::new(apps::all_endpoints(
dapps_path,
extra_dapps,
signer_address.clone(),
web_proxy_tokens,
remote.clone(),
fetch.clone(),
));
let cors_domains = Self::cors_domains(signer_address.clone()); let cors_domains = Self::cors_domains(signer_address.clone());
let special = Arc::new({ let special = Arc::new({

View File

@ -51,7 +51,7 @@ pub fn init_server<F, B>(hosts: Option<Vec<String>>, process: F, remote: Remote)
let mut dapps_path = env::temp_dir(); let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading"); dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let server = process(ServerBuilder::new( let server = process(ServerBuilder::new(
dapps_path.to_str().unwrap().into(), registrar.clone(), remote, &dapps_path, registrar.clone(), remote,
)) ))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT))) .signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap(); .start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap();
@ -66,7 +66,7 @@ pub fn serve_with_auth(user: &str, pass: &str) -> Server {
let registrar = Arc::new(FakeRegistrar::new()); let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir(); let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading"); dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone(), Remote::new_sync()) ServerBuilder::new(&dapps_path, registrar.clone(), Remote::new_sync())
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT))) .signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap() .start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap()
} }

View File

@ -178,8 +178,8 @@ impl Account {
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \ SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail."); using it will not fail.");
let item: U256 = match db.get(key){ let item: U256 = match db.get_with(key, ::rlp::decode) {
Ok(x) => x.map_or_else(U256::zero, |v| decode(&*v)), Ok(x) => x.unwrap_or_else(U256::zero),
Err(e) => panic!("Encountered potential DB corruption: {}", e), Err(e) => panic!("Encountered potential DB corruption: {}", e),
}; };
let value: H256 = item.into(); let value: H256 = item.into();
@ -453,12 +453,12 @@ impl Account {
/// omitted. /// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> { pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB}; use util::trie::{Trie, TrieDB};
use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder}; use util::trie::recorder::Recorder;
let mut recorder = TrieRecorder::with_depth(from_level); let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(db, &self.storage_root)?; let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_recorded(&storage_key, &mut recorder)?; let _ = trie.get_with(&storage_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect()) Ok(recorder.drain().into_iter().map(|r| r.data).collect())
} }

View File

@ -32,7 +32,7 @@ use state_db::StateDB;
use util::*; use util::*;
use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder}; use util::trie::recorder::Recorder;
mod account; mod account;
mod substate; mod substate;
@ -425,8 +425,8 @@ impl State {
// account is not found in the global cache, get from the DB and insert into local // account is not found in the global cache, get from the DB and insert into local
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(address) { let maybe_acc = match db.get_with(address, Account::from_rlp) {
Ok(acc) => acc.map(|v| Account::from_rlp(&v)), Ok(acc) => acc,
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
}; };
let r = maybe_acc.as_ref().map_or(H256::new(), |a| { let r = maybe_acc.as_ref().map_or(H256::new(), |a| {
@ -690,8 +690,8 @@ impl State {
// not found in the global cache, get from the DB and insert into local // not found in the global cache, get from the DB and insert into local
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let mut maybe_acc = match db.get(a) { let mut maybe_acc = match db.get_with(a, Account::from_rlp) {
Ok(acc) => acc.map(|v| Account::from_rlp(&v)), Ok(acc) => acc,
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
}; };
if let Some(ref mut account) = maybe_acc.as_mut() { if let Some(ref mut account) = maybe_acc.as_mut() {
@ -722,9 +722,8 @@ impl State {
None => { None => {
let maybe_acc = if self.db.check_non_null_bloom(a) { let maybe_acc = if self.db.check_non_null_bloom(a) {
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
match db.get(a) { match db.get_with(a, Account::from_rlp) {
Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))), Ok(acc) => AccountEntry::new_clean(acc),
Ok(None) => AccountEntry::new_clean(None),
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
} }
} else { } else {
@ -770,9 +769,9 @@ impl State {
/// Requires a secure trie to be used for accurate results. /// Requires a secure trie to be used for accurate results.
/// `account_key` == sha3(address) /// `account_key` == sha3(address)
pub fn prove_account(&self, account_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> { pub fn prove_account(&self, account_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
let mut recorder = TrieRecorder::with_depth(from_level); let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let _ = trie.get_recorded(&account_key, &mut recorder)?; trie.get_with(&account_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect()) Ok(recorder.drain().into_iter().map(|r| r.data).collect())
} }
@ -786,8 +785,8 @@ impl State {
// TODO: probably could look into cache somehow but it's keyed by // TODO: probably could look into cache somehow but it's keyed by
// address, not sha3(address). // address, not sha3(address).
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let acc = match trie.get(&account_key)? { let acc = match trie.get_with(&account_key, Account::from_rlp)? {
Some(rlp) => Account::from_rlp(&rlp), Some(acc) => acc,
None => return Ok(Vec::new()), None => return Ok(Vec::new()),
}; };
@ -799,8 +798,8 @@ impl State {
/// Only works when backed by a secure trie. /// Only works when backed by a secure trie.
pub fn code_by_address_hash(&self, account_key: H256) -> Result<Option<Bytes>, Box<TrieError>> { pub fn code_by_address_hash(&self, account_key: H256) -> Result<Option<Bytes>, Box<TrieError>> {
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let mut acc = match trie.get(&account_key)? { let mut acc = match trie.get_with(&account_key, Account::from_rlp)? {
Some(rlp) => Account::from_rlp(&rlp), Some(acc) => acc,
None => return Ok(None), None => return Ok(None),
}; };

View File

@ -1,6 +1,6 @@
{ {
"name": "parity.js", "name": "parity.js",
"version": "0.2.173", "version": "0.2.178",
"main": "release/index.js", "main": "release/index.js",
"jsnext:main": "src/index.js", "jsnext:main": "src/index.js",
"author": "Parity Team <admin@parity.io>", "author": "Parity Team <admin@parity.io>",

View File

@ -75,6 +75,10 @@ export default class Contract {
return this._functions; return this._functions;
} }
get receipt () {
return this._receipt;
}
get instance () { get instance () {
this._instance.address = this._address; this._instance.address = this._address;
return this._instance; return this._instance;
@ -139,6 +143,7 @@ export default class Contract {
} }
setState({ state: 'hasReceipt', receipt }); setState({ state: 'hasReceipt', receipt });
this._receipt = receipt;
this._address = receipt.contractAddress; this._address = receipt.contractAddress;
return this._address; return this._address;
}); });

View File

@ -40,7 +40,7 @@ export default class CreateAccount extends Component {
accountNameError: ERRORS.noName, accountNameError: ERRORS.noName,
accounts: null, accounts: null,
isValidName: false, isValidName: false,
isValidPass: false, isValidPass: true,
passwordHint: '', passwordHint: '',
password1: '', password1: '',
password1Error: null, password1Error: null,

View File

@ -37,7 +37,7 @@ export default class NewImport extends Component {
accountName: '', accountName: '',
accountNameError: ERRORS.noName, accountNameError: ERRORS.noName,
isValidFile: false, isValidFile: false,
isValidPass: false, isValidPass: true,
isValidName: false, isValidName: false,
password: '', password: '',
passwordError: null, passwordError: null,

View File

@ -36,7 +36,7 @@ export default class RawKey extends Component {
accountNameError: ERRORS.noName, accountNameError: ERRORS.noName,
isValidKey: false, isValidKey: false,
isValidName: false, isValidName: false,
isValidPass: false, isValidPass: true,
passwordHint: '', passwordHint: '',
password1: '', password1: '',
password1Error: null, password1Error: null,
@ -119,8 +119,6 @@ export default class RawKey extends Component {
const rawKey = event.target.value; const rawKey = event.target.value;
let rawKeyError = null; let rawKeyError = null;
console.log(rawKey.length, rawKey);
if (!rawKey || !rawKey.trim().length) { if (!rawKey || !rawKey.trim().length) {
rawKeyError = ERRORS.noKey; rawKeyError = ERRORS.noKey;
} else if (rawKey.substr(0, 2) !== '0x' || rawKey.substr(2).length !== 64 || !api.util.isHex(rawKey)) { } else if (rawKey.substr(0, 2) !== '0x' || rawKey.substr(2).length !== 64 || !api.util.isHex(rawKey)) {

View File

@ -31,9 +31,9 @@ export default class RecoveryPhrase extends Component {
state = { state = {
accountName: '', accountName: '',
accountNameError: ERRORS.noName, accountNameError: ERRORS.noName,
isValidPass: false, isValidPass: true,
isValidName: false, isValidName: false,
isValidPhrase: false, isValidPhrase: true,
passwordHint: '', passwordHint: '',
password1: '', password1: '',
password1Error: null, password1Error: null,

View File

@ -240,6 +240,7 @@ export default class CreateAccount extends Component {
if (createType === 'fromNew' || createType === 'fromPhrase') { if (createType === 'fromNew' || createType === 'fromPhrase') {
let phrase = this.state.phrase; let phrase = this.state.phrase;
if (createType === 'fromPhrase' && windowsPhrase) { if (createType === 'fromPhrase' && windowsPhrase) {
phrase = phrase phrase = phrase
.split(' ') // get the words .split(' ') // get the words
@ -271,7 +272,9 @@ export default class CreateAccount extends Component {
this.newError(error); this.newError(error);
}); });
} else if (createType === 'fromRaw') { }
if (createType === 'fromRaw') {
return api.parity return api.parity
.newAccountFromSecret(this.state.rawKey, this.state.password) .newAccountFromSecret(this.state.rawKey, this.state.password)
.then((address) => { .then((address) => {
@ -296,7 +299,9 @@ export default class CreateAccount extends Component {
this.newError(error); this.newError(error);
}); });
} else if (createType === 'fromGeth') { }
if (createType === 'fromGeth') {
return api.parity return api.parity
.importGethAccounts(this.state.gethAddresses) .importGethAccounts(this.state.gethAddresses)
.then((result) => { .then((result) => {

View File

@ -455,10 +455,15 @@ class DeployContract extends Component {
this.setState({ step: 'DEPLOYMENT' }); this.setState({ step: 'DEPLOYMENT' });
api const contract = api.newContract(abiParsed);
.newContract(abiParsed)
contract
.deploy(options, params, this.onDeploymentState) .deploy(options, params, this.onDeploymentState)
.then((address) => { .then((address) => {
const blockNumber = contract._receipt
? contract.receipt.blockNumber.toNumber()
: null;
return Promise.all([ return Promise.all([
api.parity.setAccountName(address, name), api.parity.setAccountName(address, name),
api.parity.setAccountMeta(address, { api.parity.setAccountMeta(address, {
@ -466,8 +471,9 @@ class DeployContract extends Component {
contract: true, contract: true,
timestamp: Date.now(), timestamp: Date.now(),
deleted: false, deleted: false,
source, blockNumber,
description description,
source
}) })
]) ])
.then(() => { .then(() => {

View File

@ -133,7 +133,7 @@ export default class Store {
} }
testPassword = (password) => { testPassword = (password) => {
this.setBusy(false); this.setBusy(true);
return this._api.parity return this._api.parity
.testPassword(this.address, password || this.validatePassword) .testPassword(this.address, password || this.validatePassword)

View File

@ -86,7 +86,7 @@ export default class Balances {
// If syncing, only retrieve balances once every // If syncing, only retrieve balances once every
// few seconds // few seconds
if (syncing) { if (syncing) {
this.shortThrottledFetch(); this.shortThrottledFetch.cancel();
return this.longThrottledFetch(); return this.longThrottledFetch();
} }

View File

@ -173,18 +173,15 @@ export function fetchTokens (_tokenIds) {
export function fetchBalances (_addresses) { export function fetchBalances (_addresses) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal } = getState(); const { api, personal } = getState();
const { visibleAccounts, accountsInfo } = personal; const { visibleAccounts, accounts } = personal;
const addresses = uniq((_addresses || visibleAccounts || []).concat(Object.keys(accountsInfo))); const addresses = uniq(_addresses || visibleAccounts || []);
if (addresses.length === 0) {
return Promise.resolve();
}
// With only a single account, more info will be displayed. // With only a single account, more info will be displayed.
const fullFetch = addresses.length === 1; const fullFetch = addresses.length === 1;
const addressesToFetch = uniq(addresses); // Add accounts addresses (for notifications, accounts selection, etc.)
const addressesToFetch = uniq(addresses.concat(Object.keys(accounts)));
return Promise return Promise
.all(addressesToFetch.map((addr) => fetchAccount(addr, api, fullFetch))) .all(addressesToFetch.map((addr) => fetchAccount(addr, api, fullFetch)))

View File

@ -218,6 +218,7 @@ export default class CertificationsMiddleware {
const _addresses = action.addresses || []; const _addresses = action.addresses || [];
addresses = uniq(addresses.concat(_addresses)); addresses = uniq(addresses.concat(_addresses));
fetchConfirmedEvents(); fetchConfirmedEvents();
next(action);
break; break;
default: default:

View File

@ -1,69 +0,0 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import PromiseWorker from 'promise-worker';
import runtime from 'serviceworker-webpack-plugin/lib/runtime';
let workerRegistration;
// Setup the Service Worker
if ('serviceWorker' in navigator) {
workerRegistration = runtime
.register()
.then(() => navigator.serviceWorker.ready)
.then((registration) => {
const _worker = registration.active;
_worker.controller = registration.active;
const worker = new PromiseWorker(_worker);
return worker;
});
} else {
workerRegistration = Promise.reject('Service Worker is not available in your browser.');
}
export function setWorker (worker) {
return {
type: 'setWorker',
worker
};
}
export function setError (error) {
return {
type: 'setError',
error
};
}
export function setupWorker () {
return (dispatch, getState) => {
const state = getState();
if (state.compiler.worker) {
return;
}
workerRegistration
.then((worker) => {
dispatch(setWorker(worker));
})
.catch((error) => {
console.error('sw', error);
dispatch(setWorker(null));
});
};
}

View File

@ -22,7 +22,7 @@ export Status from './status';
export apiReducer from './apiReducer'; export apiReducer from './apiReducer';
export balancesReducer from './balancesReducer'; export balancesReducer from './balancesReducer';
export blockchainReducer from './blockchainReducer'; export blockchainReducer from './blockchainReducer';
export compilerReducer from './compilerReducer'; export workerReducer from './workerReducer';
export imagesReducer from './imagesReducer'; export imagesReducer from './imagesReducer';
export personalReducer from './personalReducer'; export personalReducer from './personalReducer';
export signerReducer from './signerReducer'; export signerReducer from './signerReducer';

View File

@ -122,7 +122,7 @@ export function setVisibleAccounts (addresses) {
return; return;
} }
dispatch(fetchBalances(addresses));
dispatch(_setVisibleAccounts(addresses)); dispatch(_setVisibleAccounts(addresses));
dispatch(fetchBalances(addresses));
}; };
} }

View File

@ -47,7 +47,7 @@ export default handleActions({
setVisibleAccounts (state, action) { setVisibleAccounts (state, action) {
const addresses = (action.addresses || []).sort(); const addresses = (action.addresses || []).sort();
if (isEqual(addresses, state.addresses)) { if (isEqual(addresses, state.visibleAccounts)) {
return state; return state;
} }

View File

@ -17,7 +17,7 @@
import * as actions from './signerActions'; import * as actions from './signerActions';
import { inHex } from '~/api/format/input'; import { inHex } from '~/api/format/input';
import { Wallet } from '../../util/wallet'; import { Signer } from '../../util/signer';
export default class SignerMiddleware { export default class SignerMiddleware {
constructor (api) { constructor (api) {
@ -58,6 +58,7 @@ export default class SignerMiddleware {
promise promise
.then((txHash) => { .then((txHash) => {
console.log('confirmRequest', id, txHash); console.log('confirmRequest', id, txHash);
if (!txHash) { if (!txHash) {
store.dispatch(actions.errorConfirmRequest({ id, err: 'Unable to confirm.' })); store.dispatch(actions.errorConfirmRequest({ id, err: 'Unable to confirm.' }));
return; return;
@ -73,12 +74,32 @@ export default class SignerMiddleware {
// Sign request in-browser // Sign request in-browser
const transaction = payload.sendTransaction || payload.signTransaction; const transaction = payload.sendTransaction || payload.signTransaction;
if (wallet && transaction) { if (wallet && transaction) {
(transaction.nonce.isZero() const noncePromise = transaction.nonce.isZero()
? this._api.parity.nextNonce(transaction.from) ? this._api.parity.nextNonce(transaction.from)
: Promise.resolve(transaction.nonce) : Promise.resolve(transaction.nonce);
).then(nonce => {
let txData = { const { worker } = store.getState().worker;
const signerPromise = worker && worker._worker.state === 'activated'
? worker
.postMessage({
action: 'getSignerSeed',
data: { wallet, password }
})
.then((result) => {
const seed = Buffer.from(result.data);
return new Signer(seed);
})
: Signer.fromJson(wallet, password);
// NOTE: Derving the key takes significant amount of time,
// make sure to display some kind of "in-progress" state.
return Promise
.all([ signerPromise, noncePromise ])
.then(([ signer, nonce ]) => {
const txData = {
to: inHex(transaction.to), to: inHex(transaction.to),
nonce: inHex(transaction.nonce.isZero() ? nonce : transaction.nonce), nonce: inHex(transaction.nonce.isZero() ? nonce : transaction.nonce),
gasPrice: inHex(transaction.gasPrice), gasPrice: inHex(transaction.gasPrice),
@ -87,19 +108,15 @@ export default class SignerMiddleware {
data: inHex(transaction.data) data: inHex(transaction.data)
}; };
try { return signer.signTransaction(txData);
// NOTE: Derving the key takes significant amount of time, })
// make sure to display some kind of "in-progress" state. .then((rawTx) => {
const signer = Wallet.fromJson(wallet, password); return handlePromise(this._api.signer.confirmRequestRaw(id, rawTx));
const rawTx = signer.signTransaction(txData); })
.catch((error) => {
handlePromise(this._api.signer.confirmRequestRaw(id, rawTx)); console.error(error.message);
} catch (error) {
console.error(error);
store.dispatch(actions.errorConfirmRequest({ id, err: error.message })); store.dispatch(actions.errorConfirmRequest({ id, err: error.message }));
}
}); });
return;
} }
handlePromise(this._api.signer.confirmRequest(id, { gas, gasPrice }, password)); handlePromise(this._api.signer.confirmRequest(id, { gas, gasPrice }, password));

View File

@ -125,12 +125,13 @@ export default class Status {
this._store.dispatch(statusCollection(status)); this._store.dispatch(statusCollection(status));
this._status = status; this._status = status;
} }
nextTimeout();
}) })
.catch((error) => { .catch((error) => {
console.error('_pollStatus', error); console.error('_pollStatus', error);
});
nextTimeout(); nextTimeout();
});
} }
/** /**

View File

@ -0,0 +1,68 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import PromiseWorker from 'promise-worker';
import runtime from 'serviceworker-webpack-plugin/lib/runtime';
import { setWorker } from './workerActions';
function getWorker () {
// Setup the Service Worker
if ('serviceWorker' in navigator) {
return runtime
.register()
.then(() => navigator.serviceWorker.ready)
.then((registration) => {
const worker = registration.active;
worker.controller = registration.active;
return new PromiseWorker(worker);
});
}
return Promise.reject('Service Worker is not available in your browser.');
}
export const setupWorker = (store) => {
const { dispatch, getState } = store;
const state = getState();
const stateWorker = state.worker.worker;
if (stateWorker !== undefined && !(stateWorker && stateWorker._worker.state === 'redundant')) {
return;
}
getWorker()
.then((worker) => {
if (worker) {
worker._worker.addEventListener('statechange', (event) => {
console.warn('worker state changed to', worker._worker.state);
// Re-install the new Worker
if (worker._worker.state === 'redundant') {
setupWorker(store);
}
});
}
dispatch(setWorker(worker));
})
.catch((error) => {
console.error('sw', error);
dispatch(setWorker(null));
});
};

View File

@ -14,4 +14,16 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './summary'; export function setWorker (worker) {
return {
type: 'setWorker',
worker
};
}
export function setError (error) {
return {
type: 'setError',
error
};
}

View File

@ -24,7 +24,7 @@ const initialState = {
export default handleActions({ export default handleActions({
setWorker (state, action) { setWorker (state, action) {
const { worker } = action; const { worker } = action;
return Object.assign({}, state, { worker }); return Object.assign({}, state, { worker: worker || null });
}, },
setError (state, action) { setError (state, action) {

View File

@ -19,7 +19,7 @@ import { routerReducer } from 'react-router-redux';
import { import {
apiReducer, balancesReducer, blockchainReducer, apiReducer, balancesReducer, blockchainReducer,
compilerReducer, imagesReducer, personalReducer, workerReducer, imagesReducer, personalReducer,
signerReducer, statusReducer as nodeStatusReducer, signerReducer, statusReducer as nodeStatusReducer,
snackbarReducer, walletReducer snackbarReducer, walletReducer
} from './providers'; } from './providers';
@ -41,13 +41,13 @@ export default function () {
balances: balancesReducer, balances: balancesReducer,
certifications: certificationsReducer, certifications: certificationsReducer,
blockchain: blockchainReducer, blockchain: blockchainReducer,
compiler: compilerReducer,
images: imagesReducer, images: imagesReducer,
nodeStatus: nodeStatusReducer, nodeStatus: nodeStatusReducer,
personal: personalReducer, personal: personalReducer,
registry: registryReducer,
signer: signerReducer, signer: signerReducer,
snackbar: snackbarReducer, snackbar: snackbarReducer,
wallet: walletReducer, wallet: walletReducer,
registry: registryReducer worker: workerReducer
}); });
} }

View File

@ -20,6 +20,7 @@ import initMiddleware from './middleware';
import initReducers from './reducers'; import initReducers from './reducers';
import { load as loadWallet } from './providers/walletActions'; import { load as loadWallet } from './providers/walletActions';
import { setupWorker } from './providers/worker';
import { import {
Balances as BalancesProvider, Balances as BalancesProvider,
@ -43,6 +44,7 @@ export default function (api, browserHistory) {
new StatusProvider(store, api).start(); new StatusProvider(store, api).start();
store.dispatch(loadWallet(api)); store.dispatch(loadWallet(api));
setupWorker(store);
return store; return store;
} }

View File

@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import registerPromiseWorker from 'promise-worker/register'; import registerPromiseWorker from 'promise-worker/register';
import { Signer } from '~/util/signer';
import SolidityUtils from '~/util/solidity'; import SolidityUtils from '~/util/solidity';
const CACHE_NAME = 'parity-cache-v1'; const CACHE_NAME = 'parity-cache-v1';
@ -93,12 +94,21 @@ function handleMessage (message) {
case 'setFiles': case 'setFiles':
return setFiles(message.data); return setFiles(message.data);
case 'getSignerSeed':
return getSignerSeed(message.data);
default: default:
console.warn(`unknown action "${message.action}"`); console.warn(`unknown action "${message.action}"`);
return null; return null;
} }
} }
function getSignerSeed (data) {
console.log('deriving seed from service-worker');
const { wallet, password } = data;
return Signer.getSeed(wallet, password);
}
function compile (data) { function compile (data) {
const { build } = data; const { build } = data;

View File

@ -71,15 +71,15 @@ export default class ActionbarSearch extends Component {
key='searchAccount'> key='searchAccount'>
<div className={ inputContainerClasses.join(' ') }> <div className={ inputContainerClasses.join(' ') }>
<InputChip <InputChip
addOnBlur
className={ styles.input } className={ styles.input }
hint='Enter search input...' hint='Enter search input...'
ref='inputChip'
tokens={ tokens } tokens={ tokens }
onBlur={ this.handleSearchBlur } onBlur={ this.handleSearchBlur }
onInputChange={ this.handleInputChange } onInputChange={ this.handleInputChange }
onTokensChange={ this.handleTokensChange } onTokensChange={ this.handleTokensChange }
addOnBlur
/> />
</div> </div>
@ -118,6 +118,10 @@ export default class ActionbarSearch extends Component {
handleSearchClick = () => { handleSearchClick = () => {
const { showSearch } = this.state; const { showSearch } = this.state;
if (!showSearch) {
this.refs.inputChip.focus();
}
this.handleOpenSearch(!showSearch); this.handleOpenSearch(!showSearch);
} }

View File

@ -27,13 +27,14 @@
} }
.toolbuttons { .toolbuttons {
} overflow: hidden;
.toolbuttons button { button {
margin: 10px 0 10px 16px !important; margin: 10px 0 10px 16px !important;
color: white !important; color: white !important;
} }
.toolbuttons svg { svg {
fill: white !important; fill: white !important;
}
} }

View File

@ -23,7 +23,7 @@
.empty { .empty {
line-height: 24px; line-height: 24px;
margin: 0.75em 0.5em 0 0; margin: 0 0.5em 0 0;
opacity: 0.25; opacity: 0.25;
} }

View File

@ -14,7 +14,7 @@
/* You should have received a copy of the GNU General Public License /* You should have received a copy of the GNU General Public License
/* along with Parity. If not, see <http://www.gnu.org/licenses/>. /* along with Parity. If not, see <http://www.gnu.org/licenses/>.
*/ */
.byline { .byline, .description {
overflow: hidden; overflow: hidden;
position: relative; position: relative;
line-height: 1.2em; line-height: 1.2em;
@ -31,6 +31,11 @@
} }
} }
.description {
font-size: 0.75em;
margin: 0.5em 0 0;
}
.title { .title {
text-transform: uppercase; text-transform: uppercase;
margin: 0; margin: 0;

View File

@ -22,13 +22,14 @@ import styles from './title.css';
export default class Title extends Component { export default class Title extends Component {
static propTypes = { static propTypes = {
byline: nodeOrStringProptype(),
className: PropTypes.string, className: PropTypes.string,
title: nodeOrStringProptype(), description: nodeOrStringProptype(),
byline: nodeOrStringProptype() title: nodeOrStringProptype()
} }
render () { render () {
const { className, title, byline } = this.props; const { byline, className, title } = this.props;
const byLine = typeof byline === 'string' const byLine = typeof byline === 'string'
? ( ? (
@ -46,6 +47,29 @@ export default class Title extends Component {
<div className={ styles.byline }> <div className={ styles.byline }>
{ byLine } { byLine }
</div> </div>
{ this.renderDescription() }
</div>
);
}
renderDescription () {
const { description } = this.props;
if (!description) {
return null;
}
const desc = typeof description === 'string'
? (
<span title={ description }>
{ description }
</span>
)
: description;
return (
<div className={ styles.description }>
{ desc }
</div> </div>
); );
} }

View File

@ -170,6 +170,10 @@ export default class InputChip extends Component {
.filter(v => v !== value)); .filter(v => v !== value));
this.handleTokensChange(newTokens); this.handleTokensChange(newTokens);
this.focus();
}
focus = () => {
this.refs.chipInput.focus(); this.refs.chipInput.focus();
} }

View File

@ -38,6 +38,10 @@
justify-content: center; justify-content: center;
} }
.details {
line-height: 1.75em;
}
.details, .details,
.gasDetails { .gasDetails {
color: #aaa; color: #aaa;

View File

@ -196,7 +196,7 @@ class MethodDecoding extends Component {
: text.slice(0, 50) + '...'; : text.slice(0, 50) + '...';
return ( return (
<div> <div className={ styles.details }>
<span>with the </span> <span>with the </span>
<span <span
onClick={ this.toggleInputType } onClick={ this.toggleInputType }

View File

@ -24,9 +24,26 @@ import { sha3 } from '~/api/util/sha3';
// Adapted from https://github.com/kvhnuke/etherwallet/blob/mercury/app/scripts/myetherwallet.js // Adapted from https://github.com/kvhnuke/etherwallet/blob/mercury/app/scripts/myetherwallet.js
export class Wallet { export class Signer {
static fromJson (json, password) { static fromJson (json, password) {
return Signer
.getSeed(json, password)
.then((seed) => {
return new Signer(seed);
});
}
static getSeed (json, password) {
try {
const seed = Signer.getSyncSeed(json, password);
return Promise.resolve(seed);
} catch (error) {
return Promise.reject(error);
}
}
static getSyncSeed (json, password) {
if (json.version !== 3) { if (json.version !== 3) {
throw new Error('Only V3 wallets are supported'); throw new Error('Only V3 wallets are supported');
} }
@ -43,15 +60,17 @@ export class Wallet {
if (kdfparams.prf !== 'hmac-sha256') { if (kdfparams.prf !== 'hmac-sha256') {
throw new Error('Unsupported parameters to PBKDF2'); throw new Error('Unsupported parameters to PBKDF2');
} }
derivedKey = pbkdf2Sync(pwd, salt, kdfparams.c, kdfparams.dklen, 'sha256'); derivedKey = pbkdf2Sync(pwd, salt, kdfparams.c, kdfparams.dklen, 'sha256');
} else { } else {
throw new Error('Unsupported key derivation scheme'); throw new Error('Unsupported key derivation scheme');
} }
const ciphertext = Buffer.from(json.crypto.ciphertext, 'hex'); const ciphertext = Buffer.from(json.crypto.ciphertext, 'hex');
let mac = sha3(Buffer.concat([derivedKey.slice(16, 32), ciphertext])); const mac = sha3(Buffer.concat([derivedKey.slice(16, 32), ciphertext]));
if (mac !== inHex(json.crypto.mac)) { if (mac !== inHex(json.crypto.mac)) {
throw new Error('Key derivation failed - possibly wrong passphrase'); throw new Error('Key derivation failed - possibly wrong password');
} }
const decipher = createDecipheriv( const decipher = createDecipheriv(
@ -59,6 +78,7 @@ export class Wallet {
derivedKey.slice(0, 16), derivedKey.slice(0, 16),
Buffer.from(json.crypto.cipherparams.iv, 'hex') Buffer.from(json.crypto.cipherparams.iv, 'hex')
); );
let seed = Buffer.concat([decipher.update(ciphertext), decipher.final()]); let seed = Buffer.concat([decipher.update(ciphertext), decipher.final()]);
while (seed.length < 32) { while (seed.length < 32) {
@ -66,7 +86,7 @@ export class Wallet {
seed = Buffer.concat([nullBuff, seed]); seed = Buffer.concat([nullBuff, seed]);
} }
return new Wallet(seed); return seed;
} }
constructor (seed) { constructor (seed) {

View File

@ -57,7 +57,7 @@ class List extends Component {
} }
renderAccounts () { renderAccounts () {
const { accounts, balances, empty, link, handleAddSearchToken } = this.props; const { accounts, balances, empty } = this.props;
if (empty) { if (empty) {
return ( return (
@ -80,20 +80,29 @@ class List extends Component {
return ( return (
<div <div
className={ styles.item } className={ styles.item }
key={ address }> key={ address }
<Summary >
link={ link } { this.renderSummary(account, balance, owners) }
account={ account }
balance={ balance }
owners={ owners }
handleAddSearchToken={ handleAddSearchToken }
showCertifications
/>
</div> </div>
); );
}); });
} }
renderSummary (account, balance, owners) {
const { handleAddSearchToken, link } = this.props;
return (
<Summary
account={ account }
balance={ balance }
handleAddSearchToken={ handleAddSearchToken }
link={ link }
owners={ owners }
showCertifications
/>
);
}
getAddresses () { getAddresses () {
const filteredAddresses = this.getFilteredAddresses(); const filteredAddresses = this.getFilteredAddresses();
return this.sortAddresses(filteredAddresses); return this.sortAddresses(filteredAddresses);
@ -122,7 +131,15 @@ class List extends Component {
}); });
} }
compareAccounts (accountA, accountB, key) { compareAccounts (accountA, accountB, key, _reverse = null) {
if (key && key.split(':')[1] === '-1') {
return this.compareAccounts(accountA, accountB, key.split(':')[0], true);
}
if (key === 'timestamp' && _reverse === null) {
return this.compareAccounts(accountA, accountB, key, true);
}
if (key === 'name') { if (key === 'name') {
return accountA.name.localeCompare(accountB.name); return accountA.name.localeCompare(accountB.name);
} }
@ -177,7 +194,9 @@ class List extends Component {
return tagsA.localeCompare(tagsB); return tagsA.localeCompare(tagsB);
} }
const reverse = key === 'timestamp' ? -1 : 1; const reverse = _reverse
? -1
: 1;
const metaA = accountA.meta[key]; const metaA = accountA.meta[key];
const metaB = accountB.meta[key]; const metaB = accountB.meta[key];
@ -220,8 +239,8 @@ class List extends Component {
const tags = account.meta.tags || []; const tags = account.meta.tags || [];
const name = account.name || ''; const name = account.name || '';
const values = [] const values = tags
.concat(tags, name) .concat(name)
.map(v => v.toLowerCase()); .map(v => v.toLowerCase());
return searchValues return searchValues

View File

@ -19,6 +19,7 @@ import React, { Component, PropTypes } from 'react';
import { Link } from 'react-router'; import { Link } from 'react-router';
import { isEqual } from 'lodash'; import { isEqual } from 'lodash';
import ReactTooltip from 'react-tooltip'; import ReactTooltip from 'react-tooltip';
import { FormattedMessage } from 'react-intl';
import { Balance, Container, ContainerTitle, IdentityIcon, IdentityName, Tags, Input } from '~/ui'; import { Balance, Container, ContainerTitle, IdentityIcon, IdentityName, Tags, Input } from '~/ui';
import Certifications from '~/ui/Certifications'; import Certifications from '~/ui/Certifications';
@ -107,14 +108,22 @@ export default class Summary extends Component {
/> />
); );
const description = this.getDescription(account.meta);
return ( return (
<Container> <Container>
<Tags tags={ tags } handleAddSearchToken={ handleAddSearchToken } /> <Tags tags={ tags } handleAddSearchToken={ handleAddSearchToken } />
<div className={ styles.heading }>
<IdentityIcon <IdentityIcon
address={ address } /> address={ address }
/>
<ContainerTitle <ContainerTitle
byline={ addressComponent }
className={ styles.main }
description={ description }
title={ this.renderLink() } title={ this.renderLink() }
byline={ addressComponent } /> />
</div>
{ this.renderOwners() } { this.renderOwners() }
{ this.renderBalance() } { this.renderBalance() }
@ -123,6 +132,26 @@ export default class Summary extends Component {
); );
} }
getDescription (meta = {}) {
const { blockNumber } = meta;
if (!blockNumber) {
return null;
}
const formattedBlockNumber = (new BigNumber(blockNumber)).toFormat();
return (
<FormattedMessage
id='accounts.summary.minedBlock'
defaultMessage='Mined at block #{blockNumber}'
values={ {
blockNumber: formattedBlockNumber
} }
/>
);
}
renderOwners () { renderOwners () {
const { owners } = this.props; const { owners } = this.props;
const ownersValid = (owners || []).filter((owner) => owner.address && new BigNumber(owner.address).gt(0)); const ownersValid = (owners || []).filter((owner) => owner.address && new BigNumber(owner.address).gt(0));

View File

@ -56,3 +56,12 @@
} }
} }
} }
.heading {
display: flex;
flex-direction: row;
.main {
flex: 1;
}
}

View File

@ -74,6 +74,14 @@ export default class Events extends Component {
return ( return (
<Container title='events'> <Container title='events'>
<table className={ styles.events }> <table className={ styles.events }>
<thead>
<tr>
<th />
<th className={ styles.origin }>
origin
</th>
</tr>
</thead>
<tbody>{ list }</tbody> <tbody>{ list }</tbody>
</table> </table>
</Container> </Container>

View File

@ -29,12 +29,34 @@
.event { .event {
td { td {
vertical-align: top; vertical-align: top;
padding: 1em 0.5em; padding: 0 0.5em 1.5em;
div { div {
white-space: nowrap; white-space: nowrap;
} }
&.timestamp {
padding-right: 1.5em;
text-align: right;
line-height: 1.5em;
opacity: 0.5;
white-space: nowrap;
} }
}
}
.blockNumber {
color: rgba(255, 255, 255, 0.25);
margin-top: 1.5em;
}
.origin {
text-align: left;
padding-left: 32px;
text-indent: 1em;
color: rgba(255, 255, 255, 0.5);
text-transform: uppercase;
font-size: 0.9em;
} }
.txhash { .txhash {
@ -54,14 +76,6 @@
opacity: 0.5; opacity: 0.5;
} }
.timestamp {
padding-top: 1.5em;
text-align: right;
line-height: 1.5em;
opacity: 0.5;
white-space: nowrap;
}
.eventDetails { .eventDetails {
} }

View File

@ -17,6 +17,9 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux'; import { connect } from 'react-redux';
import { bindActionCreators } from 'redux'; import { bindActionCreators } from 'redux';
import { FormattedMessage } from 'react-intl';
import BigNumber from 'bignumber.js';
import ActionDelete from 'material-ui/svg-icons/action/delete'; import ActionDelete from 'material-ui/svg-icons/action/delete';
import AvPlayArrow from 'material-ui/svg-icons/av/play-arrow'; import AvPlayArrow from 'material-ui/svg-icons/av/play-arrow';
import ContentCreate from 'material-ui/svg-icons/content/create'; import ContentCreate from 'material-ui/svg-icons/content/create';
@ -136,7 +139,9 @@ class Contract extends Component {
account={ account } account={ account }
balance={ balance } balance={ balance }
isContract isContract
/> >
{ this.renderBlockNumber(account.meta) }
</Header>
<Queries <Queries
accountsInfo={ accountsInfo } accountsInfo={ accountsInfo }
@ -156,6 +161,28 @@ class Contract extends Component {
); );
} }
renderBlockNumber (meta = {}) {
const { blockNumber } = meta;
if (!blockNumber) {
return null;
}
const formattedBlockNumber = (new BigNumber(blockNumber)).toFormat();
return (
<div className={ styles.blockNumber }>
<FormattedMessage
id='contract.minedBlock'
defaultMessage='Mined at block #{blockNumber}'
values={ {
blockNumber: formattedBlockNumber
} }
/>
</div>
);
}
renderDetails (contract) { renderDetails (contract) {
const { showDetailsDialog } = this.state; const { showDetailsDialog } = this.state;

View File

@ -1,52 +0,0 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { Link } from 'react-router';
import { Container, ContainerTitle, IdentityIcon, IdentityName } from '~/ui';
export default class Summary extends Component {
static contextTypes = {
api: React.PropTypes.object.isRequired
}
static propTypes = {
contract: PropTypes.object.isRequired,
children: PropTypes.node
}
render () {
const contract = this.props.contract;
if (!contract) {
return null;
}
const viewLink = `/app/${contract.address}`;
return (
<Container>
<IdentityIcon
address={ contract.address } />
<ContainerTitle
title={ <Link to={ viewLink }>{ <IdentityName address={ contract.address } unknown /> }</Link> }
byline={ contract.address } />
{ this.props.children }
</Container>
);
}
}

View File

@ -45,7 +45,7 @@ class Contracts extends Component {
state = { state = {
addContract: false, addContract: false,
deployContract: false, deployContract: false,
sortOrder: 'timestamp', sortOrder: 'blockNumber',
searchValues: [], searchValues: [],
searchTokens: [] searchTokens: []
} }
@ -92,7 +92,8 @@ class Contracts extends Component {
empty={ !hasContracts } empty={ !hasContracts }
order={ sortOrder } order={ sortOrder }
orderFallback='name' orderFallback='name'
handleAddSearchToken={ this.onAddSearchToken } /> handleAddSearchToken={ this.onAddSearchToken }
/>
</Page> </Page>
</div> </div>
); );
@ -109,7 +110,8 @@ class Contracts extends Component {
id='sortContracts' id='sortContracts'
order={ this.state.sortOrder } order={ this.state.sortOrder }
metas={ [ metas={ [
{ key: 'timestamp', label: 'date' } { key: 'timestamp', label: 'date' },
{ key: 'blockNumber:-1', label: 'mined block' }
] } ] }
showDefault={ false } showDefault={ false }
onChange={ onChange } /> onChange={ onChange } />

View File

@ -77,13 +77,28 @@ class TransactionPendingFormConfirm extends Component {
} }
} }
getPasswordHint () {
const { account } = this.props;
const accountHint = account && account.meta && account.meta.passwordHint;
if (accountHint) {
return accountHint;
}
const { wallet } = this.state;
const walletHint = wallet && wallet.meta && wallet.meta.passwordHint;
return walletHint || null;
}
render () { render () {
const { account, address, isSending } = this.props; const { account, address, isSending } = this.props;
const { password, wallet, walletError } = this.state; const { password, wallet, walletError } = this.state;
const isExternal = !account.uuid; const isExternal = !account.uuid;
const passwordHint = account.meta && account.meta.passwordHint const passwordHintText = this.getPasswordHint();
? (<div><span>(hint) </span>{ account.meta.passwordHint }</div>) const passwordHint = passwordHintText
? (<div><span>(hint) </span>{ passwordHintText }</div>)
: null; : null;
const isWalletOk = !isExternal || (walletError === null && wallet !== null); const isWalletOk = !isExternal || (walletError === null && wallet !== null);
@ -170,12 +185,26 @@ class TransactionPendingFormConfirm extends Component {
} }
onKeySelect = (event) => { onKeySelect = (event) => {
// Check that file have been selected
if (event.target.files.length === 0) {
return this.setState({
wallet: null,
walletError: null
});
}
const fileReader = new FileReader(); const fileReader = new FileReader();
fileReader.onload = (e) => { fileReader.onload = (e) => {
try { try {
const wallet = JSON.parse(e.target.result); const wallet = JSON.parse(e.target.result);
try {
if (wallet && typeof wallet.meta === 'string') {
wallet.meta = JSON.parse(wallet.meta);
}
} catch (e) {}
this.setState({ this.setState({
wallet, wallet,
walletError: null walletError: null

View File

@ -18,7 +18,6 @@ import React, { PropTypes, Component } from 'react';
import { observer } from 'mobx-react'; import { observer } from 'mobx-react';
import { MenuItem, Toggle } from 'material-ui'; import { MenuItem, Toggle } from 'material-ui';
import { connect } from 'react-redux'; import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import CircularProgress from 'material-ui/CircularProgress'; import CircularProgress from 'material-ui/CircularProgress';
import moment from 'moment'; import moment from 'moment';
import { throttle } from 'lodash'; import { throttle } from 'lodash';
@ -32,8 +31,6 @@ import SendIcon from 'material-ui/svg-icons/content/send';
import { Actionbar, ActionbarExport, ActionbarImport, Button, Editor, Page, Select, Input } from '~/ui'; import { Actionbar, ActionbarExport, ActionbarImport, Button, Editor, Page, Select, Input } from '~/ui';
import { DeployContract, SaveContract, LoadContract } from '~/modals'; import { DeployContract, SaveContract, LoadContract } from '~/modals';
import { setupWorker } from '~/redux/providers/compilerActions';
import WriteContractStore from './writeContractStore'; import WriteContractStore from './writeContractStore';
import styles from './writeContract.css'; import styles from './writeContract.css';
@ -42,7 +39,6 @@ class WriteContract extends Component {
static propTypes = { static propTypes = {
accounts: PropTypes.object.isRequired, accounts: PropTypes.object.isRequired,
setupWorker: PropTypes.func.isRequired,
worker: PropTypes.object, worker: PropTypes.object,
workerError: PropTypes.any workerError: PropTypes.any
}; };
@ -55,8 +51,7 @@ class WriteContract extends Component {
}; };
componentWillMount () { componentWillMount () {
const { setupWorker, worker } = this.props; const { worker } = this.props;
setupWorker();
if (worker !== undefined) { if (worker !== undefined) {
this.store.setWorker(worker); this.store.setWorker(worker);
@ -575,17 +570,10 @@ class WriteContract extends Component {
function mapStateToProps (state) { function mapStateToProps (state) {
const { accounts } = state.personal; const { accounts } = state.personal;
const { worker, error } = state.compiler; const { worker, error } = state.worker;
return { accounts, worker, workerError: error }; return { accounts, worker, workerError: error };
} }
function mapDispatchToProps (dispatch) {
return bindActionCreators({
setupWorker
}, dispatch);
}
export default connect( export default connect(
mapStateToProps, mapStateToProps
mapDispatchToProps
)(WriteContract); )(WriteContract);

View File

@ -37,6 +37,7 @@ usage! {
cmd_snapshot: bool, cmd_snapshot: bool,
cmd_restore: bool, cmd_restore: bool,
cmd_ui: bool, cmd_ui: bool,
cmd_dapp: bool,
cmd_tools: bool, cmd_tools: bool,
cmd_hash: bool, cmd_hash: bool,
cmd_kill: bool, cmd_kill: bool,
@ -525,6 +526,7 @@ mod tests {
cmd_snapshot: false, cmd_snapshot: false,
cmd_restore: false, cmd_restore: false,
cmd_ui: false, cmd_ui: false,
cmd_dapp: false,
cmd_tools: false, cmd_tools: false,
cmd_hash: false, cmd_hash: false,
cmd_db: false, cmd_db: false,

View File

@ -5,6 +5,7 @@ Parity. Ethereum Client.
Usage: Usage:
parity [options] parity [options]
parity ui [options] parity ui [options]
parity dapp <path> [options]
parity daemon <pid-file> [options] parity daemon <pid-file> [options]
parity account (new | list ) [options] parity account (new | list ) [options]
parity account import <path>... [options] parity account import <path>... [options]

View File

@ -17,7 +17,7 @@
use std::time::Duration; use std::time::Duration;
use std::io::Read; use std::io::Read;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::PathBuf; use std::path::{Path, PathBuf};
use std::cmp::max; use std::cmp::max;
use cli::{Args, ArgsError}; use cli::{Args, ArgsError};
use util::{Hashable, U256, Uint, Bytes, version_data, Secret, Address}; use util::{Hashable, U256, Uint, Bytes, version_data, Secret, Address};
@ -335,6 +335,7 @@ impl Configuration {
net_settings: self.network_settings(), net_settings: self.network_settings(),
dapps_conf: dapps_conf, dapps_conf: dapps_conf,
signer_conf: signer_conf, signer_conf: signer_conf,
dapp: self.dapp_to_open()?,
ui: self.args.cmd_ui, ui: self.args.cmd_ui,
name: self.args.flag_identity, name: self.args.flag_identity,
custom_bootnodes: self.args.flag_bootnodes.is_some(), custom_bootnodes: self.args.flag_bootnodes.is_some(),
@ -507,10 +508,28 @@ impl Configuration {
hosts: self.dapps_hosts(), hosts: self.dapps_hosts(),
user: self.args.flag_dapps_user.clone(), user: self.args.flag_dapps_user.clone(),
pass: self.args.flag_dapps_pass.clone(), pass: self.args.flag_dapps_pass.clone(),
dapps_path: self.directories().dapps, dapps_path: PathBuf::from(self.directories().dapps),
extra_dapps: if self.args.cmd_dapp {
self.args.arg_path.iter().map(|path| PathBuf::from(path)).collect()
} else {
vec![]
},
} }
} }
fn dapp_to_open(&self) -> Result<Option<String>, String> {
if !self.args.cmd_dapp {
return Ok(None);
}
let path = self.args.arg_path.get(0).map(String::as_str).unwrap_or(".");
let path = Path::new(path).canonicalize()
.map_err(|e| format!("Invalid path: {}. Error: {:?}", path, e))?;
let name = path.file_name()
.and_then(|name| name.to_str())
.ok_or_else(|| "Root path is not supported.".to_owned())?;
Ok(Some(name.into()))
}
fn gas_pricer_config(&self) -> Result<GasPricerConfig, String> { fn gas_pricer_config(&self) -> Result<GasPricerConfig, String> {
if let Some(d) = self.args.flag_gasprice.as_ref() { if let Some(d) = self.args.flag_gasprice.as_ref() {
return Ok(GasPricerConfig::Fixed(to_u256(d)?)); return Ok(GasPricerConfig::Fixed(to_u256(d)?));
@ -1030,6 +1049,7 @@ mod tests {
dapps_conf: Default::default(), dapps_conf: Default::default(),
signer_conf: Default::default(), signer_conf: Default::default(),
ui: false, ui: false,
dapp: None,
name: "".into(), name: "".into(),
custom_bootnodes: false, custom_bootnodes: false,
fat_db: Default::default(), fat_db: Default::default(),
@ -1224,6 +1244,22 @@ mod tests {
}); });
} }
#[test]
fn should_parse_dapp_opening() {
// given
let temp = RandomTempPath::new();
let name = temp.file_name().unwrap().to_str().unwrap();
create_dir(temp.as_str().to_owned()).unwrap();
// when
let conf0 = parse(&["parity", "dapp", temp.to_str().unwrap()]);
// then
assert_eq!(conf0.dapp_to_open(), Ok(Some(name.into())));
let extra_dapps = conf0.dapps_config().extra_dapps;
assert_eq!(extra_dapps, vec![temp.to_owned()]);
}
#[test] #[test]
fn should_not_bail_on_empty_line_in_reserved_peers() { fn should_not_bail_on_empty_line_in_reserved_peers() {
let temp = RandomTempPath::new(); let temp = RandomTempPath::new();

View File

@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use io::PanicHandler; use io::PanicHandler;
use rpc_apis; use rpc_apis;
@ -33,7 +34,8 @@ pub struct Configuration {
pub hosts: Option<Vec<String>>, pub hosts: Option<Vec<String>>,
pub user: Option<String>, pub user: Option<String>,
pub pass: Option<String>, pub pass: Option<String>,
pub dapps_path: String, pub dapps_path: PathBuf,
pub extra_dapps: Vec<PathBuf>,
} }
impl Default for Configuration { impl Default for Configuration {
@ -46,7 +48,8 @@ impl Default for Configuration {
hosts: Some(Vec::new()), hosts: Some(Vec::new()),
user: None, user: None,
pass: None, pass: None,
dapps_path: replace_home(&data_dir, "$BASE/dapps"), dapps_path: replace_home(&data_dir, "$BASE/dapps").into(),
extra_dapps: vec![],
} }
} }
} }
@ -80,7 +83,14 @@ pub fn new(configuration: Configuration, deps: Dependencies) -> Result<Option<We
(username.to_owned(), password) (username.to_owned(), password)
}); });
Ok(Some(setup_dapps_server(deps, configuration.dapps_path, &addr, configuration.hosts, auth)?)) Ok(Some(setup_dapps_server(
deps,
configuration.dapps_path,
configuration.extra_dapps,
&addr,
configuration.hosts,
auth
)?))
} }
pub use self::server::WebappServer; pub use self::server::WebappServer;
@ -90,11 +100,13 @@ pub use self::server::setup_dapps_server;
mod server { mod server {
use super::Dependencies; use super::Dependencies;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::PathBuf;
pub struct WebappServer; pub struct WebappServer;
pub fn setup_dapps_server( pub fn setup_dapps_server(
_deps: Dependencies, _deps: Dependencies,
_dapps_path: String, _dapps_path: PathBuf,
_extra_dapps: Vec<PathBuf>,
_url: &SocketAddr, _url: &SocketAddr,
_allowed_hosts: Option<Vec<String>>, _allowed_hosts: Option<Vec<String>>,
_auth: Option<(String, String)>, _auth: Option<(String, String)>,
@ -106,6 +118,7 @@ mod server {
#[cfg(feature = "dapps")] #[cfg(feature = "dapps")]
mod server { mod server {
use super::Dependencies; use super::Dependencies;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::io; use std::io;
@ -122,7 +135,8 @@ mod server {
pub fn setup_dapps_server( pub fn setup_dapps_server(
deps: Dependencies, deps: Dependencies,
dapps_path: String, dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
url: &SocketAddr, url: &SocketAddr,
allowed_hosts: Option<Vec<String>>, allowed_hosts: Option<Vec<String>>,
auth: Option<(String, String)>, auth: Option<(String, String)>,
@ -130,7 +144,7 @@ mod server {
use ethcore_dapps as dapps; use ethcore_dapps as dapps;
let server = dapps::ServerBuilder::new( let server = dapps::ServerBuilder::new(
dapps_path, &dapps_path,
Arc::new(Registrar { client: deps.client.clone() }), Arc::new(Registrar { client: deps.client.clone() }),
deps.remote.clone(), deps.remote.clone(),
); );
@ -141,6 +155,7 @@ mod server {
.fetch(deps.fetch.clone()) .fetch(deps.fetch.clone())
.sync_status(Arc::new(move || is_major_importing(Some(sync.status().state), client.queue_info()))) .sync_status(Arc::new(move || is_major_importing(Some(sync.status().state), client.queue_info())))
.web_proxy_tokens(Arc::new(move |token| signer.is_valid_web_proxy_access_token(&token))) .web_proxy_tokens(Arc::new(move |token| signer.is_valid_web_proxy_access_token(&token)))
.extra_dapps(&extra_dapps)
.signer_address(deps.signer.address()); .signer_address(deps.signer.address());
let server = rpc_apis::setup_rpc(server, deps.apis.clone(), rpc_apis::ApiSet::UnsafeContext); let server = rpc_apis::setup_rpc(server, deps.apis.clone(), rpc_apis::ApiSet::UnsafeContext);

View File

@ -92,6 +92,7 @@ pub struct RunCmd {
pub net_settings: NetworkSettings, pub net_settings: NetworkSettings,
pub dapps_conf: dapps::Configuration, pub dapps_conf: dapps::Configuration,
pub signer_conf: signer::Configuration, pub signer_conf: signer::Configuration,
pub dapp: Option<String>,
pub ui: bool, pub ui: bool,
pub name: String, pub name: String,
pub custom_bootnodes: bool, pub custom_bootnodes: bool,
@ -118,6 +119,17 @@ pub fn open_ui(dapps_conf: &dapps::Configuration, signer_conf: &signer::Configur
Ok(()) Ok(())
} }
pub fn open_dapp(dapps_conf: &dapps::Configuration, dapp: &str) -> Result<(), String> {
if !dapps_conf.enabled {
return Err("Cannot use DAPP command with Dapps turned off.".into())
}
let url = format!("http://{}:{}/{}/", dapps_conf.interface, dapps_conf.port, dapp);
url::open(&url);
Ok(())
}
pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<bool, String> { pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<bool, String> {
if cmd.ui && cmd.dapps_conf.enabled { if cmd.ui && cmd.dapps_conf.enabled {
// Check if Parity is already running // Check if Parity is already running
@ -441,6 +453,10 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
open_ui(&cmd.dapps_conf, &cmd.signer_conf)?; open_ui(&cmd.dapps_conf, &cmd.signer_conf)?;
} }
if let Some(dapp) = cmd.dapp {
open_dapp(&cmd.dapps_conf, &dapp)?;
}
// Handle exit // Handle exit
let restart = wait_for_exit(panic_handler, Some(updater), can_restart); let restart = wait_for_exit(panic_handler, Some(updater), can_restart);

View File

@ -125,6 +125,8 @@ const MAX_NEW_HASHES: usize = 64;
const MAX_TX_TO_IMPORT: usize = 512; const MAX_TX_TO_IMPORT: usize = 512;
const MAX_NEW_BLOCK_AGE: BlockNumber = 20; const MAX_NEW_BLOCK_AGE: BlockNumber = 20;
const MAX_TRANSACTION_SIZE: usize = 300*1024; const MAX_TRANSACTION_SIZE: usize = 300*1024;
// Maximal number of transactions in sent in single packet.
const MAX_TRANSACTIONS_TO_PROPAGATE: usize = 64;
// Min number of blocks to be behind for a snapshot sync // Min number of blocks to be behind for a snapshot sync
const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000; const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000;
const SNAPSHOT_MIN_PEERS: usize = 3; const SNAPSHOT_MIN_PEERS: usize = 3;
@ -1447,7 +1449,7 @@ impl ChainSync {
} }
let mut item_count = r.item_count(); let mut item_count = r.item_count();
trace!(target: "sync", "{} -> Transactions ({} entries)", peer_id, item_count); trace!(target: "sync", "{:02} -> Transactions ({} entries)", peer_id, item_count);
item_count = min(item_count, MAX_TX_TO_IMPORT); item_count = min(item_count, MAX_TX_TO_IMPORT);
let mut transactions = Vec::with_capacity(item_count); let mut transactions = Vec::with_capacity(item_count);
for i in 0 .. item_count { for i in 0 .. item_count {
@ -1987,11 +1989,14 @@ impl ChainSync {
stats.propagated(*hash, id, block_number); stats.propagated(*hash, id, block_number);
} }
peer_info.last_sent_transactions = all_transactions_hashes.clone(); peer_info.last_sent_transactions = all_transactions_hashes.clone();
return Some((*peer_id, all_transactions_rlp.clone())); return Some((*peer_id, all_transactions_hashes.len(), all_transactions_rlp.clone()));
} }
// Get hashes of all transactions to send to this peer // Get hashes of all transactions to send to this peer
let to_send = all_transactions_hashes.difference(&peer_info.last_sent_transactions).cloned().collect::<HashSet<_>>(); let to_send = all_transactions_hashes.difference(&peer_info.last_sent_transactions)
.take(MAX_TRANSACTIONS_TO_PROPAGATE)
.cloned()
.collect::<HashSet<_>>();
if to_send.is_empty() { if to_send.is_empty() {
return None; return None;
} }
@ -2007,22 +2012,28 @@ impl ChainSync {
} }
} }
peer_info.last_sent_transactions = all_transactions_hashes.clone(); peer_info.last_sent_transactions = all_transactions_hashes
Some((*peer_id, packet.out())) .intersection(&peer_info.last_sent_transactions)
.chain(&to_send)
.cloned()
.collect();
Some((*peer_id, to_send.len(), packet.out()))
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
// Send RLPs // Send RLPs
let sent = lucky_peers.len(); let peers = lucky_peers.len();
if sent > 0 { if peers > 0 {
for (peer_id, rlp) in lucky_peers { let mut max_sent = 0;
for (peer_id, sent, rlp) in lucky_peers {
self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp); self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp);
trace!(target: "sync", "{:02} <- Transactions ({} entries)", peer_id, sent);
max_sent = max(max_sent, sent);
} }
debug!(target: "sync", "Sent up to {} transactions to {} peers.", max_sent, peers);
trace!(target: "sync", "Sent up to {} transactions to {} peers.", transactions.len(), sent);
} }
sent peers
} }
fn propagate_latest_blocks(&mut self, io: &mut SyncIo, sealed: &[H256]) { fn propagate_latest_blocks(&mut self, io: &mut SyncIo, sealed: &[H256]) {
@ -2042,7 +2053,6 @@ impl ChainSync {
trace!(target: "sync", "Sent sealed block to all peers"); trace!(target: "sync", "Sent sealed block to all peers");
}; };
} }
self.propagate_new_transactions(io);
self.last_sent_block_number = chain_info.best_block_number; self.last_sent_block_number = chain_info.best_block_number;
} }
@ -2070,7 +2080,9 @@ impl ChainSync {
/// called when block is imported to chain - propagates the blocks and updates transactions sent to peers /// called when block is imported to chain - propagates the blocks and updates transactions sent to peers
pub fn chain_new_blocks(&mut self, io: &mut SyncIo, _imported: &[H256], invalid: &[H256], enacted: &[H256], _retracted: &[H256], sealed: &[H256], proposed: &[Bytes]) { pub fn chain_new_blocks(&mut self, io: &mut SyncIo, _imported: &[H256], invalid: &[H256], enacted: &[H256], _retracted: &[H256], sealed: &[H256], proposed: &[Bytes]) {
let queue_info = io.chain().queue_info(); let queue_info = io.chain().queue_info();
if !self.status().is_syncing(queue_info) || !sealed.is_empty() { let is_syncing = self.status().is_syncing(queue_info);
if !is_syncing || !sealed.is_empty() {
trace!(target: "sync", "Propagating blocks, state={:?}", self.state); trace!(target: "sync", "Propagating blocks, state={:?}", self.state);
self.propagate_latest_blocks(io, sealed); self.propagate_latest_blocks(io, sealed);
self.propagate_proposed_blocks(io, proposed); self.propagate_proposed_blocks(io, proposed);
@ -2080,7 +2092,7 @@ impl ChainSync {
self.restart(io); self.restart(io);
} }
if !enacted.is_empty() { if !is_syncing && !enacted.is_empty() {
// Select random peers to re-broadcast transactions to. // Select random peers to re-broadcast transactions to.
let mut random = random::new(); let mut random = random::new();
let len = self.peers.len(); let len = self.peers.len();
@ -2531,7 +2543,7 @@ mod tests {
} }
#[test] #[test]
fn propagates_new_transactions_after_new_block() { fn does_not_propagate_new_transactions_after_new_block() {
let mut client = TestBlockChainClient::new(); let mut client = TestBlockChainClient::new();
client.add_blocks(100, EachBlockWith::Uncle); client.add_blocks(100, EachBlockWith::Uncle);
client.insert_transaction_to_queue(); client.insert_transaction_to_queue();
@ -2541,16 +2553,16 @@ mod tests {
let mut io = TestIo::new(&mut client, &ss, &queue, None); let mut io = TestIo::new(&mut client, &ss, &queue, None);
let peer_count = sync.propagate_new_transactions(&mut io); let peer_count = sync.propagate_new_transactions(&mut io);
io.chain.insert_transaction_to_queue(); io.chain.insert_transaction_to_queue();
// New block import should trigger propagation. // New block import should not trigger propagation.
// (we only propagate on timeout)
sync.chain_new_blocks(&mut io, &[], &[], &[], &[], &[], &[]); sync.chain_new_blocks(&mut io, &[], &[], &[], &[], &[], &[]);
// 2 message should be send // 2 message should be send
assert_eq!(2, io.packets.len()); assert_eq!(1, io.packets.len());
// 1 peer should receive the message // 1 peer should receive the message
assert_eq!(1, peer_count); assert_eq!(1, peer_count);
// TRANSACTIONS_PACKET // TRANSACTIONS_PACKET
assert_eq!(0x02, io.packets[0].packet_id); assert_eq!(0x02, io.packets[0].packet_id);
assert_eq!(0x02, io.packets[1].packet_id);
} }
#[test] #[test]

View File

@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! benchmarking for rlp //! benchmarking for bigint
//! should be started with: //! should be started with:
//! ```bash //! ```bash
//! multirust run nightly cargo bench //! multirust run nightly cargo bench
@ -24,10 +24,10 @@
#![feature(asm)] #![feature(asm)]
extern crate test; extern crate test;
extern crate ethcore_bigint as bigint; extern crate ethcore_util;
use test::{Bencher, black_box}; use test::{Bencher, black_box};
use bigint::uint::{U256, U512, Uint, U128}; use ethcore_util::{U256, U512, Uint, U128};
#[bench] #[bench]
fn u256_add(b: &mut Bencher) { fn u256_add(b: &mut Bencher) {

View File

@ -24,12 +24,12 @@
extern crate test; extern crate test;
extern crate rlp; extern crate rlp;
extern crate ethcore_bigint as bigint; extern crate ethcore_util as util;
use test::Bencher; use test::Bencher;
use std::str::FromStr; use std::str::FromStr;
use rlp::*; use rlp::*;
use bigint::uint::U256; use util::U256;
#[bench] #[bench]
fn bench_stream_u64_value(b: &mut Bencher) { fn bench_stream_u64_value(b: &mut Bencher) {

View File

@ -18,27 +18,26 @@
//! An owning, nibble-oriented byte vector. //! An owning, nibble-oriented byte vector.
use ::NibbleSlice; use ::NibbleSlice;
use elastic_array::ElasticArray36;
#[derive(Default, PartialEq, Eq, PartialOrd, Ord, Debug)]
/// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`. /// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct NibbleVec { pub struct NibbleVec {
inner: Vec<u8>, inner: ElasticArray36<u8>,
len: usize, len: usize,
} }
impl Default for NibbleVec {
fn default() -> Self {
NibbleVec::new()
}
}
impl NibbleVec { impl NibbleVec {
/// Make a new `NibbleVec` /// Make a new `NibbleVec`
pub fn new() -> Self { pub fn new() -> Self {
NibbleVec { NibbleVec {
inner: Vec::new(), inner: ElasticArray36::new(),
len: 0
}
}
/// Make a `NibbleVec` with capacity for `n` nibbles.
pub fn with_capacity(n: usize) -> Self {
NibbleVec {
inner: Vec::with_capacity((n / 2) + (n % 2)),
len: 0 len: 0
} }
} }
@ -49,9 +48,6 @@ impl NibbleVec {
/// Retrurns true if `NibbleVec` has zero length /// Retrurns true if `NibbleVec` has zero length
pub fn is_empty(&self) -> bool { self.len == 0 } pub fn is_empty(&self) -> bool { self.len == 0 }
/// Capacity of the `NibbleVec`.
pub fn capacity(&self) -> usize { self.inner.capacity() * 2 }
/// Try to get the nibble at the given offset. /// Try to get the nibble at the given offset.
pub fn at(&self, idx: usize) -> u8 { pub fn at(&self, idx: usize) -> u8 {
if idx % 2 == 0 { if idx % 2 == 0 {
@ -109,7 +105,7 @@ impl NibbleVec {
impl<'a> From<NibbleSlice<'a>> for NibbleVec { impl<'a> From<NibbleSlice<'a>> for NibbleVec {
fn from(s: NibbleSlice<'a>) -> Self { fn from(s: NibbleSlice<'a>) -> Self {
let mut v = NibbleVec::with_capacity(s.len()); let mut v = NibbleVec::new();
for i in 0..s.len() { for i in 0..s.len() {
v.push(s.at(i)); v.push(s.at(i));
} }

View File

@ -16,8 +16,8 @@
use hash::H256; use hash::H256;
use sha3::Hashable; use sha3::Hashable;
use hashdb::{HashDB, DBValue}; use hashdb::HashDB;
use super::{TrieDB, Trie, TrieDBIterator, TrieItem, Recorder, TrieIterator}; use super::{TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query};
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
/// Additionaly it stores inserted hash-key mappings for later retrieval. /// Additionaly it stores inserted hash-key mappings for later retrieval.
@ -58,10 +58,10 @@ impl<'db> Trie for FatDB<'db> {
self.raw.contains(&key.sha3()) self.raw.contains(&key.sha3())
} }
fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>> fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
where 'a: 'b, R: Recorder where 'a: 'key
{ {
self.raw.get_recorded(&key.sha3(), rec) self.raw.get_with(&key.sha3(), query)
} }
} }
@ -104,6 +104,7 @@ impl<'db> Iterator for FatDBIterator<'db> {
#[test] #[test]
fn fatdb_to_trie() { fn fatdb_to_trie() {
use memorydb::MemoryDB; use memorydb::MemoryDB;
use hashdb::DBValue;
use trie::{FatDBMut, TrieMut}; use trie::{FatDBMut, TrieMut};
let mut memdb = MemoryDB::new(); let mut memdb = MemoryDB::new();

94
util/src/trie/lookup.rs Normal file
View File

@ -0,0 +1,94 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Trie lookup via HashDB.
use hashdb::HashDB;
use nibbleslice::NibbleSlice;
use rlp::{Rlp, View};
use ::{H256};
use super::{TrieError, Query};
use super::node::Node;
/// Trie lookup helper object.
pub struct Lookup<'a, Q: Query> {
/// database to query from.
pub db: &'a HashDB,
/// Query object to record nodes and transform data.
pub query: Q,
/// Hash to start at
pub hash: H256,
}
impl<'a, Q: Query> Lookup<'a, Q> {
/// Look up the given key. If the value is found, it will be passed to the given
/// function to decode or copy.
pub fn look_up(mut self, mut key: NibbleSlice) -> super::Result<Option<Q::Item>> {
let mut hash = self.hash;
// this loop iterates through non-inline nodes.
for depth in 0.. {
let node_data = match self.db.get(&hash) {
Some(value) => value,
None => return Err(Box::new(match depth {
0 => TrieError::InvalidStateRoot(hash),
_ => TrieError::IncompleteDatabase(hash),
})),
};
self.query.record(&hash, &node_data, depth);
// this loop iterates through all inline children (usually max 1)
// without incrementing the depth.
let mut node_data = &node_data[..];
loop {
match Node::decoded(node_data) {
Node::Leaf(slice, value) => {
return Ok(match slice == key {
true => Some(self.query.decode(value)),
false => None,
})
}
Node::Extension(slice, item) => {
if key.starts_with(&slice) {
node_data = item;
key = key.mid(slice.len());
} else {
return Ok(None)
}
}
Node::Branch(children, value) => match key.is_empty() {
true => return Ok(value.map(move |val| self.query.decode(val))),
false => {
node_data = children[key.at(0) as usize];
key = key.mid(1);
}
},
_ => return Ok(None),
}
// check if new node data is inline or hash.
let r = Rlp::new(node_data);
if r.is_data() && r.size() == 32 {
hash = r.as_val();
break
}
}
}
Ok(None)
}
}

View File

@ -38,6 +38,7 @@ pub mod recorder;
mod fatdb; mod fatdb;
mod fatdbmut; mod fatdbmut;
mod lookup;
pub use self::standardmap::{Alphabet, StandardMap, ValueMode}; pub use self::standardmap::{Alphabet, StandardMap, ValueMode};
pub use self::triedbmut::TrieDBMut; pub use self::triedbmut::TrieDBMut;
@ -76,6 +77,46 @@ pub type Result<T> = ::std::result::Result<T, Box<TrieError>>;
/// Trie-Item type. /// Trie-Item type.
pub type TrieItem<'a> = Result<(Vec<u8>, DBValue)>; pub type TrieItem<'a> = Result<(Vec<u8>, DBValue)>;
/// Description of what kind of query will be made to the trie.
///
/// This is implemented for any &mut recorder (where the query will return
/// a DBValue), any function taking raw bytes (where no recording will be made),
/// or any tuple of (&mut Recorder, FnOnce(&[u8]))
pub trait Query {
	/// Output item.
	type Item;

	/// Decode a byte-slice into the desired item.
	/// Consumes `self`, so a query can be used for at most one value.
	fn decode(self, &[u8]) -> Self::Item;

	/// Record that a node has been passed through.
	/// Arguments are the node's hash, its raw data, and its depth
	/// (root = 0). The default implementation records nothing.
	fn record(&mut self, &H256, &[u8], u32) { }
}
/// Querying with a bare recorder: the value is copied out as a `DBValue`
/// and every visited node is recorded.
impl<'a> Query for &'a mut Recorder {
	type Item = DBValue;

	fn decode(self, value: &[u8]) -> DBValue { DBValue::from_slice(value) }
	fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
		// deref through the double reference so this resolves to the
		// inherent `Recorder::record`, not this trait method.
		(&mut **self).record(hash, data, depth);
	}
}
/// Querying with a plain byte-slice function: the value is decoded by the
/// closure and nothing is recorded (the default no-op `record` applies).
impl<F, T> Query for F where F: for<'a> FnOnce(&'a [u8]) -> T {
	type Item = T;

	fn decode(self, value: &[u8]) -> T { (self)(value) }
}
/// Querying with a `(recorder, decoder)` pair: visited nodes go to the
/// recorder, the found value goes through the closure.
impl<'a, F, T> Query for (&'a mut Recorder, F) where F: FnOnce(&[u8]) -> T {
	type Item = T;

	fn decode(self, value: &[u8]) -> T { (self.1)(value) }
	fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
		self.0.record(hash, data, depth)
	}
}
/// A key-value datastore implemented as a database-backed modified Merkle tree. /// A key-value datastore implemented as a database-backed modified Merkle tree.
pub trait Trie { pub trait Trie {
/// Return the root of the trie. /// Return the root of the trie.
@ -91,13 +132,13 @@ pub trait Trie {
/// What is the value of the given key in this trie? /// What is the value of the given key in this trie?
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key { fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key {
self.get_recorded(key, &mut recorder::NoOp) self.get_with(key, DBValue::from_slice)
} }
/// Query the value of the given key in this trie while recording visited nodes /// Search for the key with the given query parameter. See the docs of the `Query`
/// to the given recorder. If the query encounters an error, the nodes passed to the recorder are unspecified. /// trait for more details.
fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> Result<Option<DBValue>> fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q)
where 'a: 'b, R: Recorder; -> Result<Option<Q::Item>> where 'a: 'key;
/// Returns a depth-first iterator over the elements of trie. /// Returns a depth-first iterator over the elements of trie.
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>>; fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>>;
@ -192,9 +233,10 @@ impl<'db> Trie for TrieKinds<'db> {
wrapper!(self, contains, key) wrapper!(self, contains, key)
} }
fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], r: &'b mut R) -> Result<Option<DBValue>> fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>>
where 'a: 'b, R: Recorder { where 'a: 'key
wrapper!(self, get_recorded, key, r) {
wrapper!(self, get_with, key, query)
} }
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>> { fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>> {

View File

@ -16,6 +16,7 @@
use elastic_array::ElasticArray36; use elastic_array::ElasticArray36;
use nibbleslice::*; use nibbleslice::*;
use nibblevec::NibbleVec;
use bytes::*; use bytes::*;
use rlp::*; use rlp::*;
use hashdb::DBValue; use hashdb::DBValue;
@ -24,40 +25,21 @@ use hashdb::DBValue;
pub type NodeKey = ElasticArray36<u8>; pub type NodeKey = ElasticArray36<u8>;
/// Type of node in the trie and essential information thereof. /// Type of node in the trie and essential information thereof.
#[derive(Eq, PartialEq, Debug)] #[derive(Eq, PartialEq, Debug, Clone)]
pub enum Node { pub enum Node<'a> {
/// Null trie node; could be an empty root or an empty branch entry. /// Null trie node; could be an empty root or an empty branch entry.
Empty, Empty,
/// Leaf node; has key slice and value. Value may not be empty. /// Leaf node; has key slice and value. Value may not be empty.
Leaf(NodeKey, DBValue), Leaf(NibbleSlice<'a>, &'a [u8]),
/// Extension node; has key slice and node data. Data may not be null. /// Extension node; has key slice and node data. Data may not be null.
Extension(NodeKey, DBValue), Extension(NibbleSlice<'a>, &'a [u8]),
/// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data. /// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data.
Branch([NodeKey; 16], Option<DBValue>) Branch([&'a [u8]; 16], Option<&'a [u8]>)
} }
impl Clone for Node { impl<'a> Node<'a> {
fn clone(&self) -> Node {
match *self {
Node::Empty => Node::Empty,
Node::Leaf(ref k, ref v) => Node::Leaf(k.clone(), v.clone()),
Node::Extension(ref k, ref v) => Node::Extension(k.clone(), v.clone()),
Node::Branch(ref k, ref v) => {
let mut branch = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()];
for i in 0 .. 16 {
branch[i] = k[i].clone();
}
Node::Branch(branch, v.clone())
}
}
}
}
impl Node {
/// Decode the `node_rlp` and return the Node. /// Decode the `node_rlp` and return the Node.
pub fn decoded(node_rlp: &[u8]) -> Node { pub fn decoded(node_rlp: &'a [u8]) -> Self {
let r = Rlp::new(node_rlp); let r = Rlp::new(node_rlp);
match r.prototype() { match r.prototype() {
// either leaf or extension - decode first item with NibbleSlice::??? // either leaf or extension - decode first item with NibbleSlice::???
@ -66,18 +48,16 @@ impl Node {
// if extension, second item is a node (either SHA3 to be looked up and // if extension, second item is a node (either SHA3 to be looked up and
// fed back into this function or inline RLP which can be fed back into this function). // fed back into this function or inline RLP which can be fed back into this function).
Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) { Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) {
(slice, true) => Node::Leaf(slice.encoded(true), DBValue::from_slice(r.at(1).data())), (slice, true) => Node::Leaf(slice, r.at(1).data()),
(slice, false) => Node::Extension(slice.encoded(false), DBValue::from_slice(r.at(1).as_raw())), (slice, false) => Node::Extension(slice, r.at(1).as_raw()),
}, },
// branch - first 16 are nodes, 17th is a value (or empty). // branch - first 16 are nodes, 17th is a value (or empty).
Prototype::List(17) => { Prototype::List(17) => {
let mut nodes: [NodeKey; 16] = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), let mut nodes = [&[] as &[u8]; 16];
NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()];
for i in 0..16 { for i in 0..16 {
nodes[i] = NodeKey::from_slice(r.at(i).as_raw()); nodes[i] = r.at(i).as_raw();
} }
Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(DBValue::from_slice(r.at(16).data())) }) Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) })
}, },
// an empty branch index. // an empty branch index.
Prototype::Data(0) => Node::Empty, Prototype::Data(0) => Node::Empty,
@ -94,23 +74,23 @@ impl Node {
match *self { match *self {
Node::Leaf(ref slice, ref value) => { Node::Leaf(ref slice, ref value) => {
let mut stream = RlpStream::new_list(2); let mut stream = RlpStream::new_list(2);
stream.append(&&**slice); stream.append(&&*slice.encoded(true));
stream.append(&&**value); stream.append(value);
stream.out() stream.out()
}, },
Node::Extension(ref slice, ref raw_rlp) => { Node::Extension(ref slice, ref raw_rlp) => {
let mut stream = RlpStream::new_list(2); let mut stream = RlpStream::new_list(2);
stream.append(&&**slice); stream.append(&&*slice.encoded(false));
stream.append_raw(&&*raw_rlp, 1); stream.append_raw(raw_rlp, 1);
stream.out() stream.out()
}, },
Node::Branch(ref nodes, ref value) => { Node::Branch(ref nodes, ref value) => {
let mut stream = RlpStream::new_list(17); let mut stream = RlpStream::new_list(17);
for i in 0..16 { for i in 0..16 {
stream.append_raw(&*nodes[i], 1); stream.append_raw(nodes[i], 1);
} }
match *value { match *value {
Some(ref n) => { stream.append(&&**n); }, Some(ref n) => { stream.append(n); },
None => { stream.append_empty_data(); }, None => { stream.append_empty_data(); },
} }
stream.out() stream.out()
@ -123,3 +103,64 @@ impl Node {
} }
} }
} }
/// An owning node type. Useful for trie iterators.
///
/// Mirrors the borrowed `Node<'a>` but owns its key and value data, so it
/// can outlive the RLP buffer it was decoded from. `Clone` is implemented
/// manually below rather than derived — presumably because `[NodeKey; 16]`
/// was not derivable for non-`Copy` element types in this compiler era;
/// NOTE(review): confirm.
#[derive(Debug, PartialEq, Eq)]
pub enum OwnedNode {
	/// Empty trie node.
	Empty,
	/// Leaf node: partial key and value.
	Leaf(NibbleVec, DBValue),
	/// Extension node: partial key and child node.
	Extension(NibbleVec, DBValue),
	/// Branch node: 16 children and an optional value.
	Branch([NodeKey; 16], Option<DBValue>),
}
impl Clone for OwnedNode {
	/// Deep-clone the node. Written out by hand: the branch child array is
	/// rebuilt element-by-element instead of relying on a derived array clone.
	fn clone(&self) -> Self {
		match *self {
			OwnedNode::Empty => OwnedNode::Empty,
			OwnedNode::Leaf(ref k, ref v) => OwnedNode::Leaf(k.clone(), v.clone()),
			OwnedNode::Extension(ref k, ref c) => OwnedNode::Extension(k.clone(), c.clone()),
			OwnedNode::Branch(ref c, ref v) => {
				// start from 16 empty keys, then copy each child over.
				let mut children = [
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
				];

				for (owned, borrowed) in children.iter_mut().zip(c.iter()) {
					*owned = borrowed.clone()
				}

				OwnedNode::Branch(children, v.as_ref().cloned())
			}
		}
	}
}
impl<'a> From<Node<'a>> for OwnedNode {
	/// Convert a borrowed, RLP-backed `Node` into an owning `OwnedNode`
	/// by copying all key and value slices into owned buffers.
	fn from(node: Node<'a>) -> Self {
		match node {
			Node::Empty => OwnedNode::Empty,
			Node::Leaf(k, v) => OwnedNode::Leaf(k.into(), DBValue::from_slice(v)),
			Node::Extension(k, child) => OwnedNode::Extension(k.into(), DBValue::from_slice(child)),
			Node::Branch(c, val) => {
				// start from 16 empty keys, then copy each borrowed child in.
				let mut children = [
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
					NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
				];

				for (owned, borrowed) in children.iter_mut().zip(c.iter()) {
					*owned = NodeKey::from_slice(borrowed)
				}

				OwnedNode::Branch(children, val.map(DBValue::from_slice))
			}
		}
	}
}

View File

@ -14,6 +14,8 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Trie query recorder.
use sha3::Hashable; use sha3::Hashable;
use {Bytes, H256}; use {Bytes, H256};
@ -30,63 +32,36 @@ pub struct Record {
pub hash: H256, pub hash: H256,
} }
/// Trie node recorder. /// Records trie nodes as they pass it.
///
/// These are used to record which nodes are visited during a trie query.
/// Inline nodes are not to be recorded, as they are contained within their parent.
pub trait Recorder {
/// Record that the given node has been visited.
///
/// The depth parameter is the depth of the visited node, with the root node having depth 0.
fn record(&mut self, hash: &H256, data: &[u8], depth: u32);
/// Drain all accepted records from the recorder in ascending order by depth.
fn drain(&mut self) -> Vec<Record> where Self: Sized;
}
/// A no-op trie recorder. This ignores everything which is thrown at it.
pub struct NoOp;
impl Recorder for NoOp {
#[inline]
fn record(&mut self, _hash: &H256, _data: &[u8], _depth: u32) {}
#[inline]
fn drain(&mut self) -> Vec<Record> { Vec::new() }
}
/// A simple recorder. Does nothing fancy but fulfills the `Recorder` interface
/// properly.
#[derive(Debug)] #[derive(Debug)]
pub struct BasicRecorder { pub struct Recorder {
nodes: Vec<Record>, nodes: Vec<Record>,
min_depth: u32, min_depth: u32,
} }
impl Default for BasicRecorder { impl Default for Recorder {
fn default() -> Self { fn default() -> Self {
BasicRecorder::new() Recorder::new()
} }
} }
impl BasicRecorder { impl Recorder {
/// Create a new `BasicRecorder` which records all given nodes. /// Create a new `Recorder` which records all given nodes.
#[inline] #[inline]
pub fn new() -> Self { pub fn new() -> Self {
BasicRecorder::with_depth(0) Recorder::with_depth(0)
} }
/// Create a `BasicRecorder` which only records nodes beyond a given depth. /// Create a `Recorder` which only records nodes beyond a given depth.
pub fn with_depth(depth: u32) -> Self { pub fn with_depth(depth: u32) -> Self {
BasicRecorder { Recorder {
nodes: Vec::new(), nodes: Vec::new(),
min_depth: depth, min_depth: depth,
} }
} }
}
impl Recorder for BasicRecorder { /// Record a visited node, given its hash, data, and depth.
fn record(&mut self, hash: &H256, data: &[u8], depth: u32) { pub fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
debug_assert_eq!(data.sha3(), *hash); debug_assert_eq!(data.sha3(), *hash);
if depth >= self.min_depth { if depth >= self.min_depth {
@ -98,7 +73,8 @@ impl Recorder for BasicRecorder {
} }
} }
fn drain(&mut self) -> Vec<Record> { /// Drain all visited records.
pub fn drain(&mut self) -> Vec<Record> {
::std::mem::replace(&mut self.nodes, Vec::new()) ::std::mem::replace(&mut self.nodes, Vec::new())
} }
} }
@ -109,20 +85,9 @@ mod tests {
use sha3::Hashable; use sha3::Hashable;
use ::H256; use ::H256;
#[test]
fn no_op_does_nothing() {
let mut no_op = NoOp;
let (node1, node2) = (&[1], &[2]);
let (hash1, hash2) = (node1.sha3(), node2.sha3());
no_op.record(&hash1, node1, 1);
no_op.record(&hash2, node2, 2);
assert_eq!(no_op.drain(), Vec::new());
}
#[test] #[test]
fn basic_recorder() { fn basic_recorder() {
let mut basic = BasicRecorder::new(); let mut basic = Recorder::new();
let node1 = vec![1, 2, 3, 4]; let node1 = vec![1, 2, 3, 4];
let node2 = vec![4, 5, 6, 7, 8, 9, 10]; let node2 = vec![4, 5, 6, 7, 8, 9, 10];
@ -148,7 +113,7 @@ mod tests {
#[test] #[test]
fn basic_recorder_min_depth() { fn basic_recorder_min_depth() {
let mut basic = BasicRecorder::with_depth(400); let mut basic = Recorder::with_depth(400);
let node1 = vec![1, 2, 3, 4]; let node1 = vec![1, 2, 3, 4];
let node2 = vec![4, 5, 6, 7, 8, 9, 10]; let node2 = vec![4, 5, 6, 7, 8, 9, 10];
@ -192,9 +157,9 @@ mod tests {
} }
let trie = TrieDB::new(&db, &root).unwrap(); let trie = TrieDB::new(&db, &root).unwrap();
let mut recorder = BasicRecorder::new(); let mut recorder = Recorder::new();
trie.get_recorded(b"pirate", &mut recorder).unwrap().unwrap(); trie.get_with(b"pirate", &mut recorder).unwrap().unwrap();
let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect();
assert_eq!(nodes, vec![ assert_eq!(nodes, vec![
@ -213,7 +178,7 @@ mod tests {
] ]
]); ]);
trie.get_recorded(b"letter", &mut recorder).unwrap().unwrap(); trie.get_with(b"letter", &mut recorder).unwrap().unwrap();
let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect();
assert_eq!(nodes, vec![ assert_eq!(nodes, vec![

View File

@ -16,9 +16,9 @@
use hash::H256; use hash::H256;
use sha3::Hashable; use sha3::Hashable;
use hashdb::{HashDB, DBValue}; use hashdb::HashDB;
use super::triedb::TrieDB; use super::triedb::TrieDB;
use super::{Trie, TrieItem, Recorder, TrieIterator}; use super::{Trie, TrieItem, TrieIterator, Query};
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
/// ///
@ -59,16 +59,17 @@ impl<'db> Trie for SecTrieDB<'db> {
self.raw.contains(&key.sha3()) self.raw.contains(&key.sha3())
} }
fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>> fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
where 'a: 'b, R: Recorder where 'a: 'key
{ {
self.raw.get_recorded(&key.sha3(), rec) self.raw.get_with(&key.sha3(), query)
} }
} }
#[test] #[test]
fn trie_to_sectrie() { fn trie_to_sectrie() {
use memorydb::MemoryDB; use memorydb::MemoryDB;
use hashdb::DBValue;
use super::triedbmut::TrieDBMut; use super::triedbmut::TrieDBMut;
use super::super::TrieMut; use super::super::TrieMut;

View File

@ -18,16 +18,14 @@ use common::*;
use hashdb::*; use hashdb::*;
use nibbleslice::*; use nibbleslice::*;
use rlp::*; use rlp::*;
use super::node::Node; use super::node::{Node, OwnedNode};
use super::recorder::{Recorder, NoOp}; use super::lookup::Lookup;
use super::{Trie, TrieItem, TrieError, TrieIterator}; use super::{Trie, TrieItem, TrieError, TrieIterator, Query};
/// A `Trie` implementation using a generic `HashDB` backing database. /// A `Trie` implementation using a generic `HashDB` backing database.
/// ///
/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object, `keys` /// Use it as a `Trie` trait object. You can use `db()` to get the backing database object.
/// to get the keys belonging to the trie in the backing database, and `db_items_remaining()` to get /// Use `get` and `contains` to query values associated with keys in the trie.
/// which items in the backing database do not belong to this trie. If this is the only trie in the
/// backing database, then `db_items_remaining()` should be empty.
/// ///
/// # Example /// # Example
/// ``` /// ```
@ -45,7 +43,6 @@ use super::{Trie, TrieItem, TrieError, TrieIterator};
/// let t = TrieDB::new(&memdb, &root).unwrap(); /// let t = TrieDB::new(&memdb, &root).unwrap();
/// assert!(t.contains(b"foo").unwrap()); /// assert!(t.contains(b"foo").unwrap());
/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar")); /// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
/// assert!(t.db_items_remaining().unwrap().is_empty());
/// } /// }
/// ``` /// ```
pub struct TrieDB<'db> { pub struct TrieDB<'db> {
@ -76,74 +73,12 @@ impl<'db> TrieDB<'db> {
self.db self.db
} }
/// Determine all the keys in the backing database that belong to the trie.
pub fn keys(&self) -> super::Result<Vec<H256>> {
let mut ret: Vec<H256> = Vec::new();
ret.push(self.root.clone());
self.accumulate_keys(self.root_node(&mut NoOp)?, &mut ret)?;
Ok(ret)
}
/// Convert a vector of hashes to a hashmap of hash to occurrences.
pub fn to_map(hashes: Vec<H256>) -> HashMap<H256, u32> {
let mut r: HashMap<H256, u32> = HashMap::new();
for h in hashes {
*r.entry(h).or_insert(0) += 1;
}
r
}
/// Determine occurrences of items in the backing database which are not related to this
/// trie.
pub fn db_items_remaining(&self) -> super::Result<HashMap<H256, i32>> {
let mut ret = self.db.keys();
for (k, v) in Self::to_map(self.keys()?) {
let keycount = *ret.get(&k).unwrap_or(&0);
match keycount <= v as i32 {
true => ret.remove(&k),
_ => ret.insert(k, keycount - v as i32),
};
}
Ok(ret)
}
/// Recursion helper for `keys`.
fn accumulate_keys(&self, node: Node, acc: &mut Vec<H256>) -> super::Result<()> {
let mut handle_payload = |payload| {
let p = Rlp::new(payload);
if p.is_data() && p.size() == 32 {
acc.push(p.as_val());
}
self.accumulate_keys(self.get_node(payload, &mut NoOp, 0)?, acc)
};
match node {
Node::Extension(_, ref payload) => handle_payload(payload)?,
Node::Branch(ref payloads, _) => for payload in payloads { handle_payload(payload)? },
_ => {},
}
Ok(())
}
/// Get the root node's RLP.
fn root_node<R: Recorder>(&self, r: &mut R) -> super::Result<Node> {
self.root_data(r).map(|d| Node::decoded(&d))
}
/// Get the data of the root node. /// Get the data of the root node.
fn root_data<R: Recorder>(&self, r: &mut R) -> super::Result<DBValue> { fn root_data(&self) -> super::Result<DBValue> {
self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root))) self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
.map(|node| { r.record(self.root, &*node, 0); node })
} }
/// Get the root node as a `Node`. /// Indentation helper for `format_all`.
fn get_node<'a, R: 'a + Recorder>(&'db self, node: &'db [u8], r: &'a mut R, depth: u32) -> super::Result<Node> {
self.get_raw_or_lookup(node, r, depth).map(|n| Node::decoded(&n))
}
/// Indentation helper for `formal_all`.
fn fmt_indent(&self, f: &mut fmt::Formatter, size: usize) -> fmt::Result { fn fmt_indent(&self, f: &mut fmt::Formatter, size: usize) -> fmt::Result {
for _ in 0..size { for _ in 0..size {
write!(f, " ")?; write!(f, " ")?;
@ -157,8 +92,8 @@ impl<'db> TrieDB<'db> {
Node::Leaf(slice, value) => writeln!(f, "'{:?}: {:?}.", slice, value.pretty())?, Node::Leaf(slice, value) => writeln!(f, "'{:?}: {:?}.", slice, value.pretty())?,
Node::Extension(ref slice, ref item) => { Node::Extension(ref slice, ref item) => {
write!(f, "'{:?} ", slice)?; write!(f, "'{:?} ", slice)?;
if let Ok(node) = self.get_node(&*item, &mut NoOp, 0) { if let Ok(node) = self.get_raw_or_lookup(&*item) {
self.fmt_all(node, f, deepness)?; self.fmt_all(Node::decoded(&node), f, deepness)?;
} }
}, },
Node::Branch(ref nodes, ref value) => { Node::Branch(ref nodes, ref value) => {
@ -168,7 +103,8 @@ impl<'db> TrieDB<'db> {
writeln!(f, "=: {:?}", v.pretty())? writeln!(f, "=: {:?}", v.pretty())?
} }
for i in 0..16 { for i in 0..16 {
match self.get_node(&*nodes[i], &mut NoOp, 0) { let node = self.get_raw_or_lookup(&*nodes[i]);
match node.as_ref().map(|n| Node::decoded(&*n)) {
Ok(Node::Empty) => {}, Ok(Node::Empty) => {},
Ok(n) => { Ok(n) => {
self.fmt_indent(f, deepness + 1)?; self.fmt_indent(f, deepness + 1)?;
@ -189,64 +125,49 @@ impl<'db> TrieDB<'db> {
Ok(()) Ok(())
} }
/// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
fn do_lookup<'key, R: 'key>(&'db self, key: &NibbleSlice<'key>, r: &'key mut R) -> super::Result<Option<DBValue>>
where 'db: 'key, R: Recorder
{
let root_rlp = self.root_data(r)?;
self.get_from_node(&root_rlp, key, r, 1)
}
/// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
/// value exists for the key.
///
/// Note: Not a public API; use Trie trait functions.
fn get_from_node<'key, R: 'key>(
&'db self,
node: &'db [u8],
key: &NibbleSlice<'key>,
r: &'key mut R,
d: u32
) -> super::Result<Option<DBValue>> where 'db: 'key, R: Recorder {
match Node::decoded(node) {
Node::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == *key => Ok(Some(value.clone())),
Node::Extension(ref slice, ref item) => {
let slice = &NibbleSlice::from_encoded(slice).0;
if key.starts_with(slice) {
let data = self.get_raw_or_lookup(&*item, r, d)?;
self.get_from_node(&data, &key.mid(slice.len()), r, d + 1)
} else {
Ok(None)
}
},
Node::Branch(ref nodes, ref value) => match key.is_empty() {
true => Ok(value.clone()),
false => {
let node = self.get_raw_or_lookup(&*nodes[key.at(0) as usize], r, d)?;
self.get_from_node(&node, &key.mid(1), r, d + 1)
}
},
_ => Ok(None)
}
}
/// Given some node-describing data `node`, return the actual node RLP. /// Given some node-describing data `node`, return the actual node RLP.
/// This could be a simple identity operation in the case that the node is sufficiently small, but /// This could be a simple identity operation in the case that the node is sufficiently small, but
/// may require a database lookup. /// may require a database lookup.
fn get_raw_or_lookup<R: Recorder>(&'db self, node: &'db [u8], rec: &mut R, d: u32) -> super::Result<DBValue> { fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> super::Result<DBValue> {
// check if its sha3 + len // check if its sha3 + len
let r = Rlp::new(node); let r = Rlp::new(node);
match r.is_data() && r.size() == 32 { match r.is_data() && r.size() == 32 {
true => { true => {
let key = r.as_val::<H256>(); let key = r.as_val::<H256>();
self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key))) self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
.map(|raw| { rec.record(&key, &raw, d); raw })
} }
false => Ok(DBValue::from_slice(node)) false => Ok(DBValue::from_slice(node))
} }
} }
} }
impl<'db> Trie for TrieDB<'db> {
	/// Depth-first iterator over the trie's (key, value) pairs.
	fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
		TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
	}

	fn root(&self) -> &H256 { self.root }

	fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
		where 'a: 'key
	{
		// delegate to the shared lookup helper, starting from this trie's root.
		Lookup {
			db: self.db,
			query: query,
			hash: self.root.clone(),
		}.look_up(NibbleSlice::new(key))
	}
}
impl<'db> fmt::Debug for TrieDB<'db> {
	/// Pretty-print the whole trie, recursively, for debugging.
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		writeln!(f, "c={:?} [", self.hash_count)?;
		// panics via `expect` if the root node is missing — `fmt` has no
		// channel to report a trie error.
		let root_rlp = self.db.get(self.root).expect("Trie root not found!");
		self.fmt_all(Node::decoded(&root_rlp), f, 0)?;
		writeln!(f, "]")
	}
}
#[derive(Clone, Eq, PartialEq)] #[derive(Clone, Eq, PartialEq)]
enum Status { enum Status {
Entering, Entering,
@ -257,7 +178,7 @@ enum Status {
#[derive(Clone, Eq, PartialEq)] #[derive(Clone, Eq, PartialEq)]
struct Crumb { struct Crumb {
node: Node, node: OwnedNode,
status: Status, status: Status,
} }
@ -265,10 +186,10 @@ impl Crumb {
/// Move on to next status in the node's sequence. /// Move on to next status in the node's sequence.
fn increment(&mut self) { fn increment(&mut self) {
self.status = match (&self.status, &self.node) { self.status = match (&self.status, &self.node) {
(_, &Node::Empty) => Status::Exiting, (_, &OwnedNode::Empty) => Status::Exiting,
(&Status::Entering, _) => Status::At, (&Status::Entering, _) => Status::At,
(&Status::At, &Node::Branch(_, _)) => Status::AtChild(0), (&Status::At, &OwnedNode::Branch(_, _)) => Status::AtChild(0),
(&Status::AtChild(x), &Node::Branch(_, _)) if x < 15 => Status::AtChild(x + 1), (&Status::AtChild(x), &OwnedNode::Branch(_, _)) if x < 15 => Status::AtChild(x + 1),
_ => Status::Exiting, _ => Status::Exiting,
} }
} }
@ -291,41 +212,40 @@ impl<'a> TrieDBIterator<'a> {
key_nibbles: Vec::new(), key_nibbles: Vec::new(),
}; };
db.root_data(&mut NoOp).and_then(|root| r.descend(&root))?; db.root_data().and_then(|root| r.descend(&root))?;
Ok(r) Ok(r)
} }
fn seek_descend<'key> ( &mut self, node: &[u8], key: &NibbleSlice<'key>, d: u32) -> super::Result<()> { fn seek_descend<'key>(&mut self, node_data: DBValue, key: &NibbleSlice<'key>) -> super::Result<()> {
match Node::decoded(node) { let node = Node::decoded(&node_data);
match node {
Node::Leaf(ref slice, _) => { Node::Leaf(ref slice, _) => {
let slice = &NibbleSlice::from_encoded(slice).0;
if slice == key { if slice == key {
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::At, status: Status::At,
node: Node::decoded(node), node: node.clone().into(),
}); });
} else { } else {
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::Exiting, status: Status::Exiting,
node: Node::decoded(node), node: node.clone().into(),
}); });
} }
self.key_nibbles.extend(slice.iter()); self.key_nibbles.extend(slice.iter());
Ok(()) Ok(())
}, },
Node::Extension(ref slice, ref item) => { Node::Extension(ref slice, ref item) => {
let slice = &NibbleSlice::from_encoded(slice).0;
if key.starts_with(slice) { if key.starts_with(slice) {
let mut r = NoOp;
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::At, status: Status::At,
node: Node::decoded(node), node: node.clone().into(),
}); });
self.key_nibbles.extend(slice.iter()); self.key_nibbles.extend(slice.iter());
let data = self.db.get_raw_or_lookup(&*item, &mut r, d)?; let data = self.db.get_raw_or_lookup(&*item)?;
self.seek_descend(&data, &key.mid(slice.len()), d + 1) self.seek_descend(data, &key.mid(slice.len()))
} else { } else {
self.descend(node)?; self.descend(&node_data)?;
Ok(()) Ok(())
} }
}, },
@ -333,20 +253,19 @@ impl<'a> TrieDBIterator<'a> {
true => { true => {
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::At, status: Status::At,
node: Node::decoded(node), node: node.clone().into(),
}); });
Ok(()) Ok(())
}, },
false => { false => {
let mut r = NoOp;
let i = key.at(0); let i = key.at(0);
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::AtChild(i as usize), status: Status::AtChild(i as usize),
node: Node::decoded(node), node: node.clone().into(),
}); });
self.key_nibbles.push(i); self.key_nibbles.push(i);
let child = self.db.get_raw_or_lookup(&*nodes[i as usize], &mut r, d)?; let child = self.db.get_raw_or_lookup(&*nodes[i as usize])?;
self.seek_descend(&child, &key.mid(1), d + 1) self.seek_descend(child, &key.mid(1))
} }
}, },
_ => Ok(()) _ => Ok(())
@ -357,10 +276,12 @@ impl<'a> TrieDBIterator<'a> {
fn descend(&mut self, d: &[u8]) -> super::Result<()> { fn descend(&mut self, d: &[u8]) -> super::Result<()> {
self.trail.push(Crumb { self.trail.push(Crumb {
status: Status::Entering, status: Status::Entering,
node: self.db.get_node(d, &mut NoOp, 0)?, node: Node::decoded(&self.db.get_raw_or_lookup(d)?).into(),
}); });
match self.trail.last().expect("just pushed item; qed").node { match &self.trail.last().expect("just pushed item; qed").node {
Node::Leaf(ref n, _) | Node::Extension(ref n, _) => { self.key_nibbles.extend(NibbleSlice::from_encoded(n).0.iter()); }, &OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => {
self.key_nibbles.extend((0..n.len()).map(|i| n.at(i)));
},
_ => {} _ => {}
} }
@ -379,9 +300,8 @@ impl<'a> TrieIterator for TrieDBIterator<'a> {
fn seek(&mut self, key: &[u8]) -> super::Result<()> { fn seek(&mut self, key: &[u8]) -> super::Result<()> {
self.trail.clear(); self.trail.clear();
self.key_nibbles.clear(); self.key_nibbles.clear();
let mut r = NoOp; let root_rlp = self.db.root_data()?;
let root_rlp = self.db.root_data(&mut r)?; self.seek_descend(root_rlp, &NibbleSlice::new(key))
self.seek_descend(&root_rlp, &NibbleSlice::new(key), 1)
} }
} }
@ -397,27 +317,27 @@ impl<'a> Iterator for TrieDBIterator<'a> {
match (b.status, b.node) { match (b.status, b.node) {
(Status::Exiting, n) => { (Status::Exiting, n) => {
match n { match n {
Node::Leaf(n, _) | Node::Extension(n, _) => { OwnedNode::Leaf(n, _) | OwnedNode::Extension(n, _) => {
let l = self.key_nibbles.len(); let l = self.key_nibbles.len();
self.key_nibbles.truncate(l - NibbleSlice::from_encoded(&*n).0.len()); self.key_nibbles.truncate(l - n.len());
}, },
Node::Branch(_, _) => { self.key_nibbles.pop(); }, OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); },
_ => {} _ => {}
} }
self.trail.pop(); self.trail.pop();
// continue // continue
}, },
(Status::At, Node::Leaf(_, v)) | (Status::At, Node::Branch(_, Some(v))) => { (Status::At, OwnedNode::Leaf(_, v)) | (Status::At, OwnedNode::Branch(_, Some(v))) => {
return Some(Ok((self.key(), v))); return Some(Ok((self.key(), v)));
}, },
(Status::At, Node::Extension(_, d)) => { (Status::At, OwnedNode::Extension(_, d)) => {
if let Err(e) = self.descend(&*d) { if let Err(e) = self.descend(&*d) {
return Some(Err(e)); return Some(Err(e));
} }
// continue // continue
}, },
(Status::At, Node::Branch(_, _)) => {}, (Status::At, OwnedNode::Branch(_, _)) => {},
(Status::AtChild(i), Node::Branch(ref children, _)) if children[i].len() > 0 => { (Status::AtChild(i), OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
match i { match i {
0 => self.key_nibbles.push(0), 0 => self.key_nibbles.push(0),
i => *self.key_nibbles.last_mut() i => *self.key_nibbles.last_mut()
@ -428,7 +348,7 @@ impl<'a> Iterator for TrieDBIterator<'a> {
} }
// continue // continue
}, },
(Status::AtChild(i), Node::Branch(_, _)) => { (Status::AtChild(i), OwnedNode::Branch(_, _)) => {
if i == 0 { if i == 0 {
self.key_nibbles.push(0); self.key_nibbles.push(0);
} }
@ -440,29 +360,6 @@ impl<'a> Iterator for TrieDBIterator<'a> {
} }
} }
impl<'db> Trie for TrieDB<'db> {
fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
}
fn root(&self) -> &H256 { self.root }
fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>>
where 'a: 'b, R: Recorder
{
self.do_lookup(&NibbleSlice::new(key), rec)
}
}
impl<'db> fmt::Debug for TrieDB<'db> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "c={:?} [", self.hash_count)?;
let root_rlp = self.db.get(self.root).expect("Trie root not found!");
self.fmt_all(Node::decoded(&root_rlp), f, 0)?;
writeln!(f, "]")
}
}
#[test] #[test]
fn iterator() { fn iterator() {
use memorydb::*; use memorydb::*;
@ -529,3 +426,23 @@ fn iterator_seek() {
iter.seek(b"C").unwrap(); iter.seek(b"C").unwrap();
assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]); assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::<Vec<_>>()[..]);
} }
#[test]
fn get_len() {
use memorydb::*;
use super::TrieMut;
use super::triedbmut::*;
let mut memdb = MemoryDB::new();
let mut root = H256::new();
{
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(b"A", b"ABC").unwrap();
t.insert(b"B", b"ABCBA").unwrap();
}
let t = TrieDB::new(&memdb, &root).unwrap();
assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()), Ok(Some(3)));
assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()), Ok(Some(5)));
assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()), Ok(None));
}

View File

@ -17,6 +17,7 @@
//! In-memory trie representation. //! In-memory trie representation.
use super::{TrieError, TrieMut}; use super::{TrieError, TrieMut};
use super::lookup::Lookup;
use super::node::Node as RlpNode; use super::node::Node as RlpNode;
use super::node::NodeKey; use super::node::NodeKey;
@ -100,22 +101,22 @@ impl Node {
fn from_rlp(rlp: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self { fn from_rlp(rlp: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self {
match RlpNode::decoded(rlp) { match RlpNode::decoded(rlp) {
RlpNode::Empty => Node::Empty, RlpNode::Empty => Node::Empty,
RlpNode::Leaf(k, v) => Node::Leaf(k, v), RlpNode::Leaf(k, v) => Node::Leaf(k.encoded(true), DBValue::from_slice(&v)),
RlpNode::Extension(key, cb) => { RlpNode::Extension(key, cb) => {
Node::Extension(key, Self::inline_or_hash(&*cb, db, storage)) Node::Extension(key.encoded(false), Self::inline_or_hash(cb, db, storage))
} }
RlpNode::Branch(children_rlp, val) => { RlpNode::Branch(children_rlp, val) => {
let mut children = empty_children(); let mut children = empty_children();
for i in 0..16 { for i in 0..16 {
let raw = &children_rlp[i]; let raw = children_rlp[i];
let child_rlp = Rlp::new(&*raw); let child_rlp = Rlp::new(raw);
if !child_rlp.is_empty() { if !child_rlp.is_empty() {
children[i] = Some(Self::inline_or_hash(&*raw, db, storage)); children[i] = Some(Self::inline_or_hash(raw, db, storage));
} }
} }
Node::Branch(children, val) Node::Branch(children, val.map(DBValue::from_slice))
} }
} }
} }
@ -370,7 +371,11 @@ impl<'a> TrieDBMut<'a> {
where 'x: 'key where 'x: 'key
{ {
match *handle { match *handle {
NodeHandle::Hash(ref hash) => self.do_db_lookup(hash, partial), NodeHandle::Hash(ref hash) => Lookup {
db: &*self.db,
query: DBValue::from_slice,
hash: hash.clone(),
}.look_up(partial),
NodeHandle::InMemory(ref handle) => match self.storage[handle] { NodeHandle::InMemory(ref handle) => match self.storage[handle] {
Node::Empty => Ok(None), Node::Empty => Ok(None),
Node::Leaf(ref key, ref value) => { Node::Leaf(ref key, ref value) => {
@ -403,54 +408,6 @@ impl<'a> TrieDBMut<'a> {
} }
} }
/// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> super::Result<Option<DBValue>>
where 'x: 'key
{
self.db.get(hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(*hash)))
.and_then(|node_rlp| self.get_from_db_node(&node_rlp, key))
}
/// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
/// value exists for the key.
///
/// Note: Not a public API; use Trie trait functions.
fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> super::Result<Option<DBValue>>
where 'x: 'key
{
match RlpNode::decoded(node) {
RlpNode::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == key => Ok(Some(value.clone())),
RlpNode::Extension(ref slice, ref item) => {
let slice = &NibbleSlice::from_encoded(slice).0;
if key.starts_with(slice) {
self.get_from_db_node(&self.get_raw_or_lookup(&*item)?, key.mid(slice.len()))
} else {
Ok(None)
}
},
RlpNode::Branch(ref nodes, ref value) => match key.is_empty() {
true => Ok(value.clone()),
false => self.get_from_db_node(&self.get_raw_or_lookup(&*nodes[key.at(0) as usize])?, key.mid(1))
},
_ => Ok(None),
}
}
/// Given some node-describing data `node`, return the actual node RLP.
/// This could be a simple identity operation in the case that the node is sufficiently small, but
/// may require a database lookup.
fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> super::Result<DBValue> {
// check if its sha3 + len
let r = Rlp::new(node);
match r.is_data() && r.size() == 32 {
true => {
let key = r.as_val::<H256>();
self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
}
false => Ok(DBValue::from_slice(node))
}
}
/// insert a key, value pair into the trie, creating new nodes if necessary. /// insert a key, value pair into the trie, creating new nodes if necessary.
fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>) fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>)
-> super::Result<(StorageHandle, bool)> -> super::Result<(StorageHandle, bool)>