Merge branch 'master' into nvolf

commit f85b9eb75b

.travis.yml (19 changed lines)
@@ -4,11 +4,13 @@ language: rust
 branches:
 only:
 - master
+- /^beta-.*$/
+- /^stable-.*$/
 matrix:
 fast_finish: true
 include:
 - rust: nightly
-env: FEATURES="--features ethcore/json-tests" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity"
+env: FEATURES="--features ethcore/json-tests" KCOV_FEATURES="" TARGETS="-p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity" ARCHIVE_SUFFIX="-${TRAVIS_OS_NAME}-${TRAVIS_TAG}"
 cache:
 apt: true
 directories:
@@ -30,6 +32,7 @@ script:
 - cargo build --release --verbose ${FEATURES}
 - cargo test --release --verbose ${FEATURES} ${TARGETS}
 - cargo bench --no-run ${FEATURES} ${TARGETS}
+- tar cvzf parity${ARCHIVE_SUFFIX}.tar.gz -C target/release parity
 after_success: |
 wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
 tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build && cmake .. && make && make install DESTDIR=../tmp && cd ../.. &&
@@ -46,10 +49,18 @@ after_success: |
 cargo doc --no-deps --verbose ${KCOV_FEATURES} ${TARGETS} &&
 echo '<meta http-equiv=refresh content=0;url=ethcore/index.html>' > target/doc/index.html &&
 pip install --user ghp-import &&
-/home/travis/.local/bin/ghp-import -n target/doc
-#&&
-#git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
+/home/travis/.local/bin/ghp-import -n target/doc &&
+git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
 env:
 global:
 - secure: 3sUjNi9mhdL5h1GTm8LONnDN/SYvUHT+WSkMl93h3nYiLCQXk8eZaPS98AS7oOaTsfW4UvnwckVFCFl49ttInsv4cd/TkAxmrJHe6kPyS9/4NWUdmP8BjicbBvL/ioSdXMECMEYzPDLV+I3KhtC2LcB6ceDEl/XwMOJlzbGf7RbtcXGVQgMLqSYY1YKjQA4vbT5nFgIS/sZu3Z9yFgN0GafnihKcizqoHhdJjs/zxmX+qJepnC6o3V6KcFnS7QHhM1JOr85twE6S422UlvNaEb5ovwLPqmOl5+fA+6shbx4AxFTY6E9Iors+OVY/JliFhrqOdCt0i2P1FUHN4kbGZQkf0rphN/ZOI2uKNFTOyXiPvppfo/ZemKmcqkwkqP9+lf5QqYmtE6hsAYagxn49xJZILl8tAYbdqxF5gxa+TEVrfsBFtz/Sv3q8QhKQNPAmjEcKyMatyEreLUIFEpFTGIco8jN4eXeSoLRdJ+Z75ihttfQWhNfUDgNL30iQLy0AgFSsh/cyb5M8y9lxrGDzDTogvaiKGwr/V45sPkcXWCkmOgMdINqBB6ZtdL3bGHdyjmYj+y3btjf3aP11k++BL0fXIaKn25aS/p/9iyGb1FyGCM03o4ZRQ3YhTOvfMRfRGf6nWbaMx9upv8o5ShSdysewhrnh3082r7u896ny1Ho=
 - secure: 0/FeVvFl3AhBW0TCPoujY9zOAYoUNMlAz3XjC04vlc4Ksfx0lGU3KFi97LlALxMWV0lfwQc7ixSe2vTgQVQuLVSU9XEW40fQgEjJlmLca2RcRx1kfzJDypuWSiCME7MWmLPH0ac4COdTDS1z5WGggv5YB7GQPCzFvcmOOaPYtF29ngCtkyB2HmNkY/W3omHFEk7Si6bsmOSHZiOAhivPl6ixnGpFyTEKPyraMMqPIj5rbEGkzgeLTiXf2ur143n/tnSr8tmP1MfQi9yS8/ONidMqnxUeuLkeNnb82zj9pVJhVXq0xF44WXJ8Za1jm0ByiTakgqpm8Juk822qjvtNulJ1XZW/fyZQZaN1dy3uq5Ud3W8wS9M7VIVl8CoXozzDpIsdPeUAtkAxeHBsZqL1vAH2yC1YJA7HPySMYzCjYqkJ2r62xYk0gXmNXphfU+F/X/rHzHsTMJPONJ54HQwu12m7zVlKIYBGHgEXg/HAM/g4ljUzl6WWR/nHH/tQM8ND/8FpHluJSZJWacq/1QNhVdTq2x6cqws2fs5A7nVpccR9+6RRgYgv6+YS2LxvFzByuZveGGoKif+uMECXN876j40araUqU528Yz9i8bHJlnM3coRBndaLNWByLcUyXCB9r9IUosUu41rr+L2mVzkSDm0GicuNCzqvzYQ9Q6QY4uQ=

+deploy:
+provider: releases
+api_key:
+secure: "t+oGT/4lsy7IScw5s86Dpntl5Nyck4qG6nhHwMScc6FYzwLldgwgJaafL8Ej+HG+b7nFLriN+Snoa4YQ5o74X5ZlSWubVREOYQlL/fq7vcPB0DwAZ0Jufq1QW2R1M+3SwwF1eAwTv2W3G7A2K7dxjCVvENcy/gdxnZ36NeUPsqaCC9UcI2Yc7+4jyQwvx6ZfBvQeu+HbKENA0eUNs2ZQOID/1IPy0LJBvSyxAQYsysXdjTzGdNu4+Iba20E8uWYe4fAbgz+gwGarXg1L6D6gKyMlWkViqWjvXWBuDJJqMQZ3rw41AwZOoh3mKd2Lc0l6l4oZcEqPuob0yKTNjz1tuJy9xKTC2F2bDzsvUgk1IRfMK5ukXXXS09ZCZWuA9/GtnsqJ1xGTiwX+DhQzpVBHaBiseSNlYE1YN/3jNyGY+iSts1qut+1BwE7swmcTLsAPoAy8Ue+f7ErNoCg1lm71vq7VO2DLn7x2NqHyHUEuJ+7olDHSdE84G7d9otDRu/+TfMOw7GXwTaha6yJRInuNsnj4CFMLNVvYACzCC2idB7f7nUZoSFi9jf18S9fCMPVmazMrFj4g95HWrVHkjpV5zRTeUdTWw6DJl6pC9HFqORHdCvLv4Rc4dm5r3CmOcAQ0ZuiccV2oKzw4/Wic96daae8M5f5KSQ/WTr+h0wXZKp0="
+skip_cleanup: true
+file: parity${ARCHIVE_SUFFIX}.tar.gz
+on:
+tags: true
README.md (38 changed lines)

@@ -7,10 +7,9 @@
 [coveralls-image]: https://coveralls.io/repos/github/ethcore/parity/badge.svg?branch=master&t=Fk0OuQ
 [coveralls-url]: https://coveralls.io/r/ethcore/parity?branch=master


 ### Building from source

-##### Ubuntu 14.04 and later
+##### Ubuntu 14.04

 ```bash
 # install rocksdb
@@ -22,10 +21,8 @@ apt-get install -y --force-yes librocksdb
 curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes

 # install nightly and make it default
-multirust update nightly && multirust default nightly
-# export rust LIBRARY_PATH
-export LIBRARY_PATH=/usr/local/lib
+multirust update nightly
+multirust default nightly

 # download and build parity
 git clone https://github.com/ethcore/parity
@@ -33,7 +30,31 @@ cd parity
 cargo build --release
 ```

-##### OSX
+##### Linux

+```bash
+# install rocksdb
+git clone --tag v4.1 --depth=1 https://github.com/facebook/rocksdb.git
+cd rocksdb
+make shared_lib
+sudo cp -a librocksdb.so* /usr/lib
+sudo ldconfig
+cd ..
+
+# install rust nightly
+curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sudo sh -s -- --yes
+
+# install nightly and make it default
+sudo multirust update nightly
+sudo multirust default nightly
+
+# download and build parity
+git clone https://github.com/ethcore/parity
+cd parity
+cargo build --release
+```
+
+##### OSX with Homebrew
+
 ```bash
 # install rocksdb && multirust
@@ -44,9 +65,6 @@ brew install multirust
 # install nightly and make it default
 multirust update nightly && multirust default nightly

-# export rust LIBRARY_PATH
-export LIBRARY_PATH=/usr/local/lib
-
 # download and build parity
 git clone https://github.com/ethcore/parity
 cd parity
doc.sh (new executable file, 4 lines)

@@ -0,0 +1,4 @@
+#!/bin/sh
+# generate documentation only for partiy and ethcore libraries
+
+cargo doc --no-deps --verbose -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity
@@ -10,12 +10,10 @@ authors = ["Ethcore <admin@ethcore.io>"]
 log = "0.3"
 env_logger = "0.3"
 rustc-serialize = "0.3"
-flate2 = "0.2"
 rocksdb = "0.3"
 heapsize = "0.2.0"
 rust-crypto = "0.2.34"
 time = "0.1"
-#interpolate_idents = { git = "https://github.com/SkylerLipthay/interpolate_idents" }
 ethcore-util = { path = "../util" }
 evmjit = { path = "../evmjit", optional = true }
 ethash = { path = "../ethash" }
@@ -1,3 +1,5 @@
+//! Single account in the system.
+
 use util::*;
 use pod_account::*;

@@ -19,6 +21,7 @@ pub struct Account {
 }

 impl Account {
+#[cfg(test)]
 /// General constructor.
 pub fn new(balance: U256, nonce: U256, storage: HashMap<H256, H256>, code: Bytes) -> Account {
 Account {
@@ -31,6 +34,8 @@ impl Account {
 }
 }

+#[cfg(test)]
+#[cfg(feature = "json-tests")]
 /// General constructor.
 pub fn from_pod(pod: PodAccount) -> Account {
 Account {
@@ -81,15 +86,8 @@ impl Account {
 }
 }

-/// Reset this account to the status of a not-yet-initialised contract.
-/// NOTE: Account should have `init_code()` called on it later.
-pub fn reset_code(&mut self) {
-self.code_hash = None;
-self.code_cache = vec![];
-}
-
 /// Set this account's code to the given code.
-/// NOTE: Account should have been created with `new_contract()` or have `reset_code()` called on it.
+/// NOTE: Account should have been created with `new_contract()`
 pub fn init_code(&mut self, code: Bytes) {
 assert!(self.code_hash.is_none());
 self.code_cache = code;
@@ -113,6 +111,7 @@ impl Account {
 /// return the nonce associated with this account.
 pub fn nonce(&self) -> &U256 { &self.nonce }

+#[cfg(test)]
 /// return the code hash associated with this account.
 pub fn code_hash(&self) -> H256 {
 self.code_hash.clone().unwrap_or(SHA3_EMPTY)
@@ -129,6 +128,7 @@ impl Account {
 }
 }

+#[cfg(test)]
 /// Provide a byte array which hashes to the `code_hash`. returns the hash as a result.
 pub fn note_code(&mut self, code: Bytes) -> Result<(), H256> {
 let h = code.sha3();
@@ -163,18 +163,14 @@ impl Account {
 }
 }

-/// return the storage root associated with this account.
-pub fn base_root(&self) -> &H256 { &self.storage_root }
+#[cfg(test)]

 /// Determine whether there are any un-`commit()`-ed storage-setting operations.
 pub fn storage_is_clean(&self) -> bool { self.storage_overlay.borrow().iter().find(|&(_, &(f, _))| f == Filth::Dirty).is_none() }

+#[cfg(test)]
 /// return the storage root associated with this account or None if it has been altered via the overlay.
 pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} }

-/// return the storage root associated with this account or None if it has been altered via the overlay.
-pub fn recent_storage_root(&self) -> &H256 { &self.storage_root }
-
 /// return the storage overlay.
 pub fn storage_overlay(&self) -> Ref<HashMap<H256, (Filth, H256)>> { self.storage_overlay.borrow() }

@@ -1,15 +1,18 @@
+//! Diff between two accounts.
+
 use util::*;
+#[cfg(test)]
 use pod_account::*;

 #[derive(Debug,Clone,PartialEq,Eq)]
 /// Change in existance type.
 // TODO: include other types of change.
 pub enum Existance {
-/// TODO [Gav Wood] Please document me
+/// Item came into existance.
 Born,
-/// TODO [Gav Wood] Please document me
+/// Item stayed in existance.
 Alive,
-/// TODO [Gav Wood] Please document me
+/// Item went out of existance.
 Died,
 }

@@ -25,20 +28,20 @@ impl fmt::Display for Existance {
 }

 #[derive(Debug,Clone,PartialEq,Eq)]
-/// TODO [Gav Wood] Please document me
+/// Account diff.
 pub struct AccountDiff {
-/// TODO [Gav Wood] Please document me
-pub balance: Diff<U256>, // Allowed to be Same
+/// Change in balance, allowed to be `Diff::Same`.
+pub balance: Diff<U256>,
-/// TODO [Gav Wood] Please document me
+/// Change in nonce, allowed to be `Diff::Same`.
 pub nonce: Diff<U256>, // Allowed to be Same
-/// TODO [Gav Wood] Please document me
+/// Change in code, allowed to be `Diff::Same`.
 pub code: Diff<Bytes>, // Allowed to be Same
-/// TODO [Gav Wood] Please document me
-pub storage: BTreeMap<H256, Diff<H256>>,// Not allowed to be Same
+/// Change in storage, values are not allowed to be `Diff::Same`.
+pub storage: BTreeMap<H256, Diff<H256>>,
 }

 impl AccountDiff {
-/// TODO [Gav Wood] Please document me
+/// Get `Existance` projection.
 pub fn existance(&self) -> Existance {
 match self.balance {
 Diff::Born(_) => Existance::Born,
@@ -47,7 +50,9 @@ impl AccountDiff {
 }
 }

-/// TODO [Gav Wood] Please document me
+#[cfg(test)]
+/// Determine difference between two optionally existant `Account`s. Returns None
+/// if they are the same.
 pub fn diff_pod(pre: Option<&PodAccount>, post: Option<&PodAccount>) -> Option<AccountDiff> {
 match (pre, post) {
 (None, Some(x)) => Some(AccountDiff {
@@ -1,3 +1,5 @@
+//! Ethcore basic typenames.
+
 use util::*;

 /// Type for a 2048-bit log-bloom, as used by our blocks.
@@ -6,10 +8,10 @@ pub type LogBloom = H2048;
 /// Constant 2048-bit datum for 0. Often used as a default.
 pub static ZERO_LOGBLOOM: LogBloom = H2048([0x00; 256]);

-/// TODO [Gav Wood] Please document me
+/// Semantic boolean for when a seal/signature is included.
 pub enum Seal {
-/// TODO [Gav Wood] Please document me
+/// The seal/signature is included.
 With,
-/// TODO [Gav Wood] Please document me
+/// The seal/signature is not included.
 Without,
 }
@@ -1,3 +1,5 @@
+//! Blockchain block.
+
 #![allow(ptr_arg)] // Because of &LastHashes -> &Vec<_>

 use common::*;
@@ -18,7 +20,7 @@ pub struct Block {
 }

 impl Block {
-/// Returns true iff the given bytes form a valid encoding of a block in RLP.
+/// Returns true if the given bytes form a valid encoding of a block in RLP.
 // TODO: implement Decoder for this and have this use that.
 pub fn is_good(b: &[u8]) -> bool {
 /*
@@ -71,16 +73,15 @@ pub struct ExecutedBlock {

 /// A set of references to `ExecutedBlock` fields that are publicly accessible.
 pub struct BlockRefMut<'a> {
-/// TODO [Gav Wood] Please document me
+/// Block header.
 pub header: &'a Header,
-/// TODO [Gav Wood] Please document me
+/// Block transactions.
 pub transactions: &'a Vec<Transaction>,
-/// TODO [Gav Wood] Please document me
+/// Block uncles.
 pub uncles: &'a Vec<Header>,
-/// TODO [Gav Wood] Please document me
+/// Transaction receipts.
 pub receipts: &'a Vec<Receipt>,
-/// TODO [Gav Wood] Please document me
+/// State.
 pub state: &'a mut State,
 }

@@ -9,6 +9,7 @@ use engine::Engine;
 use views::*;
 use header::*;
 use service::*;
+use client::BlockStatus;

 /// Block queue status
 #[derive(Debug)]
@@ -41,7 +42,7 @@ pub struct BlockQueue {
 deleting: Arc<AtomicBool>,
 ready_signal: Arc<QueueSignal>,
 empty: Arc<Condvar>,
-processing: HashSet<H256>
+processing: RwLock<HashSet<H256>>
 }

 struct UnVerifiedBlock {
@@ -106,7 +107,7 @@ impl BlockQueue {
 verification: verification.clone(),
 verifiers: verifiers,
 deleting: deleting.clone(),
-processing: HashSet::new(),
+processing: RwLock::new(HashSet::new()),
 empty: empty.clone(),
 }
 }
@@ -196,11 +197,22 @@ impl BlockQueue {
 }
 }

+/// Check if the block is currently in the queue
+pub fn block_status(&self, hash: &H256) -> BlockStatus {
+if self.processing.read().unwrap().contains(&hash) {
+return BlockStatus::Queued;
+}
+if self.verification.lock().unwrap().bad.contains(&hash) {
+return BlockStatus::Bad;
+}
+BlockStatus::Unknown
+}
+
 /// Add a block to the queue.
 pub fn import_block(&mut self, bytes: Bytes) -> ImportResult {
 let header = BlockView::new(&bytes).header();
 let h = header.hash();
-if self.processing.contains(&h) {
+if self.processing.read().unwrap().contains(&h) {
 return Err(ImportError::AlreadyQueued);
 }
 {
@@ -217,7 +229,7 @@ impl BlockQueue {

 match verify_block_basic(&header, &bytes, self.engine.deref().deref()) {
 Ok(()) => {
-self.processing.insert(h.clone());
+self.processing.write().unwrap().insert(h.clone());
 self.verification.lock().unwrap().unverified.push_back(UnVerifiedBlock { header: header, bytes: bytes });
 self.more_to_verify.notify_all();
 Ok(h)
@@ -235,10 +247,12 @@ impl BlockQueue {
 let mut verification_lock = self.verification.lock().unwrap();
 let mut verification = verification_lock.deref_mut();
 verification.bad.insert(hash.clone());
+self.processing.write().unwrap().remove(&hash);
 let mut new_verified = VecDeque::new();
 for block in verification.verified.drain(..) {
 if verification.bad.contains(&block.header.parent_hash) {
 verification.bad.insert(block.header.hash());
+self.processing.write().unwrap().remove(&block.header.hash());
 }
 else {
 new_verified.push_back(block);
@@ -247,6 +261,15 @@ impl BlockQueue {
 verification.verified = new_verified;
 }

+/// Mark given block as processed
+pub fn mark_as_good(&mut self, hashes: &[H256]) {
+let mut processing = self.processing.write().unwrap();
+for h in hashes {
+processing.remove(&h);
+}
+//TODO: reward peers
+}
+
 /// Removes up to `max` verified blocks from the queue
 pub fn drain(&mut self, max: usize) -> Vec<PreVerifiedBlock> {
 let mut verification = self.verification.lock().unwrap();
@@ -254,7 +277,6 @@ impl BlockQueue {
 let mut result = Vec::with_capacity(count);
 for _ in 0..count {
 let block = verification.verified.pop_front().unwrap();
-self.processing.remove(&block.header.hash());
 result.push(block);
 }
 self.ready_signal.reset();
@@ -294,6 +316,7 @@ mod tests {
 use block_queue::*;
 use tests::helpers::*;
 use error::*;
+use views::*;

 fn get_test_queue() -> BlockQueue {
 let spec = get_test_spec();
@@ -339,11 +362,14 @@ mod tests {
 #[test]
 fn returns_ok_for_drained_duplicates() {
 let mut queue = get_test_queue();
-if let Err(e) = queue.import_block(get_good_dummy_block()) {
+let block = get_good_dummy_block();
+let hash = BlockView::new(&block).header().hash().clone();
+if let Err(e) = queue.import_block(block) {
 panic!("error importing block that is valid by definition({:?})", e);
 }
 queue.flush();
 queue.drain(10);
+queue.mark_as_good(&[ hash ]);

 if let Err(e) = queue.import_block(get_good_dummy_block()) {
 panic!("error importing block that has already been drained ({:?})", e);
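The block queue changes above put the `processing` set behind an `RwLock` and keep hashes in it until `mark_as_good` (or `mark_as_bad`) is called, so the queue can answer `block_status` even for blocks that have been drained but not yet committed. Below is a minimal, self-contained sketch of that bookkeeping; it uses plain `String` hashes and simplified types rather than the real parity structures, purely to illustrate the flow.

```rust
use std::collections::HashSet;
use std::sync::RwLock;

// Simplified stand-ins for the real parity types (H256, BlockStatus, ...).
#[derive(Debug, PartialEq)]
enum Status { Queued, Bad, Unknown }

struct Queue {
    // Hashes imported but not yet marked good or bad.
    processing: RwLock<HashSet<String>>,
    // Hashes that failed verification.
    bad: RwLock<HashSet<String>>,
}

impl Queue {
    fn new() -> Queue {
        Queue { processing: RwLock::new(HashSet::new()), bad: RwLock::new(HashSet::new()) }
    }

    // import_block: remember the hash until it is marked good or bad.
    fn import(&self, hash: &str) -> bool {
        self.processing.write().unwrap().insert(hash.to_string())
    }

    // block_status: queued while still processing, bad if verification failed.
    fn status(&self, hash: &str) -> Status {
        if self.processing.read().unwrap().contains(hash) { return Status::Queued; }
        if self.bad.read().unwrap().contains(hash) { return Status::Bad; }
        Status::Unknown
    }

    // mark_as_good: called once the block has been committed to the chain.
    fn mark_as_good(&self, hashes: &[&str]) {
        let mut processing = self.processing.write().unwrap();
        for h in hashes { processing.remove(*h); }
    }
}

fn main() {
    let queue = Queue::new();
    queue.import("0xabc");
    assert_eq!(queue.status("0xabc"), Status::Queued);
    queue.mark_as_good(&["0xabc"]);
    assert_eq!(queue.status("0xabc"), Status::Unknown);
    println!("queue bookkeeping behaves as expected");
}
```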
@@ -1,4 +1,4 @@
-//! Fast access to blockchain data.
+//! Blockchain database.

 use util::*;
 use rocksdb::{DB, WriteBatch, Writable};
@@ -8,33 +8,27 @@ use transaction::*;
 use views::*;

 /// Represents a tree route between `from` block and `to` block:
-///
-/// - `blocks` - a vector of hashes of all blocks, ordered from `from` to `to`.
-///
-/// - `ancestor` - best common ancestor of these blocks.
-///
-/// - `index` - an index where best common ancestor would be.
 pub struct TreeRoute {
-/// TODO [debris] Please document me
+/// A vector of hashes of all blocks, ordered from `from` to `to`.
 pub blocks: Vec<H256>,
-/// TODO [debris] Please document me
+/// Best common ancestor of these blocks.
 pub ancestor: H256,
-/// TODO [debris] Please document me
+/// An index where best common ancestor would be.
 pub index: usize
 }

 /// Represents blockchain's in-memory cache size in bytes.
 #[derive(Debug)]
 pub struct CacheSize {
-/// TODO [debris] Please document me
+/// Blocks cache size.
 pub blocks: usize,
-/// TODO [debris] Please document me
+/// BlockDetails cache size.
 pub block_details: usize,
-/// TODO [debris] Please document me
+/// Transaction addresses cache size.
 pub transaction_addresses: usize,
-/// TODO [debris] Please document me
+/// Logs cache size.
 pub block_logs: usize,
-/// TODO [debris] Please document me
+/// Blooms cache size.
 pub blocks_blooms: usize
 }

@@ -63,7 +63,8 @@ impl Builtin {
 }
 }

-/// TODO [Gav Wood] Please document me
+/// Copy a bunch of bytes to a destination; if the `src` is too small to fill `dest`,
+/// leave the rest unchanged.
 pub fn copy_to(src: &[u8], dest: &mut[u8]) {
 // NICE: optimise
 for i in 0..min(src.len(), dest.len()) {
@@ -1,3 +1,5 @@
+//! Blockchain database client.
+
 use util::*;
 use rocksdb::{Options, DB};
 use blockchain::{BlockChain, BlockProvider, CacheSize};
@@ -13,9 +15,10 @@ use service::NetSyncMessage;
 use env_info::LastHashes;
 use verification::*;
 use block::*;
+pub use blockchain::TreeRoute;

 /// General block status
-#[derive(Debug)]
+#[derive(Debug, Eq, PartialEq)]
 pub enum BlockStatus {
 /// Part of the blockchain.
 InChain,
@@ -48,8 +51,6 @@ impl fmt::Display for BlockChainInfo {
 }
 }

-/// TODO [arkpar] Please document me
-pub type TreeRoute = ::blockchain::TreeRoute;

 /// Blockchain database client. Owns and manages a blockchain and a block queue.
 pub trait BlockChainClient : Sync + Send {
@@ -114,18 +115,18 @@ pub trait BlockChainClient : Sync + Send {
 }

 #[derive(Default, Clone, Debug, Eq, PartialEq)]
-/// TODO [Gav Wood] Please document me
+/// Report on the status of a client.
 pub struct ClientReport {
-/// TODO [Gav Wood] Please document me
+/// How many blocks have been imported so far.
 pub blocks_imported: usize,
-/// TODO [Gav Wood] Please document me
+/// How many transactions have been applied so far.
 pub transactions_applied: usize,
-/// TODO [Gav Wood] Please document me
+/// How much gas has been processed so far.
 pub gas_processed: U256,
 }

 impl ClientReport {
-/// TODO [Gav Wood] Please document me
+/// Alter internal reporting to reflect the additional `block` has been processed.
 pub fn accrue_block(&mut self, block: &PreVerifiedBlock) {
 self.blocks_imported += 1;
 self.transactions_applied += block.transactions.len();
@@ -204,6 +205,7 @@ impl Client {
 let mut bad = HashSet::new();
 let _import_lock = self.import_lock.lock();
 let blocks = self.block_queue.write().unwrap().drain(128);
+let mut good_blocks = Vec::with_capacity(128);
 for block in blocks {
 if bad.contains(&block.header.parent_hash) {
 self.block_queue.write().unwrap().mark_as_bad(&block.header.hash());
@@ -256,6 +258,8 @@ impl Client {
 break;
 }

+good_blocks.push(header.hash().clone());
+
 self.chain.write().unwrap().insert_block(&block.bytes); //TODO: err here?
 let ancient = if header.number() >= HISTORY { Some(header.number() - HISTORY) } else { None };
 match result.drain().commit(header.number(), &header.hash(), ancient.map(|n|(n, self.chain.read().unwrap().block_hash(n).unwrap()))) {
@@ -269,6 +273,7 @@ impl Client {
 trace!(target: "client", "Imported #{} ({})", header.number(), header.hash());
 ret += 1;
 }
+self.block_queue.write().unwrap().mark_as_good(&good_blocks);
 ret
 }

@@ -323,7 +328,11 @@ impl BlockChainClient for Client {
 }

 fn block_status(&self, hash: &H256) -> BlockStatus {
-if self.chain.read().unwrap().is_known(&hash) { BlockStatus::InChain } else { BlockStatus::Unknown }
+if self.chain.read().unwrap().is_known(&hash) {
+BlockStatus::InChain
+} else {
+self.block_queue.read().unwrap().block_status(hash)
+}
 }

 fn block_total_difficulty(&self, hash: &H256) -> Option<U256> {
@@ -370,6 +379,9 @@ impl BlockChainClient for Client {
 if self.chain.read().unwrap().is_known(&header.hash()) {
 return Err(ImportError::AlreadyInChain);
 }
+if self.block_status(&header.parent_hash) == BlockStatus::Unknown {
+return Err(ImportError::UnknownParent);
+}
 self.block_queue.write().unwrap().import_block(bytes)
 }

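With the queue tracking its own status, the client's `block_status` can fall back to the queue when the chain does not know the hash, and `import_block` can reject blocks whose parent is entirely unknown. The following is a small, self-contained sketch of that decision flow only; the enums and the toy `Client` here are simplified stand-ins, not the actual parity `Client` or its storage.

```rust
// Simplified status/error enums standing in for parity's BlockStatus and ImportError.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum BlockStatus { InChain, Queued, Unknown }

#[derive(Debug, PartialEq)]
enum ImportError { AlreadyInChain, UnknownParent }

// A toy client: the chain and the queue are just lists of known hashes.
struct Client {
    chain: Vec<&'static str>,
    queued: Vec<&'static str>,
}

impl Client {
    // Chain first, then the queue, otherwise Unknown - mirrors the combined lookup above.
    fn block_status(&self, hash: &str) -> BlockStatus {
        if self.chain.iter().any(|h| *h == hash) { return BlockStatus::InChain; }
        if self.queued.iter().any(|h| *h == hash) { return BlockStatus::Queued; }
        BlockStatus::Unknown
    }

    // Reject blocks already in the chain, or whose parent nobody has seen yet.
    fn import_block(&self, hash: &str, parent: &str) -> Result<(), ImportError> {
        if self.block_status(hash) == BlockStatus::InChain {
            return Err(ImportError::AlreadyInChain);
        }
        if self.block_status(parent) == BlockStatus::Unknown {
            return Err(ImportError::UnknownParent);
        }
        Ok(())
    }
}

fn main() {
    let client = Client { chain: vec!["a"], queued: vec!["b"] };
    assert_eq!(client.block_status("b"), BlockStatus::Queued);
    assert_eq!(client.import_block("c", "b"), Ok(()));
    assert_eq!(client.import_block("d", "zzz"), Err(ImportError::UnknownParent));
    println!("status fallback and parent check behave as expected");
}
```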
@@ -31,17 +31,16 @@ pub trait Engine : Sync + Send {

 /// Some intrinsic operation parameters; by default they take their value from the `spec()`'s `engine_params`.
 fn maximum_extra_data_size(&self) -> usize { decode(&self.spec().engine_params.get("maximumExtraDataSize").unwrap()) }
-/// TODO [Gav Wood] Please document me
+/// Maximum number of uncles a block is allowed to declare.
 fn maximum_uncle_count(&self) -> usize { 2 }
-/// TODO [Gav Wood] Please document me
+/// The nonce with which accounts begin.
 fn account_start_nonce(&self) -> U256 { decode(&self.spec().engine_params.get("accountStartNonce").unwrap()) }

-/// Block transformation functions, before and after the transactions.
+/// Block transformation functions, before the transactions.
 fn on_new_block(&self, _block: &mut ExecutedBlock) {}
-/// TODO [Gav Wood] Please document me
+/// Block transformation functions, after the transactions.
 fn on_close_block(&self, _block: &mut ExecutedBlock) {}

-// TODO: consider including State in the params for verification functions.
 /// Phase 1 quick block verification. Only does checks that are cheap. `block` (the header's full block)
 /// may be provided for additional checks. Returns either a null `Ok` or a general error detailing the problem with import.
 fn verify_block_basic(&self, _header: &Header, _block: Option<&[u8]>) -> Result<(), Error> { Ok(()) }
@@ -58,7 +57,7 @@ pub trait Engine : Sync + Send {
 // TODO: Add flags for which bits of the transaction to check.
 // TODO: consider including State in the params.
 fn verify_transaction_basic(&self, _t: &Transaction, _header: &Header) -> Result<(), Error> { Ok(()) }
-/// TODO [Gav Wood] Please document me
+/// Verify a particular transaction is valid.
 fn verify_transaction(&self, _t: &Transaction, _header: &Header) -> Result<(), Error> { Ok(()) }

 /// Don't forget to call Super::populateFromParent when subclassing & overriding.
@@ -67,11 +66,13 @@ pub trait Engine : Sync + Send {

 // TODO: builtin contract routing - to do this properly, it will require removing the built-in configuration-reading logic
 // from Spec into here and removing the Spec::builtins field.
-/// TODO [Gav Wood] Please document me
+/// Determine whether a particular address is a builtin contract.
 fn is_builtin(&self, a: &Address) -> bool { self.spec().builtins.contains_key(a) }
-/// TODO [Gav Wood] Please document me
+/// Determine the code execution cost of the builtin contract with address `a`.
+/// Panics if `is_builtin(a)` is not true.
 fn cost_of_builtin(&self, a: &Address, input: &[u8]) -> U256 { self.spec().builtins.get(a).unwrap().cost(input.len()) }
-/// TODO [Gav Wood] Please document me
+/// Execution the builtin contract `a` on `input` and return `output`.
+/// Panics if `is_builtin(a)` is not true.
 fn execute_builtin(&self, a: &Address, input: &[u8], output: &mut [u8]) { self.spec().builtins.get(a).unwrap().execute(input, output); }

 // TODO: sealing stuff - though might want to leave this for later.
@@ -24,13 +24,6 @@ pub struct EnvInfo {
 pub gas_used: U256,
 }

-impl EnvInfo {
-/// Create empty env_info initialized with zeros
-pub fn new() -> EnvInfo {
-EnvInfo::default()
-}
-}
-
 impl Default for EnvInfo {
 fn default() -> Self {
 EnvInfo {
@@ -5,22 +5,22 @@ use header::BlockNumber;
 use basic_types::LogBloom;

 #[derive(Debug, PartialEq, Eq)]
-/// TODO [Gav Wood] Please document me
+/// Error indicating an expected value was not found.
 pub struct Mismatch<T: fmt::Debug> {
-/// TODO [Gav Wood] Please document me
+/// Value expected.
 pub expected: T,
-/// TODO [Gav Wood] Please document me
+/// Value found.
 pub found: T,
 }

 #[derive(Debug, PartialEq, Eq)]
-/// TODO [Gav Wood] Please document me
+/// Error indicating value found is outside of a valid range.
 pub struct OutOfBounds<T: fmt::Debug> {
-/// TODO [Gav Wood] Please document me
+/// Minimum allowed value.
 pub min: Option<T>,
-/// TODO [Gav Wood] Please document me
+/// Maximum allowed value.
 pub max: Option<T>,
-/// TODO [Gav Wood] Please document me
+/// Value found.
 pub found: T,
 }

@@ -29,11 +29,10 @@ pub struct OutOfBounds<T: fmt::Debug> {
 pub enum ExecutionError {
 /// Returned when there gas paid for transaction execution is
 /// lower than base gas required.
-/// TODO [Gav Wood] Please document me
 NotEnoughBaseGas {
-/// TODO [Gav Wood] Please document me
+/// Absolute minimum gas required.
 required: U256,
-/// TODO [Gav Wood] Please document me
+/// Gas provided.
 got: U256
 },
 /// Returned when block (gas_used + gas) > gas_limit.
@@ -41,26 +40,26 @@ pub enum ExecutionError {
 /// If gas =< gas_limit, upstream may try to execute the transaction
 /// in next block.
 BlockGasLimitReached {
-/// TODO [Gav Wood] Please document me
+/// Gas limit of block for transaction.
 gas_limit: U256,
-/// TODO [Gav Wood] Please document me
+/// Gas used in block prior to transaction.
 gas_used: U256,
-/// TODO [Gav Wood] Please document me
+/// Amount of gas in block.
 gas: U256
 },
 /// Returned when transaction nonce does not match state nonce.
 InvalidNonce {
-/// TODO [Gav Wood] Please document me
+/// Nonce expected.
 expected: U256,
-/// TODO [Gav Wood] Please document me
+/// Nonce found.
 got: U256
 },
 /// Returned when cost of transaction (value + gas_price * gas) exceeds
 /// current sender balance.
 NotEnoughCash {
-/// TODO [Gav Wood] Please document me
+/// Minimum required balance.
 required: U512,
-/// TODO [Gav Wood] Please document me
+/// Actual balance.
 got: U512
 },
 /// Returned when internal evm error occurs.
@@ -68,76 +67,82 @@ pub enum ExecutionError {
 }

 #[derive(Debug)]
-/// TODO [Gav Wood] Please document me
+/// Errors concerning transaction proessing.
 pub enum TransactionError {
-/// TODO [Gav Wood] Please document me
+/// Transaction's gas limit (aka gas) is invalid.
 InvalidGasLimit(OutOfBounds<U256>),
 }

 #[derive(Debug, PartialEq, Eq)]
-/// TODO [arkpar] Please document me
+/// Errors concerning block processing.
 pub enum BlockError {
-/// TODO [Gav Wood] Please document me
+/// Block has too many uncles.
 TooManyUncles(OutOfBounds<usize>),
-/// TODO [Gav Wood] Please document me
-UncleWrongGeneration,
-/// TODO [Gav Wood] Please document me
+/// Extra data is of an invalid length.
 ExtraDataOutOfBounds(OutOfBounds<usize>),
-/// TODO [arkpar] Please document me
+/// Seal is incorrect format.
 InvalidSealArity(Mismatch<usize>),
-/// TODO [arkpar] Please document me
+/// Block has too much gas used.
 TooMuchGasUsed(OutOfBounds<U256>),
-/// TODO [arkpar] Please document me
+/// Uncles hash in header is invalid.
 InvalidUnclesHash(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// An uncle is from a generation too old.
 UncleTooOld(OutOfBounds<BlockNumber>),
-/// TODO [arkpar] Please document me
+/// An uncle is from the same generation as the block.
 UncleIsBrother(OutOfBounds<BlockNumber>),
-/// TODO [arkpar] Please document me
+/// An uncle is already in the chain.
 UncleInChain(H256),
-/// TODO [arkpar] Please document me
+/// An uncle has a parent not in the chain.
 UncleParentNotInChain(H256),
-/// TODO [arkpar] Please document me
+/// State root header field is invalid.
 InvalidStateRoot(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// Gas used header field is invalid.
 InvalidGasUsed(Mismatch<U256>),
-/// TODO [arkpar] Please document me
+/// Transactions root header field is invalid.
 InvalidTransactionsRoot(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// Difficulty is out of range; this can be used as an looser error prior to getting a definitive
+/// value for difficulty. This error needs only provide bounds of which it is out.
+DifficultyOutOfBounds(OutOfBounds<U256>),
+/// Difficulty header field is invalid; this is a strong error used after getting a definitive
+/// value for difficulty (which is provided).
 InvalidDifficulty(Mismatch<U256>),
-/// TODO [arkpar] Please document me
+/// Seal element of type H256 (max_hash for Ethash, but could be something else for
+/// other seal engines) is out of bounds.
+MismatchedH256SealElement(Mismatch<H256>),
+/// Proof-of-work aspect of seal, which we assume is a 256-bit value, is invalid.
+InvalidProofOfWork(OutOfBounds<U256>),
+/// Gas limit header field is invalid.
 InvalidGasLimit(OutOfBounds<U256>),
-/// TODO [arkpar] Please document me
-InvalidReceiptsStateRoot(Mismatch<H256>),
+/// Receipts trie root header field is invalid.
+InvalidReceiptsRoot(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// Timestamp header field is invalid.
 InvalidTimestamp(OutOfBounds<u64>),
-/// TODO [arkpar] Please document me
+/// Log bloom header field is invalid.
 InvalidLogBloom(Mismatch<LogBloom>),
-/// TODO [arkpar] Please document me
-InvalidEthashDifficulty(Mismatch<U256>),
-/// TODO [arkpar] Please document me
-InvalidBlockNonce(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// Parent hash field of header is invalid; this is an invalid error indicating a logic flaw in the codebase.
+/// TODO: remove and favour an assert!/panic!.
 InvalidParentHash(Mismatch<H256>),
-/// TODO [arkpar] Please document me
+/// Number field of header is invalid.
 InvalidNumber(Mismatch<BlockNumber>),
 /// Block number isn't sensible.
 RidiculousNumber(OutOfBounds<BlockNumber>),
-/// TODO [arkpar] Please document me
+/// Parent given is unknown.
 UnknownParent(H256),
-/// TODO [Gav Wood] Please document me
+/// Uncle parent given is unknown.
 UnknownUncleParent(H256),
 }

 #[derive(Debug)]
-/// TODO [arkpar] Please document me
+/// Import to the block queue result
 pub enum ImportError {
-/// TODO [arkpar] Please document me
+/// Bad block detected
 Bad(Option<Error>),
-/// TODO [arkpar] Please document me
+/// Already in the block chain
 AlreadyInChain,
-/// TODO [arkpar] Please document me
+/// Already in the block queue
 AlreadyQueued,
+/// Unknown parent
+UnknownParent,
 }

 impl From<Error> for ImportError {
@@ -152,15 +157,15 @@ pub type ImportResult = Result<H256, ImportError>;
 #[derive(Debug)]
 /// General error type which should be capable of representing all errors in ethcore.
 pub enum Error {
-/// TODO [Gav Wood] Please document me
+/// Error concerning a utility.
 Util(UtilError),
-/// TODO [Gav Wood] Please document me
+/// Error concerning block processing.
 Block(BlockError),
-/// TODO [Gav Wood] Please document me
+/// Unknown engine given.
 UnknownEngineName(String),
-/// TODO [Gav Wood] Please document me
+/// Error concerning EVM code execution.
 Execution(ExecutionError),
-/// TODO [Gav Wood] Please document me
+/// Error concerning transaction processing.
 Transaction(TransactionError),
 }

@@ -1,22 +1,22 @@
 use util::*;

 #[inline]
-/// TODO [debris] Please document me
+/// 1 Ether in Wei
 pub fn ether() -> U256 { U256::exp10(18) }

 #[inline]
-/// TODO [debris] Please document me
+/// 1 Finney in Wei
 pub fn finney() -> U256 { U256::exp10(15) }

 #[inline]
-/// TODO [debris] Please document me
+/// 1 Szabo in Wei
 pub fn szabo() -> U256 { U256::exp10(12) }

 #[inline]
-/// TODO [debris] Please document me
+/// 1 Shannon in Wei
 pub fn shannon() -> U256 { U256::exp10(9) }

 #[inline]
-/// TODO [debris] Please document me
+/// 1 Wei in Wei
 pub fn wei() -> U256 { U256::exp10(0) }

@@ -21,7 +21,7 @@ pub struct Ethash {
 }

 impl Ethash {
-/// TODO [arkpar] Please document me
+/// Create a new boxed instance of Ethash engine
 pub fn new_boxed(spec: Spec) -> Box<Engine> {
 Box::new(Ethash {
 spec: spec,

@@ -110,16 +110,18 @@ impl Engine for Ethash {
 try!(UntrustedRlp::new(&header.seal[0]).as_val::<H256>());
 try!(UntrustedRlp::new(&header.seal[1]).as_val::<H64>());

+// TODO: consider removing these lines.
 let min_difficulty = decode(self.spec().engine_params.get("minimumDifficulty").unwrap());
 if header.difficulty < min_difficulty {
-return Err(From::from(BlockError::InvalidDifficulty(Mismatch { expected: min_difficulty, found: header.difficulty })))
+return Err(From::from(BlockError::DifficultyOutOfBounds(OutOfBounds { min: Some(min_difficulty), max: None, found: header.difficulty })))
 }

 let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(quick_get_difficulty(
 &Ethash::to_ethash(header.bare_hash()),
 header.nonce().low_u64(),
 &Ethash::to_ethash(header.mix_hash()))));
 if difficulty < header.difficulty {
-return Err(From::from(BlockError::InvalidEthashDifficulty(Mismatch { expected: header.difficulty, found: difficulty })));
+return Err(From::from(BlockError::InvalidProofOfWork(OutOfBounds { min: Some(header.difficulty), max: None, found: difficulty })));
 }
 Ok(())
 }

@@ -129,10 +131,10 @@ impl Engine for Ethash {
 let mix = Ethash::from_ethash(result.mix_hash);
 let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value));
 if mix != header.mix_hash() {
-return Err(From::from(BlockError::InvalidBlockNonce(Mismatch { expected: mix, found: header.mix_hash() })));
+return Err(From::from(BlockError::MismatchedH256SealElement(Mismatch { expected: mix, found: header.mix_hash() })));
 }
 if difficulty < header.difficulty {
-return Err(From::from(BlockError::InvalidEthashDifficulty(Mismatch { expected: header.difficulty, found: difficulty })));
+return Err(From::from(BlockError::InvalidProofOfWork(OutOfBounds { min: Some(header.difficulty), max: None, found: difficulty })));
 }
 Ok(())
 }
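The substance of these hunks is the error-payload switch: a difficulty that is merely too low is a range violation rather than an equality failure, so `OutOfBounds { min, max, found }` replaces `Mismatch { expected, found }` for the difficulty and proof-of-work checks, while the mix-hash check keeps `Mismatch`. A hedged sketch of the new payload (field names exactly as in the hunks; the `min_difficulty` and `header` bindings are those used above):

// Only a lower bound applies here, so `max` stays None.
let _too_low = BlockError::DifficultyOutOfBounds(OutOfBounds {
    min: Some(min_difficulty),
    max: None,
    found: header.difficulty,
});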
@@ -3,9 +3,9 @@
 //! Contains all Ethereum network specific stuff, such as denominations and
 //! consensus specifications.

-/// TODO [Gav Wood] Please document me
+/// Export the ethash module.
 pub mod ethash;
-/// TODO [Gav Wood] Please document me
+/// Export the denominations module.
 pub mod denominations;

 pub use self::ethash::*;
@@ -15,35 +15,35 @@ pub enum Error {
 /// `BadJumpDestination` is returned when execution tried to move
 /// to position that wasn't marked with JUMPDEST instruction
 BadJumpDestination {
-/// TODO [Tomusdrw] Please document me
+/// Position the code tried to jump to.
 destination: usize
 },
 /// `BadInstructions` is returned when given instruction is not supported
 BadInstruction {
-/// TODO [Tomusdrw] Please document me
+/// Unrecognized opcode
 instruction: u8,
 },
 /// `StackUnderflow` when there is not enough stack elements to execute instruction
-/// First parameter says how many elements were needed and the second how many were actually on Stack
 StackUnderflow {
-/// TODO [Tomusdrw] Please document me
+/// Invoked instruction
 instruction: &'static str,
-/// TODO [Tomusdrw] Please document me
+/// How many stack elements was requested by instruction
 wanted: usize,
-/// TODO [Tomusdrw] Please document me
+/// How many elements were on stack
 on_stack: usize
 },
 /// When execution would exceed defined Stack Limit
 OutOfStack {
-/// TODO [Tomusdrw] Please document me
+/// Invoked instruction
 instruction: &'static str,
-/// TODO [Tomusdrw] Please document me
+/// How many stack elements instruction wanted to push
 wanted: usize,
-/// TODO [Tomusdrw] Please document me
+/// What was the stack limit
 limit: usize
 },
 /// Returned on evm internal error. Should never be ignored during development.
 /// Likely to cause consensus issues.
+#[allow(dead_code)] // created only by jit
 Internal,
 }

@@ -1,25 +1,39 @@
 //! Evm factory.
+//!
+//! TODO: consider spliting it into two separate files.
+#[cfg(test)]
 use std::fmt;
 use evm::Evm;

 #[derive(Clone)]
-/// TODO [Tomusdrw] Please document me
+/// Type of EVM to use.
 pub enum VMType {
-/// TODO [Tomusdrw] Please document me
+/// JIT EVM
+#[cfg(feature="jit")]
 Jit,
-/// TODO [Tomusdrw] Please document me
+/// RUST EVM
 Interpreter
 }

+#[cfg(test)]
 impl fmt::Display for VMType {
+#[cfg(feature="jit")]
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 write!(f, "{}", match *self {
 VMType::Jit => "JIT",
 VMType::Interpreter => "INT"
 })
 }
+#[cfg(not(feature="jit"))]
+fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+write!(f, "{}", match *self {
+VMType::Interpreter => "INT"
+})
+}
 }

+#[cfg(test)]
+#[cfg(feature = "json-tests")]
 impl VMType {
 /// Return all possible VMs (JIT, Interpreter)
 #[cfg(feature="jit")]

@@ -41,10 +55,11 @@ pub struct Factory {

 impl Factory {
 /// Create fresh instance of VM
+#[cfg(feature="jit")]
 pub fn create(&self) -> Box<Evm> {
 match self.evm {
 VMType::Jit => {
-Factory::jit()
+Box::new(super::jit::JitEvm)
 },
 VMType::Interpreter => {
 Box::new(super::interpreter::Interpreter)

@@ -52,22 +67,23 @@ impl Factory {
 }
 }

+/// Create fresh instance of VM
+#[cfg(not(feature="jit"))]
+pub fn create(&self) -> Box<Evm> {
+match self.evm {
+VMType::Interpreter => {
+Box::new(super::interpreter::Interpreter)
+}
+}
+}
+
 /// Create new instance of specific `VMType` factory
+#[cfg(test)]
 pub fn new(evm: VMType) -> Factory {
 Factory {
 evm: evm
 }
 }

-#[cfg(feature = "jit")]
-fn jit() -> Box<Evm> {
-Box::new(super::jit::JitEvm)
-}
-
-#[cfg(not(feature = "jit"))]
-fn jit() -> Box<Evm> {
-unimplemented!()
-}
 }
 impl Default for Factory {
 /// Returns jitvm factory
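Taken together, the factory hunks gate the JIT arm behind `feature = "jit"`, give the non-JIT build its own `create`, and drop the `jit()` indirection. A minimal usage sketch mirroring how the tests below drive it (`params` and `ext` are assumed to exist):

let factory = Factory::new(VMType::Interpreter); // `new` is #[cfg(test)]-only after this change
let vm = factory.create();                       // Box<Evm>; a JIT-free build picks the interpreter
// let gas_left = vm.exec(params, &mut ext).unwrap();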
@@ -95,6 +111,18 @@ fn test_create_vm() {
 /// Create tests by injecting different VM factories
 #[macro_export]
 macro_rules! evm_test(
+(ignorejit => $name_test: ident: $name_jit: ident, $name_int: ident) => {
+#[test]
+#[ignore]
+#[cfg(feature = "jit")]
+fn $name_jit() {
+$name_test(Factory::new(VMType::Jit));
+}
+#[test]
+fn $name_int() {
+$name_test(Factory::new(VMType::Interpreter));
+}
+};
 ($name_test: ident: $name_jit: ident, $name_int: ident) => {
 #[test]
 #[cfg(feature = "jit")]
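The new `ignorejit =>` arm generates the JIT twin with `#[ignore]` so a JIT-crashing case no longer blocks the suite, while the interpreter twin still runs. It is used further down in this diff, e.g.:

// Marks test_sender_jit as #[ignore]; test_sender_int runs normally.
evm_test!{ignorejit => test_sender: test_sender_jit, test_sender_int}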
@@ -1,11 +1,10 @@
 ///! Rust VM implementation

 use common::*;
-use evm;
 use super::instructions as instructions;
 use super::instructions::Instruction;
 use std::marker::Copy;
-use evm::{MessageCallResult, ContractCreateResult};
+use evm::{self, MessageCallResult, ContractCreateResult};

 #[cfg(not(feature = "evm-debug"))]
 macro_rules! evm_debug {
@@ -2,7 +2,6 @@

 pub mod ext;
 pub mod evm;
-/// TODO [Tomusdrw] Please document me
 pub mod interpreter;
 #[macro_use]
 pub mod factory;
@@ -1,25 +1,45 @@
 use common::*;
 use evm;
 use evm::{Ext, Schedule, Factory, VMType, ContractCreateResult, MessageCallResult};
+use std::fmt::Debug;

 struct FakeLogEntry {
 topics: Vec<H256>,
 data: Bytes
 }

+#[derive(PartialEq, Eq, Hash, Debug)]
+enum FakeCallType {
+CALL, CREATE
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+struct FakeCall {
+call_type: FakeCallType,
+gas: U256,
+sender_address: Option<Address>,
+receive_address: Option<Address>,
+value: Option<U256>,
+data: Bytes,
+code_address: Option<Address>
+}
+
 /// Fake externalities test structure.
 ///
 /// Can't do recursive calls.
 #[derive(Default)]
 struct FakeExt {
+sstore_clears: usize,
+depth: usize,
 store: HashMap<H256, H256>,
-_balances: HashMap<Address, U256>,
 blockhashes: HashMap<U256, H256>,
 codes: HashMap<Address, Bytes>,
 logs: Vec<FakeLogEntry>,
 _suicides: HashSet<Address>,
 info: EnvInfo,
-schedule: Schedule
+schedule: Schedule,
+balances: HashMap<Address, U256>,
+calls: HashSet<FakeCall>
 }

 impl FakeExt {
@@ -43,31 +63,50 @@ impl Ext for FakeExt {
 self.store.insert(key, value);
 }

-fn exists(&self, _address: &Address) -> bool {
-unimplemented!();
+fn exists(&self, address: &Address) -> bool {
+self.balances.contains_key(address)
 }

-fn balance(&self, _address: &Address) -> U256 {
-unimplemented!();
+fn balance(&self, address: &Address) -> U256 {
+self.balances.get(address).unwrap().clone()
 }

 fn blockhash(&self, number: &U256) -> H256 {
 self.blockhashes.get(number).unwrap_or(&H256::new()).clone()
 }

-fn create(&mut self, _gas: &U256, _value: &U256, _code: &[u8]) -> ContractCreateResult {
-unimplemented!();
+fn create(&mut self, gas: &U256, value: &U256, code: &[u8]) -> ContractCreateResult {
+self.calls.insert(FakeCall {
+call_type: FakeCallType::CREATE,
+gas: gas.clone(),
+sender_address: None,
+receive_address: None,
+value: Some(value.clone()),
+data: code.to_vec(),
+code_address: None
+});
+ContractCreateResult::Failed
 }

 fn call(&mut self,
-_gas: &U256,
-_sender_address: &Address,
-_receive_address: &Address,
-_value: Option<U256>,
-_data: &[u8],
-_code_address: &Address,
+gas: &U256,
+sender_address: &Address,
+receive_address: &Address,
+value: Option<U256>,
+data: &[u8],
+code_address: &Address,
 _output: &mut [u8]) -> MessageCallResult {
-unimplemented!();
+self.calls.insert(FakeCall {
+call_type: FakeCallType::CALL,
+gas: gas.clone(),
+sender_address: Some(sender_address.clone()),
+receive_address: Some(receive_address.clone()),
+value: value,
+data: data.to_vec(),
+code_address: Some(code_address.clone())
+});
+MessageCallResult::Success(gas.clone())
 }

 fn extcode(&self, address: &Address) -> Bytes {

@@ -98,11 +137,11 @@ impl Ext for FakeExt {
 }

 fn depth(&self) -> usize {
-unimplemented!();
+self.depth
 }

 fn inc_sstore_clears(&mut self) {
-unimplemented!();
+self.sstore_clears += 1;
 }
 }
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_988));
|
assert_eq!(gas_left, U256::from(79_988));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe").unwrap());
|
assert_store(&ext, 0, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_sha3: test_sha3_jit, test_sha3_int}
|
evm_test!{test_sha3: test_sha3_jit, test_sha3_int}
|
||||||
@ -170,7 +209,7 @@ fn test_sha3(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_961));
|
assert_eq!(gas_left, U256::from(79_961));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").unwrap());
|
assert_store(&ext, 0, "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_address: test_address_jit, test_address_int}
|
evm_test!{test_address: test_address_jit, test_address_int}
|
||||||
@ -190,7 +229,7 @@ fn test_address(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000f572e5295c57f15886f9b263e2f6d2d6c7b5ec6");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_origin: test_origin_jit, test_origin_int}
|
evm_test!{test_origin: test_origin_jit, test_origin_int}
|
||||||
@ -212,11 +251,10 @@ fn test_origin(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681").unwrap());
|
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO [todr] Fails with Signal 11 on JIT
|
evm_test!{ignorejit => test_sender: test_sender_jit, test_sender_int}
|
||||||
evm_test!{test_sender: test_sender_jit, test_sender_int}
|
|
||||||
fn test_sender(factory: super::Factory) {
|
fn test_sender(factory: super::Factory) {
|
||||||
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||||
let sender = Address::from_str("cd1722f2947def4cf144679da39c4c32bdc35681").unwrap();
|
let sender = Address::from_str("cd1722f2947def4cf144679da39c4c32bdc35681").unwrap();
|
||||||
@ -235,7 +273,7 @@ fn test_sender(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681").unwrap());
|
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_extcodecopy: test_extcodecopy_jit, test_extcodecopy_int}
|
evm_test!{test_extcodecopy: test_extcodecopy_jit, test_extcodecopy_int}
|
||||||
@ -270,7 +308,7 @@ fn test_extcodecopy(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_935));
|
assert_eq!(gas_left, U256::from(79_935));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("6005600055000000000000000000000000000000000000000000000000000000").unwrap());
|
assert_store(&ext, 0, "6005600055000000000000000000000000000000000000000000000000000000");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_log_empty: test_log_empty_jit, test_log_empty_int}
|
evm_test!{test_log_empty: test_log_empty_jit, test_log_empty_int}
|
||||||
@ -328,7 +366,7 @@ fn test_log_sender(factory: super::Factory) {
|
|||||||
assert_eq!(ext.logs[0].data, "ff00000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap());
|
assert_eq!(ext.logs[0].data, "ff00000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_blockhash: test_blockhash_jit, test_blockhash_int}
|
evm_test!{ignorejit => test_blockhash: test_blockhash_jit, test_blockhash_int}
|
||||||
fn test_blockhash(factory: super::Factory) {
|
fn test_blockhash(factory: super::Factory) {
|
||||||
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||||
let code = "600040600055".from_hex().unwrap();
|
let code = "600040600055".from_hex().unwrap();
|
||||||
@ -369,7 +407,7 @@ fn test_calldataload(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_991));
|
assert_eq!(gas_left, U256::from(79_991));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("23ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff23").unwrap());
|
assert_store(&ext, 0, "23ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff23");
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -390,7 +428,7 @@ fn test_author(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000f572e5295c57f15886f9b263e2f6d2d6c7b5ec6");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_timestamp: test_timestamp_jit, test_timestamp_int}
|
evm_test!{test_timestamp: test_timestamp_jit, test_timestamp_int}
|
||||||
@ -410,7 +448,7 @@ fn test_timestamp(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000000000000000000000000000000000000001234").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000001234");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_number: test_number_jit, test_number_int}
|
evm_test!{test_number: test_number_jit, test_number_int}
|
||||||
@ -430,7 +468,7 @@ fn test_number(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000000000000000000000000000000000000001234").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000001234");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_difficulty: test_difficulty_jit, test_difficulty_int}
|
evm_test!{test_difficulty: test_difficulty_jit, test_difficulty_int}
|
||||||
@ -450,7 +488,7 @@ fn test_difficulty(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000000000000000000000000000000000000001234").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000001234");
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_gas_limit: test_gas_limit_jit, test_gas_limit_int}
|
evm_test!{test_gas_limit: test_gas_limit_jit, test_gas_limit_int}
|
||||||
@ -470,6 +508,421 @@ fn test_gas_limit(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_995));
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &H256::from_str("0000000000000000000000000000000000000000000000000000000000001234").unwrap());
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000001234");
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_mul: test_mul_jit, test_mul_int}
|
||||||
|
fn test_mul(factory: super::Factory) {
|
||||||
|
let code = "65012365124623626543219002600055".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "000000000000000000000000000000000000000000000000734349397b853383");
|
||||||
|
assert_eq!(gas_left, U256::from(79_983));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_sub: test_sub_jit, test_sub_int}
|
||||||
|
fn test_sub(factory: super::Factory) {
|
||||||
|
let code = "65012365124623626543219003600055".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000012364ad0302");
|
||||||
|
assert_eq!(gas_left, U256::from(79_985));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_div: test_div_jit, test_div_int}
|
||||||
|
fn test_div(factory: super::Factory) {
|
||||||
|
let code = "65012365124623626543219004600055".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "000000000000000000000000000000000000000000000000000000000002e0ac");
|
||||||
|
assert_eq!(gas_left, U256::from(79_983));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_div_zero: test_div_zero_jit, test_div_zero_int}
|
||||||
|
fn test_div_zero(factory: super::Factory) {
|
||||||
|
let code = "6501236512462360009004600055".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_eq!(gas_left, U256::from(94_983));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_mod: test_mod_jit, test_mod_int}
|
||||||
|
fn test_mod(factory: super::Factory) {
|
||||||
|
let code = "650123651246236265432290066000556501236512462360009006600155".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000076b4b");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_eq!(gas_left, U256::from(74_966));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_smod: test_smod_jit, test_smod_int}
|
||||||
|
fn test_smod(factory: super::Factory) {
|
||||||
|
let code = "650123651246236265432290076000556501236512462360009007600155".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000076b4b");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_eq!(gas_left, U256::from(74_966));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_sdiv: test_sdiv_jit, test_sdiv_int}
|
||||||
|
fn test_sdiv(factory: super::Factory) {
|
||||||
|
let code = "650123651246236265432290056000556501236512462360009005600155".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "000000000000000000000000000000000000000000000000000000000002e0ac");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_eq!(gas_left, U256::from(74_966));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_exp: test_exp_jit, test_exp_int}
|
||||||
|
fn test_exp(factory: super::Factory) {
|
||||||
|
let code = "6016650123651246230a6000556001650123651246230a6001556000650123651246230a600255".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "90fd23767b60204c3d6fc8aec9e70a42a3f127140879c133a20129a597ed0c59");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000012365124623");
|
||||||
|
assert_store(&ext, 2, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_eq!(gas_left, U256::from(39_923));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_comparison: test_comparison_jit, test_comparison_int}
|
||||||
|
fn test_comparison(factory: super::Factory) {
|
||||||
|
let code = "601665012365124623818181811060005511600155146002556415235412358014600355".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_store(&ext, 2, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_store(&ext, 3, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_eq!(gas_left, U256::from(49_952));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_signed_comparison: test_signed_comparison_jit, test_signed_comparison_int}
|
||||||
|
fn test_signed_comparison(factory: super::Factory) {
|
||||||
|
let code = "60106000036010818112600055136001556010601060000381811260025513600355".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_store(&ext, 2, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_store(&ext, 3, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_eq!(gas_left, U256::from(49_940));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_bitops: test_bitops_jit, test_bitops_int}
|
||||||
|
fn test_bitops(factory: super::Factory) {
|
||||||
|
let code = "60ff610ff08181818116600055176001551860025560008015600355198015600455600555".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(150_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "00000000000000000000000000000000000000000000000000000000000000f0");
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000fff");
|
||||||
|
assert_store(&ext, 2, "0000000000000000000000000000000000000000000000000000000000000f0f");
|
||||||
|
assert_store(&ext, 3, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_store(&ext, 4, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_store(&ext, 5, "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
|
||||||
|
assert_eq!(gas_left, U256::from(44_937));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_addmod_mulmod: test_addmod_mulmod_jit, test_addmod_mulmod_int}
|
||||||
|
fn test_addmod_mulmod(factory: super::Factory) {
|
||||||
|
let code = "60ff60f060108282820860005509600155600060f0601082828208196002550919600355".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000001");
|
||||||
|
assert_store(&ext, 1, "000000000000000000000000000000000000000000000000000000000000000f");
|
||||||
|
assert_store(&ext, 2, "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
|
||||||
|
assert_store(&ext, 3, "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
|
||||||
|
assert_eq!(gas_left, U256::from(19_914));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_byte: test_byte_jit, test_byte_int}
|
||||||
|
fn test_byte(factory: super::Factory) {
|
||||||
|
let code = "60f061ffff1a600055610fff601f1a600155".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000000");
|
||||||
|
assert_store(&ext, 1, "00000000000000000000000000000000000000000000000000000000000000ff");
|
||||||
|
assert_eq!(gas_left, U256::from(74_976));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_signextend: test_signextend_jit, test_signextend_int}
|
||||||
|
fn test_signextend(factory: super::Factory) {
|
||||||
|
let code = "610fff60020b60005560ff60200b600155".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000fff");
|
||||||
|
assert_store(&ext, 1, "00000000000000000000000000000000000000000000000000000000000000ff");
|
||||||
|
assert_eq!(gas_left, U256::from(59_972));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test] // JIT just returns out of gas
|
||||||
|
fn test_badinstruction_int() {
|
||||||
|
let factory = super::Factory::new(VMType::Interpreter);
|
||||||
|
let code = "af".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let err = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap_err()
|
||||||
|
};
|
||||||
|
|
||||||
|
match err {
|
||||||
|
evm::Error::BadInstruction { instruction: 0xaf } => (),
|
||||||
|
_ => assert!(false, "Expected bad instruction")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_pop: test_pop_jit, test_pop_int}
|
||||||
|
fn test_pop(factory: super::Factory) {
|
||||||
|
let code = "60f060aa50600055".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "00000000000000000000000000000000000000000000000000000000000000f0");
|
||||||
|
assert_eq!(gas_left, U256::from(79_989));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_extops: test_extops_jit, test_extops_int}
|
||||||
|
fn test_extops(factory: super::Factory) {
|
||||||
|
let code = "5a6001555836553a600255386003553460045560016001526016590454600555".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(150_000);
|
||||||
|
params.gas_price = U256::from(0x32);
|
||||||
|
params.value = ActionValue::Transfer(U256::from(0x99));
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000004"); // PC / CALLDATASIZE
|
||||||
|
assert_store(&ext, 1, "00000000000000000000000000000000000000000000000000000000000249ee"); // GAS
|
||||||
|
assert_store(&ext, 2, "0000000000000000000000000000000000000000000000000000000000000032"); // GASPRICE
|
||||||
|
assert_store(&ext, 3, "0000000000000000000000000000000000000000000000000000000000000020"); // CODESIZE
|
||||||
|
assert_store(&ext, 4, "0000000000000000000000000000000000000000000000000000000000000099"); // CALLVALUE
|
||||||
|
assert_store(&ext, 5, "0000000000000000000000000000000000000000000000000000000000000032");
|
||||||
|
assert_eq!(gas_left, U256::from(29_898));
|
||||||
|
}
|
||||||
|
|
||||||
|
evm_test!{test_jumps: test_jumps_jit, test_jumps_int}
|
||||||
|
fn test_jumps(factory: super::Factory) {
|
||||||
|
let code = "600160015560066000555b60016000540380806000551560245760015402600155600a565b".from_hex().unwrap();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(150_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(ext.sstore_clears, 1);
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000000"); // 5!
|
||||||
|
assert_store(&ext, 1, "0000000000000000000000000000000000000000000000000000000000000078"); // 5!
|
||||||
|
assert_eq!(gas_left, U256::from(54_117));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
evm_test!{test_calls: test_calls_jit, test_calls_int}
|
||||||
|
fn test_calls(factory: super::Factory) {
|
||||||
|
let code = "600054602d57600160005560006000600060006050610998610100f160006000600060006050610998610100f25b".from_hex().unwrap();
|
||||||
|
|
||||||
|
let address = Address::from(0x155);
|
||||||
|
let code_address = Address::from(0x998);
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(150_000);
|
||||||
|
params.code = Some(code);
|
||||||
|
params.address = address.clone();
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
ext.balances = {
|
||||||
|
let mut s = HashMap::new();
|
||||||
|
s.insert(params.address.clone(), params.gas.clone());
|
||||||
|
s
|
||||||
|
};
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create();
|
||||||
|
vm.exec(params, &mut ext).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_set_contains(&ext.calls, &FakeCall {
|
||||||
|
call_type: FakeCallType::CALL,
|
||||||
|
gas: U256::from(2556),
|
||||||
|
sender_address: Some(address.clone()),
|
||||||
|
receive_address: Some(code_address.clone()),
|
||||||
|
value: Some(U256::from(0x50)),
|
||||||
|
data: vec!(),
|
||||||
|
code_address: Some(code_address.clone())
|
||||||
|
});
|
||||||
|
assert_set_contains(&ext.calls, &FakeCall {
|
||||||
|
call_type: FakeCallType::CALL,
|
||||||
|
gas: U256::from(2556),
|
||||||
|
sender_address: Some(address.clone()),
|
||||||
|
receive_address: Some(address.clone()),
|
||||||
|
value: Some(U256::from(0x50)),
|
||||||
|
data: vec!(),
|
||||||
|
code_address: Some(code_address.clone())
|
||||||
|
});
|
||||||
|
assert_eq!(gas_left, U256::from(91_405));
|
||||||
|
assert_eq!(ext.calls.len(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn assert_set_contains<T : Debug + Eq + PartialEq + Hash>(set: &HashSet<T>, val: &T) {
|
||||||
|
let contains = set.contains(val);
|
||||||
|
if !contains {
|
||||||
|
println!("Set: {:?}", set);
|
||||||
|
println!("Elem: {:?}", val);
|
||||||
|
}
|
||||||
|
assert!(contains, "Element not found in HashSet");
|
||||||
|
}
|
||||||
|
|
||||||
|
fn assert_store(ext: &FakeExt, pos: u64, val: &str) {
|
||||||
|
assert_eq!(ext.store.get(&H256::from(pos)).unwrap(), &H256::from_str(val).unwrap());
|
||||||
}
|
}
|
||||||
|
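The two helpers added at the end of the file are what the earlier assertion rewrites switch to, so a storage or call check in these tests now reads, for example:

assert_store(&ext, 0, "00000000000000000000000000000000000000000000000000000000000000f0");
assert_set_contains(&ext.calls, &expected_call); // `expected_call`: a hypothetical FakeCall value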
@@ -129,7 +129,7 @@ impl<'a> Executive<'a> {

 let mut substate = Substate::new();

-let res = match *t.action() {
+let res = match t.action {
 Action::Create => {
 let new_address = contract_address(&sender, &nonce);
 let params = ActionParams {

@@ -399,7 +399,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(0x100u64));
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();

@@ -458,7 +458,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(100));
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();

@@ -512,7 +512,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(100));
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();

@@ -564,7 +564,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(100));
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(1024, factory);
 let mut substate = Substate::new();

@@ -624,7 +624,7 @@ mod tests {
 state.init_code(&address_b, code_b.clone());
 state.add_balance(&sender, &U256::from(100_000));

-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();

@@ -668,7 +668,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.init_code(&address, code.clone());
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();

@@ -694,7 +694,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(18));
-let mut info = EnvInfo::new();
+let mut info = EnvInfo::default();
 info.gas_limit = U256::from(100_000);
 let engine = TestEngine::new(0, factory);

@@ -721,7 +721,7 @@ mod tests {

 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
-let mut info = EnvInfo::new();
+let mut info = EnvInfo::default();
 info.gas_limit = U256::from(100_000);
 let engine = TestEngine::new(0, factory);

@@ -746,7 +746,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(17));
-let mut info = EnvInfo::new();
+let mut info = EnvInfo::default();
 info.gas_limit = U256::from(100_000);
 let engine = TestEngine::new(0, factory);

@@ -772,7 +772,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(17));
-let mut info = EnvInfo::new();
+let mut info = EnvInfo::default();
 info.gas_used = U256::from(20_000);
 info.gas_limit = U256::from(100_000);
 let engine = TestEngine::new(0, factory);

@@ -799,7 +799,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from(100_017));
-let mut info = EnvInfo::new();
+let mut info = EnvInfo::default();
 info.gas_limit = U256::from(100_000);
 let engine = TestEngine::new(0, factory);

@@ -833,7 +833,7 @@ mod tests {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.add_balance(&sender, &U256::from_str("152d02c7e14af6800000").unwrap());
-let info = EnvInfo::new();
+let info = EnvInfo::default();
 let engine = TestEngine::new(0, factory);
 let mut substate = Substate::new();
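Dropping `EnvInfo::new()` in favour of `EnvInfo::default()` makes the test setup rely on the standard `Default` machinery; the construction pattern used throughout the hunks above is simply:

let mut info = EnvInfo::default();
info.gas_limit = U256::from(100_000);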
@@ -1,3 +1,5 @@
+//! Blockchain DB extras.
+
 use util::*;
 use header::BlockNumber;
 use rocksdb::{DB, Writable};

@@ -5,21 +7,21 @@ use rocksdb::{DB, Writable};
 /// Represents index of extra data in database
 #[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
 pub enum ExtrasIndex {
-/// TODO [debris] Please document me
+/// Block details index
 BlockDetails = 0,
-/// TODO [debris] Please document me
+/// Block hash index
 BlockHash = 1,
-/// TODO [debris] Please document me
+/// Transaction address index
 TransactionAddress = 2,
-/// TODO [debris] Please document me
+/// Block log blooms index
 BlockLogBlooms = 3,
-/// TODO [debris] Please document me
+/// Block blooms index
 BlocksBlooms = 4
 }

 /// trait used to write Extras data to db
 pub trait ExtrasWritable {
-/// TODO [debris] Please document me
+/// Write extra data to db
 fn put_extras<K, T>(&self, hash: &K, value: &T) where
 T: ExtrasIndexable + Encodable,
 K: ExtrasSliceConvertable;

@@ -27,12 +29,12 @@ pub trait ExtrasWritable {

 /// trait used to read Extras data from db
 pub trait ExtrasReadable {
-/// TODO [debris] Please document me
+/// Read extra data from db
 fn get_extras<K, T>(&self, hash: &K) -> Option<T> where
 T: ExtrasIndexable + Decodable,
 K: ExtrasSliceConvertable;

-/// TODO [debris] Please document me
+/// Check if extra data exists in the db
 fn extras_exists<K, T>(&self, hash: &K) -> bool where
 T: ExtrasIndexable,
 K: ExtrasSliceConvertable;

@@ -66,9 +68,9 @@ impl ExtrasReadable for DB {

 /// Implementations should convert arbitrary type to database key slice
 pub trait ExtrasSliceConvertable {
-/// TODO [Gav Wood] Please document me
+/// Convert self, with `i` (the index), to a 264-bit extras DB key.
 fn to_extras_slice(&self, i: ExtrasIndex) -> H264;
-/// TODO [debris] Please document me
+/// Interpret self as a 256-bit hash, if natively `H256`.
 fn as_h256(&self) -> Option<&H256> { None }
 }

@@ -96,7 +98,7 @@ impl ExtrasSliceConvertable for BlockNumber {

 /// Types implementing this trait can be indexed in extras database
 pub trait ExtrasIndexable {
-/// TODO [debris] Please document me
+/// Returns this data index
 fn extras_index() -> ExtrasIndex;
 }

@@ -109,13 +111,13 @@ impl ExtrasIndexable for H256 {
 /// Familial details concerning a block
 #[derive(Debug, Clone)]
 pub struct BlockDetails {
-/// TODO [debris] Please document me
+/// Block number
 pub number: BlockNumber,
-/// TODO [debris] Please document me
+/// Total difficulty of the block and all its parents
 pub total_difficulty: U256,
-/// TODO [debris] Please document me
+/// Parent block hash
 pub parent: H256,
-/// TODO [debris] Please document me
+/// List of children block hashes
 pub children: Vec<H256>
 }

@@ -157,7 +159,7 @@ impl Encodable for BlockDetails {
 /// Log blooms of certain block
 #[derive(Clone)]
 pub struct BlockLogBlooms {
-/// TODO [debris] Please document me
+/// List of log blooms for the block
 pub blooms: Vec<H2048>
 }

@@ -191,7 +193,7 @@ impl Encodable for BlockLogBlooms {

 /// Neighboring log blooms on certain level
 pub struct BlocksBlooms {
-/// TODO [debris] Please document me
+/// List of block blooms.
 pub blooms: [H2048; 16]
 }

@@ -239,9 +241,9 @@ impl Encodable for BlocksBlooms {
 /// Represents address of certain transaction within block
 #[derive(Clone)]
 pub struct TransactionAddress {
-/// TODO [debris] Please document me
+/// Block hash
 pub block_hash: H256,
-/// TODO [debris] Please document me
+/// Transaction index within the block
 pub index: u64
 }
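For orientation, a hedged sketch of how the documented extras traits fit together; the `db` handle and the `block_hash`/`block_details` bindings are hypothetical, and it assumes `BlockDetails` implements the indexable/codable bounds as its use in the blockchain DB suggests:

// db.put_extras(&block_hash, &block_details);                     // ExtrasWritable
// let details: Option<BlockDetails> = db.get_extras(&block_hash); // ExtrasReadable (shown above for DB)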
|
||||||
@@ -1,3 +1,5 @@
+//! Block header.
+
 use util::*;
 use basic_types::*;
 use time::now_utc;
@@ -11,50 +13,49 @@ pub type BlockNumber = u64;
 /// which is non-specific.
 ///
 /// Doesn't do all that much on its own.
-#[derive(Default, Debug, Clone)]
+#[derive(Debug, Clone)]
 pub struct Header {
 	// TODO: make all private.
-	/// TODO [Gav Wood] Please document me
+	/// Parent hash.
 	pub parent_hash: H256,
-	/// TODO [arkpar] Please document me
+	/// Block timestamp.
 	pub timestamp: u64,
-	/// TODO [debris] Please document me
+	/// Block number.
 	pub number: BlockNumber,
-	/// TODO [Gav Wood] Please document me
+	/// Block author.
 	pub author: Address,

-	/// TODO [debris] Please document me
+	/// Transactions root.
 	pub transactions_root: H256,
-	/// TODO [debris] Please document me
+	/// Block uncles hash.
 	pub uncles_hash: H256,
-	/// TODO [Gav Wood] Please document me
+	/// Block extra data.
 	pub extra_data: Bytes,

-	/// TODO [debris] Please document me
+	/// State root.
 	pub state_root: H256,
-	/// TODO [debris] Please document me
+	/// Block receipts root.
 	pub receipts_root: H256,
-	/// TODO [debris] Please document me
+	/// Block bloom.
 	pub log_bloom: LogBloom,
-	/// TODO [debris] Please document me
+	/// Gas used for contracts execution.
 	pub gas_used: U256,
-	/// TODO [Gav Wood] Please document me
+	/// Block gas limit.
 	pub gas_limit: U256,

-	/// TODO [debris] Please document me
+	/// Block difficulty.
 	pub difficulty: U256,
-	/// TODO [arkpar] Please document me
+	/// Block seal.
 	pub seal: Vec<Bytes>,

-	/// TODO [arkpar] Please document me
+	/// The memoized hash of the RLP representation *including* the seal fields.
 	pub hash: RefCell<Option<H256>>,
-	/// TODO [Gav Wood] Please document me
+	/// The memoized hash of the RLP representation *without* the seal fields.
 	pub bare_hash: RefCell<Option<H256>>,
 }

-impl Header {
+impl Default for Header {
-	/// Create a new, default-valued, header.
+	fn default() -> Self {
-	pub fn new() -> Header {
 		Header {
 			parent_hash: ZERO_H256.clone(),
 			timestamp: 0,
@@ -77,51 +78,58 @@ impl Header {
 			bare_hash: RefCell::new(None),
 		}
 	}
+}

-	/// TODO [Gav Wood] Please document me
+impl Header {
+	/// Create a new, default-valued, header.
+	pub fn new() -> Self {
+		Self::default()
+	}
+
+	/// Get the number field of the header.
 	pub fn number(&self) -> BlockNumber { self.number }
-	/// TODO [Gav Wood] Please document me
+	/// Get the timestamp field of the header.
 	pub fn timestamp(&self) -> u64 { self.timestamp }
-	/// TODO [Gav Wood] Please document me
+	/// Get the author field of the header.
 	pub fn author(&self) -> &Address { &self.author }

-	/// TODO [Gav Wood] Please document me
+	/// Get the extra data field of the header.
 	pub fn extra_data(&self) -> &Bytes { &self.extra_data }

-	/// TODO [Gav Wood] Please document me
+	/// Get the state root field of the header.
 	pub fn state_root(&self) -> &H256 { &self.state_root }
-	/// TODO [Gav Wood] Please document me
+	/// Get the receipts root field of the header.
 	pub fn receipts_root(&self) -> &H256 { &self.receipts_root }
-	/// TODO [Gav Wood] Please document me
+	/// Get the gas limit field of the header.
 	pub fn gas_limit(&self) -> &U256 { &self.gas_limit }

-	/// TODO [Gav Wood] Please document me
+	/// Get the difficulty field of the header.
 	pub fn difficulty(&self) -> &U256 { &self.difficulty }
-	/// TODO [Gav Wood] Please document me
+	/// Get the seal field of the header.
 	pub fn seal(&self) -> &Vec<Bytes> { &self.seal }

 	// TODO: seal_at, set_seal_at &c.

-	/// TODO [Gav Wood] Please document me
+	/// Set the number field of the header.
 	pub fn set_number(&mut self, a: BlockNumber) { self.number = a; self.note_dirty(); }
-	/// TODO [Gav Wood] Please document me
+	/// Set the timestamp field of the header.
 	pub fn set_timestamp(&mut self, a: u64) { self.timestamp = a; self.note_dirty(); }
-	/// TODO [Gav Wood] Please document me
+	/// Set the timestamp field of the header to the current time.
 	pub fn set_timestamp_now(&mut self) { self.timestamp = now_utc().to_timespec().sec as u64; self.note_dirty(); }
-	/// TODO [Gav Wood] Please document me
+	/// Set the author field of the header.
 	pub fn set_author(&mut self, a: Address) { if a != self.author { self.author = a; self.note_dirty(); } }

-	/// TODO [Gav Wood] Please document me
+	/// Set the extra data field of the header.
 	pub fn set_extra_data(&mut self, a: Bytes) { if a != self.extra_data { self.extra_data = a; self.note_dirty(); } }

-	/// TODO [Gav Wood] Please document me
+	/// Set the gas used field of the header.
 	pub fn set_gas_used(&mut self, a: U256) { self.gas_used = a; self.note_dirty(); }
-	/// TODO [Gav Wood] Please document me
+	/// Set the gas limit field of the header.
 	pub fn set_gas_limit(&mut self, a: U256) { self.gas_limit = a; self.note_dirty(); }

-	/// TODO [Gav Wood] Please document me
+	/// Set the difficulty field of the header.
 	pub fn set_difficulty(&mut self, a: U256) { self.difficulty = a; self.note_dirty(); }
-	/// TODO [Gav Wood] Please document me
+	/// Set the seal field of the header.
 	pub fn set_seal(&mut self, a: Vec<Bytes>) { self.seal = a; self.note_dirty(); }

 	/// Get the hash of this header (sha3 of the RLP).
@@ -155,7 +163,7 @@ impl Header {
 	}

 	// TODO: make these functions traity
-	/// TODO [Gav Wood] Please document me
+	/// Place this header into an RLP stream `s`, optionally `with_seal`.
 	pub fn stream_rlp(&self, s: &mut RlpStream, with_seal: Seal) {
 		s.begin_list(13 + match with_seal { Seal::With => self.seal.len(), _ => 0 });
 		s.append(&self.parent_hash);
@@ -178,14 +186,14 @@ impl Header {
 		}
 	}

-	/// TODO [Gav Wood] Please document me
+	/// Get the RLP of this header, optionally `with_seal`.
 	pub fn rlp(&self, with_seal: Seal) -> Bytes {
 		let mut s = RlpStream::new();
 		self.stream_rlp(&mut s, with_seal);
 		s.out()
 	}

-	/// TODO [debris] Please document me
+	/// Get the SHA3 (Keccak) of this header, optionally `with_seal`.
 	pub fn rlp_sha3(&self, with_seal: Seal) -> H256 { self.rlp(with_seal).sha3() }
 }
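Taken together, this hunk makes `Header::new()` a thin wrapper over the new `Default` impl and replaces every TODO with a real doc comment on the corresponding getter or setter. A minimal sketch of how calling code reads after the change; the `ethcore::header` path and the `Address`/`U256` re-exports from ethcore-util are assumptions, while the methods themselves are the ones shown above:

```rust
use ethcore::header::Header;

/// Build a child header from a parent, using only the accessors and
/// setters documented in this hunk. Each setter calls note_dirty(),
/// dropping the memoized hash and bare_hash.
fn child_of(parent: &Header) -> Header {
    let mut h = Header::new();                    // now just Self::default()
    h.set_number(parent.number() + 1);
    h.set_timestamp_now();                        // now_utc().to_timespec().sec
    h.set_gas_limit(parent.gas_limit().clone());
    h.set_difficulty(parent.difficulty().clone());
    h
}
```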
@@ -3,7 +3,7 @@ use client::{BlockChainClient,Client};
 use pod_state::*;
 use block::Block;
 use ethereum;
-use super::helpers::*;
+use tests::helpers::*;

 pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 	init_log();
@@ -15,12 +15,12 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 	{
 		let mut fail_unless = |cond: bool| if !cond && !fail {
 			failed.push(name.clone());
-			flush(format!("FAIL\n"));
+			flushln!("FAIL");
 			fail = true;
 			true
 		} else {false};

-		flush(format!(" - {}...", name));
+		flush!(" - {}...", name);

 		let blocks: Vec<(Bytes, bool)> = test["blocks"].as_array().unwrap().iter().map(|e| (xjson!(&e["rlp"]), e.find("blockHeader").is_some())).collect();
 		let mut spec = match era {
@@ -50,7 +50,7 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 			}
 		}
 		if !fail {
-			flush(format!("ok\n"));
+			flushln!("ok");
 		}
 	}
 	println!("!!! {:?} tests from failed.", failed.len());
@@ -1,6 +1,6 @@
 use client::{BlockChainClient,Client};
 use super::test_common::*;
-use super::helpers::*;
+use tests::helpers::*;

 #[test]
 fn created() {
@@ -1,6 +1,6 @@
 use super::test_common::*;
-use super::helpers::*;
 use super::chain::json_chain_test;
+use tests::helpers::*;

 fn do_json_test(json_data: &[u8]) -> Vec<String> {
 	json_chain_test(json_data, ChainEra::Homestead)
@@ -1,5 +1,5 @@
 use super::test_common::*;
-use super::helpers::*;
+use tests::helpers::*;
 use super::state::json_chain_test;

 fn do_json_test(json_data: &[u8]) -> Vec<String> {
ethcore/src/json_tests/mod.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
+#[macro_use]
+mod test_common;
+
+mod transaction;
+mod executive;
+mod state;
+mod client;
+mod chain;
+mod homestead_state;
+mod homestead_chain;
@@ -1,5 +1,5 @@
 use super::test_common::*;
-use super::helpers::*;
+use tests::helpers::*;
 use pod_state::*;
 use state_diff::*;
 use ethereum;
@@ -19,19 +19,19 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 		ChainEra::Homestead => ethereum::new_homestead_test(),
 	}.to_engine().unwrap();

-	flush(format!("\n"));
+	flushln!("");

 	for (name, test) in json.as_object().unwrap() {
 		let mut fail = false;
 		{
 			let mut fail_unless = |cond: bool| if !cond && !fail {
 				failed.push(name.clone());
-				flush(format!("FAIL\n"));
+				flushln!("FAIL");
 				fail = true;
 				true
 			} else {false};

-			flush(format!(" - {}...", name));
+			flush!(" - {}...", name);

 			let t = Transaction::from_json(&test["transaction"]);
 			let env = EnvInfo::from_json(&test["env"]);
@@ -73,7 +73,7 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 			}
 		}
 		if !fail {
-			flush(format!("ok\n"));
+			flushln!("ok");
 		}
 		// TODO: Add extra APIs for output
 		//if fail_unless(out == r.)
@@ -12,7 +12,6 @@ macro_rules! declare_test {
 		#[ignore]
 		#[test]
 		#[allow(non_snake_case)]
-		#[cfg(feature="json-tests")]
 		fn $id() {
 			test!($name);
 		}
@@ -21,7 +20,6 @@ macro_rules! declare_test {
 		#[cfg(feature = "test-heavy")]
 		#[test]
 		#[allow(non_snake_case)]
-		#[cfg(feature="json-tests")]
 		fn $id() {
 			test!($name);
 		}
@@ -29,7 +27,6 @@ macro_rules! declare_test {
 	($id: ident, $name: expr) => {
 		#[test]
 		#[allow(non_snake_case)]
-		#[cfg(feature="json-tests")]
 		fn $id() {
 			test!($name);
 		}
@@ -2,157 +2,113 @@
 #![feature(cell_extras)]
 #![feature(augmented_assignments)]
 #![feature(plugin)]
-//#![plugin(interpolate_idents)]
 #![plugin(clippy)]
 #![allow(needless_range_loop, match_bool)]

-//! Ethcore's ethereum implementation
+//! Ethcore library
 //!
-//! ### Rust version
+//! ### Rust version:
-//! - beta
 //! - nightly
 //!
 //! ### Supported platforms:
 //! - OSX
-//! - Linux/Ubuntu
+//! - Linux
 //!
-//! ### Dependencies:
+//! ### Building:
-//! - RocksDB 3.13
-//! - LLVM 3.7 (optional, required for `jit`)
-//! - evmjit (optional, required for `jit`)
 //!
-//! ### Dependencies Installation
+//! - Ubuntu 14.04 and later:
 //!
-//! - OSX
-//!
-//! - rocksdb
 //! ```bash
+//! # install rocksdb
+//! add-apt-repository "deb http://ppa.launchpad.net/giskou/librocksdb/ubuntu trusty main"
+//! apt-get update
+//! apt-get install -y --force-yes librocksdb
+//!
+//! # install multirust
+//! curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes
+//!
+//! # install nightly and make it default
+//! multirust update nightly && multirust default nightly
+//!
+//! # export rust LIBRARY_PATH
+//! export LIBRARY_PATH=/usr/local/lib
+//!
+//! # download and build parity
+//! git clone https://github.com/ethcore/parity
+//! cd parity
+//! cargo build --release
+//! ```
+//!
+//! - OSX:
+//!
+//! ```bash
+//! # install rocksdb && multirust
+//! brew update
 //! brew install rocksdb
+//! brew install multirust
+//!
+//! # install nightly and make it default
+//! multirust update nightly && multirust default nightly
+//!
+//! # export rust LIBRARY_PATH
+//! export LIBRARY_PATH=/usr/local/lib
+//!
+//! # download and build parity
+//! git clone https://github.com/ethcore/parity
+//! cd parity
+//! cargo build --release
 //! ```
-//!
-//! - llvm
+#[macro_use] extern crate log;
-//!
+#[macro_use] extern crate ethcore_util as util;
-//! - download llvm 3.7 from http://llvm.org/apt/
+#[macro_use] extern crate lazy_static;
-//!
-//! ```bash
-//! cd llvm-3.7.0.src
-//! mkdir build && cd $_
-//! cmake -G "Unix Makefiles" .. -DCMAKE_C_FLAGS_RELEASE= -DCMAKE_CXX_FLAGS_RELEASE= -DCMAKE_INSTALL_PREFIX=/usr/local/Cellar/llvm/3.7 -DCMAKE_BUILD_TYPE=Release
-//! make && make install
-//! ```
-//! - evmjit
-//!
-//! - download from https://github.com/debris/evmjit
-//!
-//! ```bash
-//! cd evmjit
-//! mkdir build && cd $_
-//! cmake -DLLVM_DIR=/usr/local/lib/llvm-3.7/share/llvm/cmake ..
-//! make && make install
-//! ```
-//!
-//! - Linux/Ubuntu
-//!
-//! - rocksdb
-//!
-//! ```bash
-//! wget https://github.com/facebook/rocksdb/archive/rocksdb-3.13.tar.gz
-//! tar xvf rocksdb-3.13.tar.gz && cd rocksdb-rocksdb-3.13 && make shared_lib
-//! sudo make install
-//! ```
-//!
-//! - llvm
-//!
-//! - install using packages from http://llvm.org/apt/
-//!
-//! - evmjit
-//!
-//! - download from https://github.com/debris/evmjit
-//!
-//! ```bash
-//! cd evmjit
-//! mkdir build && cd $_
-//! cmake .. && make
-//! sudo make install
-//! sudo ldconfig
-//! ```
-#[macro_use]
-extern crate log;
 extern crate rustc_serialize;
-extern crate flate2;
 extern crate rocksdb;
 extern crate heapsize;
 extern crate crypto;
 extern crate time;
 extern crate env_logger;
 extern crate num_cpus;
-#[cfg(feature = "jit" )]
-extern crate evmjit;
-#[macro_use]
-extern crate ethcore_util as util;
 extern crate crossbeam;
-#[macro_use]
-extern crate lazy_static;

-// NOTE: Add doc parser exception for these pub declarations.
+#[cfg(feature = "jit" )] extern crate evmjit;

-/// TODO [Gav Wood] Please document me
+pub mod block;
-pub mod common;
+pub mod blockchain;
-/// TODO [Tomusdrw] Please document me
+pub mod block_queue;
-pub mod basic_types;
+pub mod client;
-#[macro_use]
-pub mod evm;
 pub mod error;
-/// TODO [Gav Wood] Please document me
+pub mod ethereum;
-pub mod log_entry;
-/// TODO [Gav Wood] Please document me
-pub mod env_info;
-/// TODO [Gav Wood] Please document me
-pub mod pod_account;
-/// TODO [Gav Wood] Please document me
-pub mod pod_state;
-/// TODO [Gav Wood] Please document me
-pub mod account_diff;
-/// TODO [Gav Wood] Please document me
-pub mod state_diff;
-/// TODO [Gav Wood] Please document me
-pub mod engine;
-/// TODO [Gav Wood] Please document me
-pub mod state;
-/// TODO [Gav Wood] Please document me
-pub mod account;
-pub mod action_params;
-/// TODO [debris] Please document me
 pub mod header;
-/// TODO [Gav Wood] Please document me
+pub mod service;
-pub mod transaction;
-/// TODO [Gav Wood] Please document me
-pub mod receipt;
-/// TODO [Gav Wood] Please document me
-pub mod null_engine;
-/// TODO [Gav Wood] Please document me
-pub mod builtin;
-/// TODO [debris] Please document me
 pub mod spec;
 pub mod views;
-pub mod blockchain;
-/// TODO [Gav Wood] Please document me
+mod common;
-pub mod extras;
+mod basic_types;
-/// TODO [arkpar] Please document me
+#[macro_use] mod evm;
-pub mod substate;
+mod log_entry;
-/// TODO [Gav Wood] Please document me
+mod env_info;
-pub mod service;
+mod pod_account;
-pub mod executive;
+mod pod_state;
-pub mod externalities;
+mod account_diff;
+mod state_diff;
+mod engine;
+mod state;
+mod account;
+mod action_params;
+mod transaction;
+mod receipt;
+mod null_engine;
+mod builtin;
+mod extras;
+mod substate;
+mod executive;
+mod externalities;
+mod verification;

 #[cfg(test)]
 mod tests;
+#[cfg(test)]
-/// TODO [arkpar] Please document me
+#[cfg(feature="json-tests")]
-pub mod client;
+mod json_tests;
-/// TODO [arkpar] Please document me
-pub mod block;
-/// TODO [arkpar] Please document me
-pub mod verification;
-pub mod block_queue;
-pub mod ethereum;
@@ -1,14 +1,14 @@
 use util::*;
 use basic_types::LogBloom;

-/// A single log's entry.
+/// A record of execution for a `LOG` operation.
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct LogEntry {
-	/// TODO [Gav Wood] Please document me
+	/// The address of the contract executing at the point of the `LOG` operation.
 	pub address: Address,
-	/// TODO [Gav Wood] Please document me
+	/// The topics associated with the `LOG` operation.
 	pub topics: Vec<H256>,
-	/// TODO [Gav Wood] Please document me
+	/// The data associated with the `LOG` operation.
 	pub data: Bytes,
 }

@@ -31,21 +31,6 @@ impl LogEntry {
 		}
 	}

-	/// Returns reference to address.
-	pub fn address(&self) -> &Address {
-		&self.address
-	}
-
-	/// Returns reference to topics.
-	pub fn topics(&self) -> &Vec<H256> {
-		&self.topics
-	}
-
-	/// Returns reference to data.
-	pub fn data(&self) -> &Bytes {
-		&self.data
-	}
-
 	/// Calculates the bloom of this log entry.
 	pub fn bloom(&self) -> LogBloom {
 		self.topics.iter().fold(LogBloom::from_bloomed(&self.address.sha3()), |b, t| b.with_bloomed(&t.sha3()))
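With the accessors gone, callers construct `LogEntry` through its public fields and call `bloom()` directly. A short illustrative sketch using the same imports the file itself declares (`use util::*;`, `use basic_types::LogBloom;`); the crate-internal module path is an assumption:

```rust
use util::*;
use basic_types::LogBloom;
use log_entry::LogEntry;

/// Bloom for a log emitted by `address` with the given topics. Note that
/// `data` does not contribute to the bloom; the fold above only mixes in
/// the sha3 of the address and of each topic.
fn bloom_for(address: Address, topics: Vec<H256>, data: Bytes) -> LogBloom {
    let entry = LogEntry { address: address, topics: topics, data: data };
    entry.bloom()
}
```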
@@ -2,20 +2,22 @@ use util::*;
 use account::*;

 #[derive(Debug,Clone,PartialEq,Eq)]
-/// Genesis account data. Does not have a DB overlay cache.
+/// An account, expressed as Plain-Old-Data (hence the name).
+/// Does not have a DB overlay cache, code hash or anything like that.
 pub struct PodAccount {
-	/// TODO [Gav Wood] Please document me
+	/// The balance of the account.
 	pub balance: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The nonce of the account.
 	pub nonce: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The code of the account.
 	pub code: Bytes,
-	/// TODO [Gav Wood] Please document me
+	/// The storage of the account.
 	pub storage: BTreeMap<H256, H256>,
 }

 impl PodAccount {
 	/// Construct new object.
+	#[cfg(test)]
 	pub fn new(balance: U256, nonce: U256, code: Bytes, storage: BTreeMap<H256, H256>) -> PodAccount {
 		PodAccount { balance: balance, nonce: nonce, code: code, storage: storage }
 	}
@@ -2,7 +2,7 @@ use util::*;
 use pod_account::*;

 #[derive(Debug,Clone,PartialEq,Eq,Default)]
-/// TODO [Gav Wood] Please document me
+/// State of all accounts in the system expressed in Plain Old Data.
 pub struct PodState (BTreeMap<Address, PodAccount>);

 impl PodState {
@@ -10,6 +10,7 @@ impl PodState {
 	pub fn new() -> PodState { Default::default() }

 	/// Contruct a new object from the `m`.
+	#[cfg(test)]
 	pub fn from(m: BTreeMap<Address, PodAccount>) -> PodState { PodState(m) }

 	/// Get the underlying map.
@@ -21,6 +22,8 @@ impl PodState {
 	}

 	/// Drain object to get the underlying map.
+	#[cfg(test)]
+	#[cfg(feature = "json-tests")]
 	pub fn drain(self) -> BTreeMap<Address, PodAccount> { self.0 }
 }
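Both constructors the tests rely on, `PodAccount::new` and `PodState::from`, are now compiled only under the cfg gates added above. A sketch of the test-only usage; the values are arbitrary examples, and `Address::from(1u64)` assumes the fixed-hash `From<u64>` conversion from ethcore-util:

```rust
#[cfg(test)]
fn example_pod_state() -> PodState {
    use std::collections::BTreeMap;

    let mut accounts = BTreeMap::new();
    // PodAccount::new(balance, nonce, code, storage), as declared above.
    accounts.insert(Address::from(1u64),
        PodAccount::new(U256::from(100u64), U256::from(0u64), vec![], BTreeMap::new()));
    accounts.insert(Address::from(2u64),
        PodAccount::new(U256::from(5u64), U256::from(1u64), vec![0x60, 0x00], BTreeMap::new()));
    PodState::from(accounts)
}
```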
@@ -5,18 +5,18 @@ use log_entry::LogEntry;
 /// Information describing execution of a transaction.
 #[derive(Default, Debug, Clone)]
 pub struct Receipt {
-	/// TODO [Gav Wood] Please document me
+	/// The state root after executing the transaction.
 	pub state_root: H256,
-	/// TODO [Gav Wood] Please document me
+	/// The total gas used in the block following execution of the transaction.
 	pub gas_used: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The OR-wide combination of all logs' blooms for this transaction.
 	pub log_bloom: LogBloom,
-	/// TODO [Gav Wood] Please document me
+	/// The logs stemming from this transaction.
 	pub logs: Vec<LogEntry>,
 }

 impl Receipt {
-	/// TODO [Gav Wood] Please document me
+	/// Create a new receipt.
 	pub fn new(state_root: H256, gas_used: U256, logs: Vec<LogEntry>) -> Receipt {
 		Receipt {
 			state_root: state_root,
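The `log_bloom` doc comment above describes an "OR-wide combination" of the logs' blooms. The sketch below spells that fold out; it assumes the bitwise-OR impl ethcore-util provides for its fixed-size hash types and is illustrative only, since the body of `Receipt::new` beyond the lines shown is not part of this hunk:

```rust
use util::*;
use basic_types::LogBloom;
use log_entry::LogEntry;

/// OR together the bloom of every log entry; this is what the receipt's
/// log_bloom field is documented to hold.
fn combined_bloom(logs: &[LogEntry]) -> LogBloom {
    logs.iter().fold(LogBloom::new(), |acc, log| acc | log.bloom())
}
```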
@@ -1,3 +1,5 @@
+//! Creates and registers client and network services.
+
 use util::*;
 use spec::Spec;
 use error::*;
@@ -13,7 +15,7 @@ pub enum SyncMessage {
 	BlockVerified,
 }

-/// TODO [arkpar] Please document me
+/// IO Message type used for Network service
 pub type NetSyncMessage = NetworkIoMessage<SyncMessage>;

 /// Client service setup. Creates and registers client and network services with the IO subsystem.
@@ -1,21 +1,10 @@
+//! Parameters for a block chain.
+
 use common::*;
-use flate2::read::GzDecoder;
 use engine::*;
 use pod_state::*;
 use null_engine::*;

-/// Converts file from base64 gzipped bytes to json
-pub fn gzip64res_to_json(source: &[u8]) -> Json {
-	// there is probably no need to store genesis in based64 gzip,
-	// but that's what go does, and it was easy to load it this way
-	let data = source.from_base64().expect("Genesis block is malformed!");
-	let data_ref: &[u8] = &data;
-	let mut decoder = GzDecoder::new(data_ref).expect("Gzip is invalid");
-	let mut s: String = "".to_owned();
-	decoder.read_to_string(&mut s).expect("Gzip is invalid");
-	Json::from_str(&s).expect("Json is invalid")
-}

 /// Convert JSON value to equivalent RLP representation.
 // TODO: handle container types.
 fn json_to_rlp(json: &Json) -> Bytes {
@@ -45,53 +34,50 @@ fn json_to_rlp_map(json: &Json) -> HashMap<String, Bytes> {
 /// chain and those to be interpreted by the active chain engine.
 #[derive(Debug)]
 pub struct Spec {
-	// User friendly spec name
+	/// User friendly spec name
-	/// TODO [Gav Wood] Please document me
 	pub name: String,
-	// What engine are we using for this?
+	/// What engine are we using for this?
-	/// TODO [Gav Wood] Please document me
 	pub engine_name: String,

 	/// Known nodes on the network in enode format.
 	pub nodes: Vec<String>,

-	// Parameters concerning operation of the specific engine we're using.
+	/// Parameters concerning operation of the specific engine we're using.
-	// Name -> RLP-encoded value
+	/// Maps the parameter name to an RLP-encoded value.
-	/// TODO [Gav Wood] Please document me
 	pub engine_params: HashMap<String, Bytes>,

-	// Builtin-contracts are here for now but would like to abstract into Engine API eventually.
+	/// Builtin-contracts we would like to see in the chain.
-	/// TODO [Gav Wood] Please document me
+	/// (In principle these are just hints for the engine since that has the last word on them.)
 	pub builtins: BTreeMap<Address, Builtin>,

-	// Genesis params.
+	/// The genesis block's parent hash field.
-	/// TODO [Gav Wood] Please document me
 	pub parent_hash: H256,
-	/// TODO [Gav Wood] Please document me
+	/// The genesis block's author field.
 	pub author: Address,
-	/// TODO [Gav Wood] Please document me
+	/// The genesis block's difficulty field.
 	pub difficulty: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The genesis block's gas limit field.
 	pub gas_limit: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The genesis block's gas used field.
 	pub gas_used: U256,
-	/// TODO [Gav Wood] Please document me
+	/// The genesis block's timestamp field.
 	pub timestamp: u64,
 	/// Transactions root of the genesis block. Should be SHA3_NULL_RLP.
 	pub transactions_root: H256,
 	/// Receipts root of the genesis block. Should be SHA3_NULL_RLP.
 	pub receipts_root: H256,
-	/// TODO [arkpar] Please document me
+	/// The genesis block's extra data field.
 	pub extra_data: Bytes,
-	/// TODO [Gav Wood] Please document me
+	/// The number of seal fields in the genesis block.
-	genesis_state: PodState,
-	/// TODO [Gav Wood] Please document me
 	pub seal_fields: usize,
-	/// TODO [Gav Wood] Please document me
+	/// Each seal field, expressed as RLP, concatenated.
 	pub seal_rlp: Bytes,

 	// May be prepopulated if we know this in advance.
 	state_root_memo: RwLock<Option<H256>>,

+	// Genesis state as plain old data.
+	genesis_state: PodState,
 }

 #[allow(wrong_self_convention)] // because to_engine(self) should be to_engine(&self)
@@ -117,7 +103,7 @@ impl Spec {
 	/// Get the known knodes of the network in enode format.
 	pub fn nodes(&self) -> &Vec<String> { &self.nodes }

-	/// TODO [Gav Wood] Please document me
+	/// Get the header of the genesis block.
 	pub fn genesis_header(&self) -> Header {
 		Header {
 			parent_hash: self.parent_hash.clone(),
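All of the genesis fields documented above feed `genesis_header()`, which is the main in-crate consumer. A small sketch of reading them back through the resulting header; the module paths are assumptions, and the accessor methods are the ones documented in the header.rs hunk earlier in this commit:

```rust
use spec::Spec;

/// Summarise the genesis block parameters a Spec will produce.
fn describe_genesis(spec: &Spec) -> String {
    let genesis = spec.genesis_header();
    // parent_hash, author, difficulty, gas_limit, timestamp, extra_data
    // etc. are copied straight from the Spec fields documented above.
    format!("genesis: number={} timestamp={} difficulty={:?} gas_limit={:?}",
        genesis.number(), genesis.timestamp(),
        genesis.difficulty(), genesis.gas_limit())
}
```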
@@ -1,11 +1,15 @@
 use common::*;
 use engine::Engine;
 use executive::Executive;
+#[cfg(test)]
+#[cfg(feature = "json-tests")]
 use pod_account::*;
-use pod_state::*;
+#[cfg(test)]
+#[cfg(feature = "json-tests")]
+use pod_state::PodState;
 //use state_diff::*; // TODO: uncomment once to_pod() works correctly.

-/// TODO [Gav Wood] Please document me
+/// Result type for the execution ("application") of a transaction.
 pub type ApplyResult = Result<Receipt, Error>;

 /// Representation of the entire state of all accounts in the system.
@@ -20,6 +24,7 @@ pub struct State {

 impl State {
 	/// Creates new state with empty state root
+	#[cfg(test)]
 	pub fn new(mut db: JournalDB, account_start_nonce: U256) -> State {
 		let mut root = H256::new();
 		{
@@ -60,11 +65,6 @@ impl State {
 		&self.root
 	}

-	/// Expose the underlying database; good to use for calling `state.db().commit()`.
-	pub fn db(&mut self) -> &mut JournalDB {
-		&mut self.db
-	}
-
 	/// Create a new contract at address `contract`. If there is already an account at the address
 	/// it will have its code reset, ready for `init_code()`.
 	pub fn new_contract(&mut self, contract: &Address, balance: U256) {
@@ -143,7 +143,6 @@ impl State {
 		// let old = self.to_pod();

 		let e = try!(Executive::new(self, env_info, engine).transact(t));
-		//println!("Executed: {:?}", e);

 		// TODO uncomment once to_pod() works correctly.
 		// trace!("Applied transaction. Diff:\n{}\n", StateDiff::diff_pod(&old, &self.to_pod()));
@@ -153,16 +152,11 @@ impl State {
 		Ok(receipt)
 	}

-	/// TODO [debris] Please document me
+	/// Reverts uncommited changed.
 	pub fn revert(&mut self, backup: State) {
 		self.cache = backup.cache;
 	}

-	/// Convert into a JSON representation.
-	pub fn as_json(&self) -> String {
-		unimplemented!();
-	}
-
 	/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
 	/// `accounts` is mutable because we may need to commit the code or storage and record that.
 	#[allow(match_ref_pats)]
@@ -195,6 +189,8 @@ impl State {
 		Self::commit_into(&mut self.db, &mut self.root, self.cache.borrow_mut().deref_mut());
 	}

+	#[cfg(test)]
+	#[cfg(feature = "json-tests")]
 	/// Populate the state from `accounts`.
 	pub fn populate_from(&mut self, accounts: PodState) {
 		for (add, acc) in accounts.drain().into_iter() {
@@ -202,17 +198,8 @@ impl State {
 		}
 	}

-	/// Populate a PodAccount map from this state.
+	#[cfg(test)]
-	pub fn to_hashmap_pod(&self) -> HashMap<Address, PodAccount> {
+	#[cfg(feature = "json-tests")]
-		// TODO: handle database rather than just the cache.
-		self.cache.borrow().iter().fold(HashMap::new(), |mut m, (add, opt)| {
-			if let Some(ref acc) = *opt {
-				m.insert(add.clone(), PodAccount::from_account(acc));
-			}
-			m
-		})
-	}
-
 	/// Populate a PodAccount map from this state.
 	pub fn to_pod(&self) -> PodState {
 		// TODO: handle database rather than just the cache.
@@ -1,12 +1,15 @@
 use util::*;
+#[cfg(test)]
 use pod_state::*;
 use account_diff::*;

 #[derive(Debug,Clone,PartialEq,Eq)]
-/// TODO [Gav Wood] Please document me
+/// Expression for the delta between two system states. Encoded the
+/// delta of every altered account.
 pub struct StateDiff (BTreeMap<Address, AccountDiff>);

 impl StateDiff {
+	#[cfg(test)]
 	/// Calculate and return diff between `pre` state and `post` state.
 	pub fn diff_pod(pre: &PodState, post: &PodState) -> StateDiff {
 		StateDiff(pre.get().keys().merge(post.get().keys()).filter_map(|acc| AccountDiff::diff_pod(pre.get().get(acc), post.get().get(acc)).map(|d|(acc.clone(), d))).collect())
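Since `diff_pod` is now `#[cfg(test)]`, it is only reachable from unit tests, where the flow looks roughly like the sketch below. The helper that builds the `post` state is hypothetical (see the pod_state.rs sketch above); only the `diff_pod` signature and the derived `Debug` come from this hunk:

```rust
#[cfg(test)]
mod state_diff_usage {
    use super::*;

    #[test]
    fn delta_covers_only_altered_accounts() {
        let pre = PodState::new();
        let post = example_pod_state();   // hypothetical helper for illustration
        let diff = StateDiff::diff_pod(&pre, &post);
        // StateDiff derives Debug, so the delta can at least be inspected.
        assert!(!format!("{:?}", diff).is_empty());
    }
}
```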
@@ -1,3 +1,4 @@
+//! Execution environment substate.
 use common::*;

 /// State changes which should be applied in finalize,
@@ -25,7 +26,7 @@ impl Substate {
 		}
 	}

-	/// TODO [Gav Wood] Please document me
+	/// Merge secondary substate `s` into self, accruing each element correspondingly.
 	pub fn accrue(&mut self, s: Substate) {
 		self.suicides.extend(s.suicides.into_iter());
 		self.logs.extend(s.logs.into_iter());
@@ -1,6 +1,7 @@
+#[cfg(feature = "json-tests")]
 use client::{BlockChainClient, Client};
 use std::env;
-use super::test_common::*;
+use common::*;
 use std::path::PathBuf;
 use spec::*;
 use std::fs::{remove_dir_all};
@@ -8,6 +9,8 @@ use blockchain::{BlockChain};
 use state::*;
 use rocksdb::*;

+#[cfg(feature = "json-tests")]
 pub enum ChainEra {
 	Frontier,
 	Homestead,
@@ -43,10 +46,10 @@ impl Drop for RandomTempPath {
 	}
 }

-#[allow(dead_code)]
+#[cfg(test)]
 pub struct GuardedTempResult<T> {
 	result: T,
-	temp: RandomTempPath
+	_temp: RandomTempPath
 }

 impl<T> GuardedTempResult<T> {
@@ -111,6 +114,7 @@ pub fn create_test_block_with_data(header: &Header, transactions: &[&Transaction
 	rlp.out()
 }

+#[cfg(feature = "json-tests")]
 pub fn generate_dummy_client(block_number: u32) -> GuardedTempResult<Arc<Client>> {
 	let dir = RandomTempPath::new();

@@ -145,11 +149,12 @@ pub fn generate_dummy_client(block_number: u32) -> GuardedTempResult<Arc<Client>
 	client.import_verified_blocks(&IoChannel::disconnected());

 	GuardedTempResult::<Arc<Client>> {
-		temp: dir,
+		_temp: dir,
 		result: client
 	}
 }

+#[cfg(feature = "json-tests")]
 pub fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> GuardedTempResult<Arc<Client>> {
 	let dir = RandomTempPath::new();
 	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
@@ -162,7 +167,7 @@ pub fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> GuardedTempResult<Arc<
 	client.import_verified_blocks(&IoChannel::disconnected());

 	GuardedTempResult::<Arc<Client>> {
-		temp: dir,
+		_temp: dir,
 		result: client
 	}
 }
@@ -175,7 +180,7 @@ pub fn generate_dummy_blockchain(block_number: u32) -> GuardedTempResult<BlockCh
 	}

 	GuardedTempResult::<BlockChain> {
-		temp: temp,
+		_temp: temp,
 		result: bc
 	}
 }
@@ -188,7 +193,7 @@ pub fn generate_dummy_blockchain_with_extra(block_number: u32) -> GuardedTempRes
 	}

 	GuardedTempResult::<BlockChain> {
-		temp: temp,
+		_temp: temp,
 		result: bc
 	}
 }
@@ -198,7 +203,7 @@ pub fn generate_dummy_empty_blockchain() -> GuardedTempResult<BlockChain> {
 	let bc = BlockChain::new(&create_unverifiable_block(0, H256::zero()), temp.as_path());

 	GuardedTempResult::<BlockChain> {
-		temp: temp,
+		_temp: temp,
 		result: bc
 	}
 }
@@ -208,7 +213,7 @@ pub fn get_temp_journal_db() -> GuardedTempResult<JournalDB> {
 	let db = DB::open_default(temp.as_str()).unwrap();
 	let journal_db = JournalDB::new(db);
 	GuardedTempResult {
-		temp: temp,
+		_temp: temp,
 		result: journal_db
 	}
 }
@@ -217,7 +222,7 @@ pub fn get_temp_state() -> GuardedTempResult<State> {
 	let temp = RandomTempPath::new();
 	let journal_db = get_temp_journal_db_in(temp.as_path());
 	GuardedTempResult {
-		temp: temp,
+		_temp: temp,
 		result: State::new(journal_db, U256::from(0u8))
 	}
 }
@@ -246,6 +251,7 @@ pub fn get_good_dummy_block() -> Bytes {
 	create_test_block(&block_header)
 }

+#[cfg(feature = "json-tests")]
 pub fn get_bad_state_dummy_block() -> Bytes {
 	let mut block_header = Header::new();
 	let test_spec = get_test_spec();
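The `temp` to `_temp` rename (together with swapping `#[allow(dead_code)]` for `#[cfg(test)]`) keeps the temporary-directory guard alive without tripping the unused-field lint. The pattern in isolation, as a self-contained sketch independent of the helpers above:

```rust
use std::fs;
use std::path::PathBuf;

/// Deletes its directory when dropped, much like RandomTempPath above.
struct DirGuard(PathBuf);

impl Drop for DirGuard {
    fn drop(&mut self) {
        // Best-effort cleanup; errors are ignored on purpose.
        let _ = fs::remove_dir_all(&self.0);
    }
}

/// The result stays usable while `_guard` silently pins the directory's
/// lifetime to the result's lifetime; the leading underscore documents
/// that the field is never read.
struct Guarded<T> {
    result: T,
    _guard: DirGuard,
}
```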
@@ -1,11 +1 @@
-#[macro_use]
-mod test_common;
-
-mod transaction;
-mod executive;
-mod state;
-mod client;
-mod chain;
 pub mod helpers;
-mod homestead_state;
-mod homestead_chain;
@@ -1,14 +1,16 @@
+//! Transaction data structure.
+
 use util::*;
 use basic_types::*;
 use error::*;
 use evm::Schedule;

 #[derive(Debug, Clone)]
-/// TODO [Gav Wood] Please document me
+/// Transaction action type.
 pub enum Action {
-	/// TODO [Gav Wood] Please document me
+	/// Create creates new contract.
 	Create,
-	/// TODO [debris] Please document me
+	/// Calls contract at given address.
 	Call(Address),
 }

@@ -20,25 +22,25 @@ impl Default for Action {
 /// or contract creation operation.
 #[derive(Default, Debug, Clone)]
 pub struct Transaction {
-	/// TODO [debris] Please document me
+	/// Nonce.
 	pub nonce: U256,
-	/// TODO [debris] Please document me
+	/// Gas price.
 	pub gas_price: U256,
-	/// TODO [debris] Please document me
+	/// Gas paid up front for transaction execution.
 	pub gas: U256,
-	/// TODO [debris] Please document me
+	/// Action, can be either call or contract create.
 	pub action: Action,
-	/// TODO [debris] Please document me
+	/// Transfered value.
 	pub value: U256,
-	/// TODO [Gav Wood] Please document me
+	/// Transaction data.
 	pub data: Bytes,

 	// signature
-	/// TODO [Gav Wood] Please document me
+	/// The V field of the signature, either 27 or 28; helps describe the point on the curve.
 	pub v: u8,
-	/// TODO [Gav Wood] Please document me
+	/// The R field of the signature; helps describe the point on the curve.
 	pub r: U256,
-	/// TODO [debris] Please document me
+	/// The S field of the signature; helps describe the point on the curve.
 	pub s: U256,

 	hash: RefCell<Option<H256>>,
@@ -46,7 +48,9 @@ pub struct Transaction {
 }

 impl Transaction {
-	/// TODO [Gav Wood] Please document me
+	/// Create a new transaction.
+	#[cfg(test)]
+	#[cfg(feature = "json-tests")]
 	pub fn new() -> Self {
 		Transaction {
 			nonce: x!(0),
@@ -62,24 +66,9 @@ impl Transaction {
 			sender: RefCell::new(None),
 		}
 	}
-	/// Create a new message-call transaction.
-	pub fn new_call(to: Address, value: U256, data: Bytes, gas: U256, gas_price: U256, nonce: U256) -> Transaction {
-		Transaction {
-			nonce: nonce,
-			gas_price: gas_price,
-			gas: gas,
-			action: Action::Call(to),
-			value: value,
-			data: data,
-			v: 0,
-			r: x!(0),
-			s: x!(0),
-			hash: RefCell::new(None),
-			sender: RefCell::new(None),
-		}
-	}

 	/// Create a new contract-creation transaction.
+	#[cfg(test)]
 	pub fn new_create(value: U256, data: Bytes, gas: U256, gas_price: U256, nonce: U256) -> Transaction {
 		Transaction {
 			nonce: nonce,
@@ -96,19 +85,6 @@ impl Transaction {
 		}
 	}

-	/// Get the nonce of the transaction.
-	pub fn nonce(&self) -> &U256 { &self.nonce }
-	/// Get the gas price of the transaction.
-	pub fn gas_price(&self) -> &U256 { &self.gas_price }
-	/// Get the gas of the transaction.
-	pub fn gas(&self) -> &U256 { &self.gas }
-	/// Get the action of the transaction (Create or Call).
-	pub fn action(&self) -> &Action { &self.action }
-	/// Get the value of the transaction.
-	pub fn value(&self) -> &U256 { &self.value }
-	/// Get the data of the transaction.
-	pub fn data(&self) -> &Bytes { &self.data }
-
 	/// Append object into RLP stream, optionally with or without the signature.
 	pub fn rlp_append_opt(&self, s: &mut RlpStream, with_seal: Seal) {
 		s.begin_list(6 + match with_seal { Seal::With => 3, _ => 0 });
@@ -179,11 +155,6 @@ impl Transaction {
 		}
 	}

-	/// Note that some fields have changed. Resets the memoised hash.
-	pub fn note_dirty(&self) {
-		*self.hash.borrow_mut() = None;
-	}
-
 	/// 0 is `v` is 27, 1 if 28, and 4 otherwise.
 	pub fn standard_v(&self) -> u8 { match self.v { 27 => 0, 28 => 1, _ => 4 } }

@@ -216,6 +187,7 @@ impl Transaction {
 	}

 	/// Signs the transaction as coming from `sender`.
+	#[cfg(test)]
 	pub fn signed(self, secret: &Secret) -> Transaction { let mut r = self; r.sign(secret); r }

 	/// Get the transaction cost in gas for the given params.
@@ -241,6 +213,9 @@ impl Transaction {
 	}

 	/// Do basic validation, checking for valid signature and minimum gas,
+	// TODO: consider use in block validation.
+	#[cfg(test)]
+	#[cfg(feature = "json-tests")]
 	pub fn validate(self, schedule: &Schedule, require_low: bool) -> Result<Transaction, Error> {
 		if require_low && !ec::is_low_s(&self.s) {
 			return Err(Error::Util(UtilError::Crypto(CryptoError::InvalidSignature)));
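After this hunk the only constructors left are `new()` and `new_create()`, both test-only, and signing goes through `signed()`. A sketch of the test-side flow; the argument values are arbitrary, and `Secret` is the ethcore-util crypto type referenced in the `signed` signature above:

```rust
#[cfg(test)]
fn signed_create(secret: &Secret) -> Transaction {
    // new_create(value, data, gas, gas_price, nonce), as declared above.
    let tx = Transaction::new_create(
        U256::from(0u64),             // value
        vec![0x60, 0x01, 0x60, 0x02], // init code, arbitrary bytes
        U256::from(100_000u64),       // gas
        U256::from(1u64),             // gas_price
        U256::from(0u64));            // nonce
    // signed() consumes the transaction and returns it signed with `secret`,
    // i.e. with the v, r and s fields populated.
    tx.signed(secret)
}
```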
@@ -154,7 +154,7 @@ pub fn verify_block_final(expected: &Header, got: &Header) -> Result<(), Error>
 		return Err(From::from(BlockError::InvalidStateRoot(Mismatch { expected: expected.state_root.clone(), found: got.state_root.clone() })))
 	}
 	if expected.receipts_root != got.receipts_root {
-		return Err(From::from(BlockError::InvalidReceiptsStateRoot(Mismatch { expected: expected.receipts_root.clone(), found: got.receipts_root.clone() })))
+		return Err(From::from(BlockError::InvalidReceiptsRoot(Mismatch { expected: expected.receipts_root.clone(), found: got.receipts_root.clone() })))
 	}
 	Ok(())
 }
hook.sh
@@ -1,3 +1,3 @@
 #!/bin/sh
-echo "#!/bin/sh\ncargo test" >> ./.git/hooks/pre-push
+echo "#!/bin/sh\ncargo test -p ethcore" >> ./.git/hooks/pre-push
 chmod +x ./.git/hooks/pre-push
|
install-deps.sh  (new executable file, 466 lines)
@ -0,0 +1,466 @@
#!/usr/bin/env bash

function run_installer()
{
	####### Init vars

	HOMEBREW_PREFIX=/usr/local
	HOMEBREW_CACHE=/Library/Caches/Homebrew
	HOMEBREW_REPO=https://github.com/Homebrew/homebrew
	OSX_REQUIERED_VERSION="10.7.0"

	declare OS_TYPE
	declare OSX_VERSION
	declare GIT_PATH
	declare RUBY_PATH
	declare BREW_PATH
	declare INSTALL_FILES=""

	errorMessages=""
	isOsVersion=false
	isGit=false
	isRuby=false
	isBrew=false
	canContinue=true
	depCount=0
	depFound=0

	####### Setup colors

	red=`tput setaf 1`
	green=`tput setaf 2`
	yellow=`tput setaf 3`
	blue=`tput setaf 4`
	magenta=`tput setaf 5`
	cyan=`tput setaf 6`
	white=`tput setaf 7`
	b=`tput bold`
	u=`tput sgr 0 1`
	ul=`tput smul`
	xl=`tput rmul`
	stou=`tput smso`
	xtou=`tput rmso`
	dim=`tput dim`
	reverse=`tput rev`
	reset=`tput sgr0`

	function head() {
		echo "${blue}${b}==>${white} $1${reset}"
	}

	function info() {
		echo "${blue}${b}==>${reset} $1"
	}

	function successHeading() {
		echo "${green}${b}==> $1${reset}"
	}

	function success() {
		echo "${green}${b}==>${reset}${green} $1${reset}"
	}

	function error() {
		echo "${red}==> ${u}${b}${red}$1${reset}"
	}

	function smallError() {
		echo "${red}==>${reset} $1"
	}

	function green() {
		echo "${green}$1${reset}"
	}

	function red() {
		echo "${red}$1${reset}"
	}

	function check() {
		echo "${green}${bold} ✓${reset}  $1${reset}"
	}

	function uncheck() {
		echo "${red}${bold} ✘${reset}  $1${reset}"
	}

	####### Setup methods

	function wait_for_user() {
		while :
		do
			read -p "${blue}==>${reset} $1 [Y/n] " imp
			case $imp in
				[yY] ) echo; break ;;
				'' ) echo; break ;;
				[nN] ) abortInstall "${red}==>${reset} Process stopped by user. To resume the install run the one-liner command again." ;;
				* ) echo "Unrecognized option provided. Please provide either 'Y' or 'N'";
			esac
		done
	}

	function exe() {
		echo "\$ $@"; "$@"
	}

	function detectOS() {
		if [[ "$OSTYPE" == "linux-gnu" ]]
		then
			OS_TYPE="linux"
			get_linux_dependencies
		elif [[ "$OSTYPE" == "darwin"* ]]
		then
			OS_TYPE="osx"
			get_osx_dependencies
		else
			OS_TYPE="win"
			abortInstall "${red}==>${reset} ${b}OS not supported:${reset} parity one-liner currently support OS X and Linux.\nFor instructions on installing parity on other platforms please visit ${u}${blue}http://ethcore.io/${reset}"
		fi

		echo

		if [[ $depCount == $depFound ]]
		then
			green "Found all dependencies ($depFound/$depCount)"
		else
			if [[ $canContinue == true ]]
			then
				red "Some dependencies are missing ($depFound/$depCount)"
			elif [[ $canContinue == false && $depFound == 0 ]]
			then
				red "All dependencies are missing and cannot be auto-installed ($depFound/$depCount)"
				abortInstall "$errorMessages";
			elif [[ $canContinue == false ]]
			then
				red "Some dependencies which cannot be auto-installed are missing ($depFound/$depCount)"
				abortInstall "$errorMessages";
			fi
		fi
	}

	function get_osx_dependencies()
	{
		macos_version
		find_git
		find_ruby
		find_brew
	}

	function macos_version()
	{
		declare -a reqVersion
		declare -a localVersion

		depCount=$((depCount+1))
		OSX_VERSION=`/usr/bin/sw_vers -productVersion 2>/dev/null`

		if [ -z "$OSX_VERSION" ]
		then
			uncheck "OS X version not supported 🔥"
			isOsVersion=false
			canContinue=false
		else
			IFS='.' read -a localVersion <<< "$OSX_VERSION"
			IFS='.' read -a reqVersion <<< "$OSX_REQUIERED_VERSION"

			if (( ${reqVersion[0]} <= ${localVersion[0]} )) && (( ${reqVersion[1]} <= ${localVersion[1]} ))
			then
				check "OS X Version ${OSX_VERSION}"
				isOsVersion=true
				depFound=$((depFound+1))
				return
			else
				uncheck "OS X version not supported"
				isOsVersion=false
				canContinue=false
			fi
		fi

		errorMessages+="${red}==>${reset} ${b}Mac OS version too old:${reset} eth requires OS X version ${red}$OSX_REQUIERED_VERSION${reset} at least in order to run.\n"
		errorMessages+="    Please update the OS and reload the install process.\n"
	}

	function find_eth()
	{
		ETH_PATH=`which eth 2>/dev/null`

		if [[ -f $ETH_PATH ]]
		then
			check "Found eth: $ETH_PATH"
			echo "$($ETH_PATH -V)"
			isEth=true
		else
			uncheck "Eth is missing"
			isEth=false
		fi
	}

	function find_git()
	{
		depCount=$((depCount+1))

		GIT_PATH=`which git 2>/dev/null`

		if [[ -f $GIT_PATH ]]
		then
			check "$($GIT_PATH --version)"
			isGit=true
			depFound=$((depFound+1))
		else
			uncheck "Git is missing"
			isGit=false
		fi
	}

	function find_ruby()
	{
		depCount=$((depCount+1))

		RUBY_PATH=`which ruby 2>/dev/null`

		if [[ -f $RUBY_PATH ]]
		then
			RUBY_VERSION=`ruby -e "print RUBY_VERSION"`
			check "Ruby ${RUBY_VERSION}"
			isRuby=true
			depFound=$((depFound+1))
		else
			uncheck "Ruby is missing 🔥"
			isRuby=false
			canContinue=false
			errorMessages+="${red}==>${reset} ${b}Couldn't find Ruby:${reset} Brew requires Ruby which could not be found.\n"
			errorMessages+="    Please install Ruby using these instructions ${u}${blue}https://www.ruby-lang.org/en/documentation/installation/${reset}.\n"
		fi
	}

	function find_brew()
	{
		BREW_PATH=`which brew 2>/dev/null`

		if [[ -f $BREW_PATH ]]
		then
			check "$($BREW_PATH -v)"
			isBrew=true
			depFound=$((depFound+1))
		else
			uncheck "Homebrew is missing"
			isBrew=false

			INSTALL_FILES+="${blue}${dim}==> Homebrew:${reset}\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/bin/brew\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/Library\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/share/man/man1/brew.1\n"
		fi

		depCount=$((depCount+1))
	}

	function install_brew()
	{
		if [[ $isBrew == false ]]
		then
			head "Installing Homebrew"

			if [[ $isRuby == true ]]
			then
				ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
			else
				cd /usr

				if [[ ! -d $HOMEBREW_PREFIX ]]
				then
					sudo mkdir $HOMEBREW_PREFIX
					sudo chmod g+rwx $HOMEBREW_PREFIX
				fi

				if [[ ! -d $HOMEBREW_CACHE ]]
				then
					sudo mkdir $HOMEBREW_CACHE
					sudo chmod g+rwx $HOMEBREW_CACHE
				fi

				DEVELOPER_DIR=`/usr/bin/xcode-select -print-path 2>/dev/null`

				if [[ ! $(ls -A $DEVELOPER_DIR) || ! -f $DEVELOPER_DIR/usr/bin/git ]]
				then
					info "Installing the Command Line Tools (expect a GUI popup):"
					sudo /usr/bin/xcode-select --install

					echo "Press any key when the installation has completed"
				fi

				cd $HOMEBREW_PREFIX

				bash -o pipefail -c "curl -fsSL ${HOMEBREW_REPO}/tarball/master | tar xz -m --strip 1"
			fi

			find_brew
			echo

			if [[ $isBrew == false ]]
			then
				abortInstall "Couldn't install brew"
			fi
		fi
	}

	function osx_installer()
	{
		osx_dependency_installer

		info "Updating brew"
		exe brew update
		echo

		info "Installing rocksdb"
		exe brew install rocksdb
		info "Installing multirust"
		exe brew install multirust
		sudo multirust update nightly
		sudo multirust default nightly
		echo
	}

	function osx_dependency_installer()
	{
		if [[ $isGit == false ]];
		then
			echo "Installing Git"
		fi

		if [[ $isRuby == false ]];
		then
			echo "Installing Ruby"
		fi

		if [[ $isBrew == false ]];
		then
			install_brew
		fi
	}

	function get_linux_dependencies()
	{
		find_apt
	}

	function find_apt()
	{
		APT_PATH=`which apt-get 2>/dev/null`

		if [[ -f $APT_PATH ]]
		then
			check "apt-get"
			echo "$($APT_PATH -v)"
			isApt=true
		else
			uncheck "apt-get is missing"
			isApt=false
		fi
	}

	function linux_rocksdb_installer()
	{
		oldpwd=`pwd`
		cd /tmp
		exe git clone --branch v4.1 --depth=1 https://github.com/facebook/rocksdb.git
		cd rocksdb
		exe make shared_lib
		sudo cp -a librocksdb.so* /usr/lib
		sudo ldconfig
		cd /tmp
		rm -rf /tmp/rocksdb
		cd $oldpwd
	}

	function linux_installer()
	{
		info "Installing git"
		sudo apt-get install -q -y git
		echo

		info "Installing rocksdb"
		linux_rocksdb_installer
		echo

		info "Installing multirust"
		curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sudo sh -s -- --yes
		sudo multirust update nightly
		sudo multirust default nightly
		echo
	}

	function install()
	{
		echo
		head "Installing Parity build dependencies"

		if [[ $OS_TYPE == "osx" ]]
		then
			osx_installer
		elif [[ $OS_TYPE == "linux" ]]
		then
			linux_installer
		fi
	}

	function verify_installation()
	{
		info "Verifying installation"
		# find_eth

		# if [[ $isEth == false ]]
		# then
		#	abortInstall
		# fi
	}

	function abortInstall()
	{
		echo
		error "Installation failed"
		echo -e "$1"
		echo
		exit 0
	}

	function finish()
	{
		# echo
		# successHeading "Installation successful!"
		# head "Next steps"
		# info "Run ${cyan}\`\`${reset} to get started.${reset}"
		# echo
		exit 0
	}

	# Check dependencies
	head "Checking OS dependencies"
	detectOS

	echo
	head "In addition to the parity build dependencies, this script will install:"
	echo "$INSTALL_FILES"
	echo

	# Prompt user to continue or abort
	wait_for_user "${b}OK,${reset} let's go!"

	# Install dependencies and eth
	install

	# Check installation
	verify_installation

	# Display goodby message
	finish
}

run_installer
install-parity.sh  (new executable file, 475 lines; shares most of its helpers with install-deps.sh)
@ -0,0 +1,475 @@
#!/usr/bin/env bash

function run_installer()
{
	####### Init vars

	HOMEBREW_PREFIX=/usr/local
	HOMEBREW_CACHE=/Library/Caches/Homebrew
	HOMEBREW_REPO=https://github.com/Homebrew/homebrew
	OSX_REQUIERED_VERSION="10.7.0"

	declare OS_TYPE
	declare OSX_VERSION
	declare GIT_PATH
	declare RUBY_PATH
	declare BREW_PATH
	declare INSTALL_FILES=""

	errorMessages=""
	isOsVersion=false
	isGit=false
	isRuby=false
	isBrew=false
	canContinue=true
	depCount=0
	depFound=0

	####### Setup colors

	red=`tput setaf 1`
	green=`tput setaf 2`
	yellow=`tput setaf 3`
	blue=`tput setaf 4`
	magenta=`tput setaf 5`
	cyan=`tput setaf 6`
	white=`tput setaf 7`
	b=`tput bold`
	u=`tput sgr 0 1`
	ul=`tput smul`
	xl=`tput rmul`
	stou=`tput smso`
	xtou=`tput rmso`
	dim=`tput dim`
	reverse=`tput rev`
	reset=`tput sgr0`

	function head() {
		echo "${blue}${b}==>${white} $1${reset}"
	}

	function info() {
		echo "${blue}${b}==>${reset} $1"
	}

	function successHeading() {
		echo "${green}${b}==> $1${reset}"
	}

	function success() {
		echo "${green}${b}==>${reset}${green} $1${reset}"
	}

	function error() {
		echo "${red}==> ${u}${b}${red}$1${reset}"
	}

	function smallError() {
		echo "${red}==>${reset} $1"
	}

	function green() {
		echo "${green}$1${reset}"
	}

	function red() {
		echo "${red}$1${reset}"
	}

	function check() {
		echo "${green}${bold} ✓${reset}  $1${reset}"
	}

	function uncheck() {
		echo "${red}${bold} ✘${reset}  $1${reset}"
	}

	####### Setup methods

	function wait_for_user() {
		while :
		do
			read -p "${blue}==>${reset} $1 [Y/n] " imp
			case $imp in
				[yY] ) echo; break ;;
				'' ) echo; break ;;
				[nN] ) abortInstall "${red}==>${reset} Process stopped by user. To resume the install run the one-liner command again." ;;
				* ) echo "Unrecognized option provided. Please provide either 'Y' or 'N'";
			esac
		done
	}

	function exe() {
		echo "\$ $@"; "$@"
	}

	function detectOS() {
		if [[ "$OSTYPE" == "linux-gnu" ]]
		then
			OS_TYPE="linux"
			get_linux_dependencies
		elif [[ "$OSTYPE" == "darwin"* ]]
		then
			OS_TYPE="osx"
			get_osx_dependencies
		else
			OS_TYPE="win"
			abortInstall "${red}==>${reset} ${b}OS not supported:${reset} parity one-liner currently support OS X and Linux.\nFor instructions on installing parity on other platforms please visit ${u}${blue}http://ethcore.io/${reset}"
		fi

		echo

		if [[ $depCount == $depFound ]]
		then
			green "Found all dependencies ($depFound/$depCount)"
		else
			if [[ $canContinue == true ]]
			then
				red "Some dependencies are missing ($depFound/$depCount)"
			elif [[ $canContinue == false && $depFound == 0 ]]
			then
				red "All dependencies are missing and cannot be auto-installed ($depFound/$depCount)"
				abortInstall "$errorMessages";
			elif [[ $canContinue == false ]]
			then
				red "Some dependencies which cannot be auto-installed are missing ($depFound/$depCount)"
				abortInstall "$errorMessages";
			fi
		fi
	}

	function get_osx_dependencies()
	{
		macos_version
		find_git
		find_ruby
		find_brew
	}

	function macos_version()
	{
		declare -a reqVersion
		declare -a localVersion

		depCount=$((depCount+1))
		OSX_VERSION=`/usr/bin/sw_vers -productVersion 2>/dev/null`

		if [ -z "$OSX_VERSION" ]
		then
			uncheck "OS X version not supported 🔥"
			isOsVersion=false
			canContinue=false
		else
			IFS='.' read -a localVersion <<< "$OSX_VERSION"
			IFS='.' read -a reqVersion <<< "$OSX_REQUIERED_VERSION"

			if (( ${reqVersion[0]} <= ${localVersion[0]} )) && (( ${reqVersion[1]} <= ${localVersion[1]} ))
			then
				check "OS X Version ${OSX_VERSION}"
				isOsVersion=true
				depFound=$((depFound+1))
				return
			else
				uncheck "OS X version not supported"
				isOsVersion=false
				canContinue=false
			fi
		fi

		errorMessages+="${red}==>${reset} ${b}Mac OS version too old:${reset} eth requires OS X version ${red}$OSX_REQUIERED_VERSION${reset} at least in order to run.\n"
		errorMessages+="    Please update the OS and reload the install process.\n"
	}

	function find_eth()
	{
		ETH_PATH=`which parity 2>/dev/null`

		if [[ -f $ETH_PATH ]]
		then
			check "Found parity: $ETH_PATH"
			echo "$($ETH_PATH -V)"
			isEth=true
		else
			uncheck "parity is missing"
			isEth=false
		fi
	}

	function find_git()
	{
		depCount=$((depCount+1))

		GIT_PATH=`which git 2>/dev/null`

		if [[ -f $GIT_PATH ]]
		then
			check "$($GIT_PATH --version)"
			isGit=true
			depFound=$((depFound+1))
		else
			uncheck "Git is missing"
			isGit=false
		fi
	}

	function find_ruby()
	{
		depCount=$((depCount+1))

		RUBY_PATH=`which ruby 2>/dev/null`

		if [[ -f $RUBY_PATH ]]
		then
			RUBY_VERSION=`ruby -e "print RUBY_VERSION"`
			check "Ruby ${RUBY_VERSION}"
			isRuby=true
			depFound=$((depFound+1))
		else
			uncheck "Ruby is missing 🔥"
			isRuby=false
			canContinue=false
			errorMessages+="${red}==>${reset} ${b}Couldn't find Ruby:${reset} Brew requires Ruby which could not be found.\n"
			errorMessages+="    Please install Ruby using these instructions ${u}${blue}https://www.ruby-lang.org/en/documentation/installation/${reset}.\n"
		fi
	}

	function find_brew()
	{
		BREW_PATH=`which brew 2>/dev/null`

		if [[ -f $BREW_PATH ]]
		then
			check "$($BREW_PATH -v)"
			isBrew=true
			depFound=$((depFound+1))
		else
			uncheck "Homebrew is missing"
			isBrew=false

			INSTALL_FILES+="${blue}${dim}==> Homebrew:${reset}\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/bin/brew\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/Library\n"
			INSTALL_FILES+=" ${blue}${dim}➜${reset}  $HOMEBREW_PREFIX/share/man/man1/brew.1\n"
		fi

		depCount=$((depCount+1))
	}

	function install_brew()
	{
		if [[ $isBrew == false ]]
		then
			head "Installing Homebrew"

			if [[ $isRuby == true ]]
			then
				ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
			else
				cd /usr

				if [[ ! -d $HOMEBREW_PREFIX ]]
				then
					sudo mkdir $HOMEBREW_PREFIX
					sudo chmod g+rwx $HOMEBREW_PREFIX
				fi

				if [[ ! -d $HOMEBREW_CACHE ]]
				then
					sudo mkdir $HOMEBREW_CACHE
					sudo chmod g+rwx $HOMEBREW_CACHE
				fi

				DEVELOPER_DIR=`/usr/bin/xcode-select -print-path 2>/dev/null`

				if [[ ! $(ls -A $DEVELOPER_DIR) || ! -f $DEVELOPER_DIR/usr/bin/git ]]
				then
					info "Installing the Command Line Tools (expect a GUI popup):"
					sudo /usr/bin/xcode-select --install

					echo "Press any key when the installation has completed"
				fi

				cd $HOMEBREW_PREFIX

				bash -o pipefail -c "curl -fsSL ${HOMEBREW_REPO}/tarball/master | tar xz -m --strip 1"
			fi

			find_brew
			echo

			if [[ $isBrew == false ]]
			then
				abortInstall "Couldn't install brew"
			fi
		fi
	}

	function osx_installer()
	{
		osx_dependency_installer

		info "Adding ethcore repository"
		exe brew tap ethcore/ethcore git@github.com:ethcore/homebrew-ethcore.git
		echo

		info "Updating brew"
		exe brew update
		echo

		info "Installing parity"
		if [[ $isEth == true ]]
		then
			exe brew reinstall parity
		else
			exe brew install parity
			exe brew linkapps parity
		fi
		echo
	}

	function osx_dependency_installer()
	{
		if [[ $isGit == false ]];
		then
			echo "Installing Git"
		fi

		if [[ $isRuby == false ]];
		then
			echo "Installing Ruby"
		fi

		if [[ $isBrew == false ]];
		then
			install_brew
		fi
	}

	function get_linux_dependencies()
	{
		find_apt
	}

	function find_apt()
	{
		APT_PATH=`which apt-get 2>/dev/null`

		if [[ -f $APT_PATH ]]
		then
			check "apt-get"
			echo "$($APT_PATH -v)"
			isApt=true
		else
			uncheck "apt-get is missing"
			isApt=false
		fi
	}

	function linux_rocksdb_installer()
	{
		oldpwd=`pwd`
		cd /tmp
		exe git clone --branch v4.1 --depth=1 https://github.com/facebook/rocksdb.git
		cd rocksdb
		exe make shared_lib
		sudo cp -a librocksdb.so* /usr/lib
		sudo ldconfig
		cd /tmp
		rm -rf /tmp/rocksdb
		cd $oldpwd
	}

	function linux_installer()
	{
		info "Installing git"
		sudo apt-get install -q -y git
		echo

		info "Installing rocksdb"
		linux_rocksdb_installer
		echo

		info "Installing multirust"
		curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sudo sh -s -- --yes
		sudo multirust update nightly
		sudo multirust default nightly
		echo

		info "Installing parity"
		wget --quiet --output-document=- http://ethcore.io/download/parity.deb | dpkg --install -
	}

	function install()
	{
		echo
		head "Installing Parity build dependencies"

		if [[ $OS_TYPE == "osx" ]]
		then
			osx_installer
		elif [[ $OS_TYPE == "linux" ]]
		then
			linux_installer
		fi
	}

	function verify_installation()
	{
		info "Verifying installation"
		find_eth

		if [[ $isEth == false ]]
		then
			abortInstall
		fi
	}

	function abortInstall()
	{
		echo
		error "Installation failed"
		echo -e "$1"
		echo
		exit 0
	}

	function finish()
	{
		# echo
		# successHeading "Installation successful!"
		# head "Next steps"
		# info "Run ${cyan}\`\`${reset} to get started.${reset}"
		# echo
		exit 0
	}

	# Check dependencies
	head "Checking OS dependencies"
	detectOS

	echo
	head "In addition to the parity build dependencies, this script will install:"
	echo "$INSTALL_FILES"
	echo

	# Prompt user to continue or abort
	wait_for_user "${b}OK,${reset} let's go!"

	# Install dependencies and eth
	install

	# Check installation
	verify_installation

	# Display goodby message
	finish
}

run_installer
@ -13,4 +13,5 @@ ethcore = { path = ".." }
clippy = "0.0.37"
log = "0.3"
env_logger = "0.3"
+time = "0.1.34"
@ -22,6 +22,7 @@ use range_collection::{RangeCollection, ToUsize, FromUsize};
use ethcore::error::*;
use ethcore::block::Block;
use io::SyncIo;
+use time;

impl ToUsize for BlockNumber {
	fn to_usize(&self) -> usize {
@ -61,6 +62,8 @@ const RECEIPTS_PACKET: u8 = 0x10;

const NETWORK_ID: U256 = ONE_U256; //TODO: get this from parent

+const CONNECTION_TIMEOUT_SEC: f64 = 30f64;
+
struct Header {
	/// Header data
	data: Bytes,
@ -138,6 +141,8 @@ struct PeerInfo {
	asking: PeerAsking,
	/// A set of block numbers being requested
	asking_blocks: Vec<BlockNumber>,
+	/// Request timestamp
+	ask_time: f64,
}

/// Blockchain sync handler.
@ -250,6 +255,7 @@ impl ChainSync {
			genesis: try!(r.val_at(4)),
			asking: PeerAsking::Nothing,
			asking_blocks: Vec::new(),
+			ask_time: 0f64,
		};

		trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest, peer.genesis);
@ -408,6 +414,7 @@ impl ChainSync {
		trace!(target: "sync", "{} -> NewBlock ({})", peer_id, h);
		let header_view = HeaderView::new(header_rlp.as_raw());
		// TODO: Decompose block and add to self.headers and self.bodies instead
+		let mut unknown = false;
		if header_view.number() == From::from(self.last_imported_block + 1) {
			match io.chain().import_block(block_rlp.as_raw().to_vec()) {
				Err(ImportError::AlreadyInChain) => {
@ -416,6 +423,10 @@ impl ChainSync {
				Err(ImportError::AlreadyQueued) => {
					trace!(target: "sync", "New block already queued {:?}", h);
				},
+				Err(ImportError::UnknownParent) => {
+					unknown = true;
+					trace!(target: "sync", "New block with unknown parent {:?}", h);
+				},
				Ok(_) => {
					trace!(target: "sync", "New block queued {:?}", h);
				},
@ -426,6 +437,9 @@ impl ChainSync {
			};
		}
		else {
+			unknown = true;
+		}
+		if unknown {
			trace!(target: "sync", "New block unknown {:?}", h);
			//TODO: handle too many unknown blocks
			let difficulty: U256 = try!(r.val_at(1));
@ -795,6 +809,7 @@ impl ChainSync {
			Ok(_) => {
				let mut peer = self.peers.get_mut(&peer_id).unwrap();
				peer.asking = asking;
+				peer.ask_time = time::precise_time_s();
			}
		}
	}
@ -969,6 +984,16 @@ impl ChainSync {
		})
	}

+	/// Handle peer timeouts
+	pub fn maintain_peers(&self, io: &mut SyncIo) {
+		let tick = time::precise_time_s();
+		for (peer_id, peer) in &self.peers {
+			if peer.asking != PeerAsking::Nothing && (tick - peer.ask_time) > CONNECTION_TIMEOUT_SEC {
+				io.disconnect_peer(*peer_id);
+			}
+		}
+	}
+
	/// Maintain other peers. Send out any new blocks and transactions
	pub fn _maintain_sync(&mut self, _io: &mut SyncIo) {
	}
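The maintain_peers addition above is the core of the new timeout handling: any peer with an outstanding request older than CONNECTION_TIMEOUT_SEC is disconnected. A self-contained sketch of that sweep (Peer and PeerAsking below are simplified stand-ins for the crate's types, and the clock is passed in explicitly instead of calling time::precise_time_s()):

use std::collections::HashMap;

const CONNECTION_TIMEOUT_SEC: f64 = 30f64;

#[derive(PartialEq)]
enum PeerAsking { Nothing, BlockHeaders }

struct Peer { asking: PeerAsking, ask_time: f64 }

// Returns the ids of peers whose outstanding request is older than the timeout.
fn timed_out_peers(peers: &HashMap<usize, Peer>, now: f64) -> Vec<usize> {
    peers.iter()
        .filter(|&(_, p)| p.asking != PeerAsking::Nothing && (now - p.ask_time) > CONNECTION_TIMEOUT_SEC)
        .map(|(&id, _)| id)
        .collect()
}

fn main() {
    let mut peers = HashMap::new();
    peers.insert(1, Peer { asking: PeerAsking::BlockHeaders, ask_time: 0.0 });
    peers.insert(2, Peer { asking: PeerAsking::Nothing, ask_time: 0.0 });
    // At t = 31s only peer 1 has an overdue request and would be disconnected.
    assert_eq!(timed_out_peers(&peers, 31.0), vec![1]);
}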
@ -9,6 +9,8 @@ use ethcore::service::SyncMessage;
pub trait SyncIo {
	/// Disable a peer
	fn disable_peer(&mut self, peer_id: PeerId);
+	/// Disconnect peer
+	fn disconnect_peer(&mut self, peer_id: PeerId);
	/// Respond to current request with a packet. Can be called from an IO handler for incoming packet.
	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>;
	/// Send a packet to a peer.
@ -42,6 +44,10 @@ impl<'s, 'h> SyncIo for NetSyncIo<'s, 'h> {
		self.network.disable_peer(peer_id);
	}

+	fn disconnect_peer(&mut self, peer_id: PeerId) {
+		self.network.disconnect_peer(peer_id);
+	}
+
	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError>{
		self.network.respond(packet_id, data)
	}
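The SyncIo change above adds a required disconnect_peer method and forwards it to the network service; the test harness later in this commit gets a no-op implementation. A hedged sketch of that shape with simplified stand-in types (not the crate's real PeerId or network API):

type PeerId = usize;

trait SyncIo {
    fn disable_peer(&mut self, peer_id: PeerId);
    fn disconnect_peer(&mut self, peer_id: PeerId);
}

struct Network { dropped: Vec<PeerId> }

struct NetSyncIo<'a> { network: &'a mut Network }

impl<'a> SyncIo for NetSyncIo<'a> {
    // Forward both calls to the underlying network handle (recorded here for the demo).
    fn disable_peer(&mut self, peer_id: PeerId) { self.network.dropped.push(peer_id); }
    fn disconnect_peer(&mut self, peer_id: PeerId) { self.network.dropped.push(peer_id); }
}

// Test double, mirroring the TestIo change later in this commit: does nothing.
struct TestIo;

impl SyncIo for TestIo {
    fn disable_peer(&mut self, _peer_id: PeerId) {}
    fn disconnect_peer(&mut self, _peer_id: PeerId) {}
}

fn main() {
    let mut net = Network { dropped: Vec::new() };
    {
        let mut io = NetSyncIo { network: &mut net };
        io.disconnect_peer(7);
    }
    assert_eq!(net.dropped, vec![7]);
    let mut test = TestIo;
    test.disconnect_peer(7); // no effect, by design
}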
@ -33,11 +33,13 @@ extern crate log;
extern crate ethcore_util as util;
extern crate ethcore;
extern crate env_logger;
+extern crate time;

use std::ops::*;
use std::sync::*;
use ethcore::client::Client;
use util::network::{NetworkProtocolHandler, NetworkService, NetworkContext, PeerId};
+use util::io::TimerToken;
use chain::ChainSync;
use ethcore::service::SyncMessage;
use io::NetSyncIo;
@ -87,7 +89,8 @@ impl EthSync {
}

impl NetworkProtocolHandler<SyncMessage> for EthSync {
-	fn initialize(&self, _io: &NetworkContext<SyncMessage>) {
+	fn initialize(&self, io: &NetworkContext<SyncMessage>) {
+		io.register_timer(0, 1000).expect("Error registering sync timer");
	}

	fn read(&self, io: &NetworkContext<SyncMessage>, peer: &PeerId, packet_id: u8, data: &[u8]) {
@ -101,6 +104,10 @@ impl NetworkProtocolHandler<SyncMessage> for EthSync {
	fn disconnected(&self, io: &NetworkContext<SyncMessage>, peer: &PeerId) {
		self.sync.write().unwrap().on_peer_aborting(&mut NetSyncIo::new(io, self.chain.deref()), *peer);
	}
+
+	fn timeout(&self, io: &NetworkContext<SyncMessage>, _timer: TimerToken) {
+		self.sync.write().unwrap().maintain_peers(&mut NetSyncIo::new(io, self.chain.deref()));
+	}
}
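initialize() now registers timer 0 with a 1000 ms interval, and the new timeout callback runs maintain_peers on every tick. A small self-contained sketch of that control flow (the Context trait below is a stand-in for the crate's NetworkContext, used only to make the wiring visible):

type TimerToken = usize;

trait Context {
    fn register_timer(&mut self, token: TimerToken, ms: u64);
}

struct Handler { maintenance_runs: u32 }

impl Handler {
    fn initialize<C: Context>(&mut self, io: &mut C) {
        // Fire roughly once a second, mirroring register_timer(0, 1000) in the diff.
        io.register_timer(0, 1000);
    }

    fn timeout(&mut self, _timer: TimerToken) {
        // The real handler runs ChainSync::maintain_peers() here.
        self.maintenance_runs += 1;
    }
}

struct RecordingContext { timers: Vec<(TimerToken, u64)> }

impl Context for RecordingContext {
    fn register_timer(&mut self, token: TimerToken, ms: u64) { self.timers.push((token, ms)); }
}

fn main() {
    let mut ctx = RecordingContext { timers: Vec::new() };
    let mut handler = Handler { maintenance_runs: 0 };
    handler.initialize(&mut ctx);
    handler.timeout(0);
    assert_eq!(ctx.timers, vec![(0, 1000)]);
    assert_eq!(handler.maintenance_runs, 1);
}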
@ -1,104 +0,0 @@  (entire 104-line file removed)
use util::*;
use sync::*;
use spec::Spec;
use error::*;
use std::env;
use client::Client;

/// Message type for external and internal events
#[derive(Clone)]
pub enum SyncMessage {
	/// New block has been imported into the blockchain
	NewChainBlock(Bytes), //TODO: use Cow
	/// A block is ready
	BlockVerified,
}

/// TODO [arkpar] Please document me
pub type NetSyncMessage = NetworkIoMessage<SyncMessage>;

/// Client service setup. Creates and registers client and network services with the IO subsystem.
pub struct ClientService {
	net_service: NetworkService<SyncMessage>,
	client: Arc<Client>,
	sync: Arc<EthSync>,
}

impl ClientService {
	/// Start the service in a separate thread.
	pub fn start(spec: Spec, net_config: NetworkConfiguration) -> Result<ClientService, Error> {
		let mut net_service = try!(NetworkService::start(net_config));
		info!("Starting {}", net_service.host_info());
		info!("Configured for {} using {} engine", spec.name, spec.engine_name);
		let mut dir = env::home_dir().unwrap();
		dir.push(".parity");
		dir.push(H64::from(spec.genesis_header().hash()).hex());
		let client = try!(Client::new(spec, &dir, net_service.io().channel()));
		let sync = EthSync::register(&mut net_service, client.clone());
		let client_io = Arc::new(ClientIoHandler {
			client: client.clone()
		});
		try!(net_service.io().register_handler(client_io));

		Ok(ClientService {
			net_service: net_service,
			client: client,
			sync: sync,
		})
	}

	/// Get the network service.
	pub fn add_node(&mut self, _enode: &str) {
		unimplemented!();
	}

	/// TODO [arkpar] Please document me
	pub fn io(&mut self) -> &mut IoService<NetSyncMessage> {
		self.net_service.io()
	}

	/// TODO [arkpar] Please document me
	pub fn client(&self) -> Arc<Client> {
		self.client.clone()
	}

	/// Get shared sync handler
	pub fn sync(&self) -> Arc<EthSync> {
		self.sync.clone()
	}
}

/// IO interface for the Client handler
struct ClientIoHandler {
	client: Arc<Client>
}

const CLIENT_TICK_TIMER: TimerToken = 0;
const CLIENT_TICK_MS: u64 = 5000;

impl IoHandler<NetSyncMessage> for ClientIoHandler {
	fn initialize(&self, io: &IoContext<NetSyncMessage>) {
		io.register_timer(CLIENT_TICK_TIMER, CLIENT_TICK_MS).expect("Error registering client timer");
	}

	fn timeout(&self, _io: &IoContext<NetSyncMessage>, timer: TimerToken) {
		if timer == CLIENT_TICK_TIMER {
			self.client.tick();
		}
	}

	#[allow(match_ref_pats)]
	#[allow(single_match)]
	fn message(&self, io: &IoContext<NetSyncMessage>, net_message: &NetSyncMessage) {
		if let &UserMessage(ref message) = net_message {
			match message {
				&SyncMessage::BlockVerified => {
					self.client.import_verified_blocks(&io.channel());
				},
				_ => {}, // ignore other messages
			}
		}
	}
}
@ -209,6 +209,9 @@ impl<'p> SyncIo for TestIo<'p> {
	fn disable_peer(&mut self, _peer_id: PeerId) {
	}

+	fn disconnect_peer(&mut self, _peer_id: PeerId) {
+	}
+
	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), UtilError> {
		self.queue.push_back(TestPacket {
			data: data,
@ -83,11 +83,12 @@ impl<'a> fmt::Display for PrettySlice<'a> {
	}
}

-/// TODO [Gav Wood] Please document me
+/// Trait to allow a type to be pretty-printed in `format!`, where unoverridable
+/// defaults cannot otherwise be avoided.
pub trait ToPretty {
-	/// TODO [Gav Wood] Please document me
+	/// Convert a type into a derivative form in order to make `format!` print it prettily.
	fn pretty(&self) -> PrettySlice;
-	/// TODO [Gav Wood] Please document me
+	/// Express the object as a hex string.
	fn to_hex(&self) -> String {
		format!("{}", self.pretty())
	}
@ -110,11 +111,11 @@ impl ToPretty for Bytes {
	}
}

-/// TODO [debris] Please document me
+/// A byte collection reference that can either be a slice or a vector
pub enum BytesRef<'a> {
-	/// TODO [debris] Please document me
+	/// This is a reference to a vector
	Flexible(&'a mut Bytes),
-	/// TODO [debris] Please document me
+	/// This is a reference to a slice
	Fixed(&'a mut [u8])
}

@ -144,11 +145,12 @@ pub type Bytes = Vec<u8>;
/// Slice of bytes to underlying memory
pub trait BytesConvertable {
	// TODO: rename to as_slice
-	/// TODO [Gav Wood] Please document me
+	/// Get the underlying byte-wise representation of the value.
+	/// Deprecated - use `as_slice` instead.
	fn bytes(&self) -> &[u8];
-	/// TODO [Gav Wood] Please document me
+	/// Get the underlying byte-wise representation of the value.
	fn as_slice(&self) -> &[u8] { self.bytes() }
-	/// TODO [Gav Wood] Please document me
+	/// Get a copy of the underlying byte-wise representation.
	fn to_bytes(&self) -> Bytes { self.as_slice().to_vec() }
}
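The ToPretty docs above describe a trait whose to_hex default builds on pretty(). A simplified self-contained sketch of that default-method pattern over plain byte vectors (Pretty is a stand-in for the crate's PrettySlice):

use std::fmt;

// Stand-in for PrettySlice: wraps a byte slice and prints it as lowercase hex.
struct Pretty<'a>(&'a [u8]);

impl<'a> fmt::Display for Pretty<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for b in self.0 {
            write!(f, "{:02x}", b)?;
        }
        Ok(())
    }
}

trait ToPretty {
    fn pretty(&self) -> Pretty;
    // Default method: the hex form is just the Display output of pretty().
    fn to_hex(&self) -> String {
        format!("{}", self.pretty())
    }
}

impl ToPretty for Vec<u8> {
    fn pretty(&self) -> Pretty { Pretty(&self[..]) }
}

fn main() {
    let bytes: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];
    assert_eq!(bytes.to_hex(), "deadbeef");
}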
@ -49,9 +49,9 @@ use sha3::*;
/// index. Their `BloomIndex` can be created from block number and given level.
#[derive(Eq, PartialEq, Hash, Clone, Debug)]
pub struct BloomIndex {
-	/// TODO [debris] Please document me
+	/// Bloom level
	pub level: u8,
-	/// TODO [debris] Please document me
+	/// Filter Index
	pub index: usize,
}
@ -48,7 +48,7 @@ macro_rules! flushln {
	($fmt:expr, $($arg:tt)*) => (flush!(concat!($fmt, "\n"), $($arg)*));
}

-/// TODO [Gav Wood] Please document me
+#[doc(hidden)]
pub fn flush(s: String) {
	::std::io::stdout().write(s.as_bytes()).unwrap();
	::std::io::stdout().flush().unwrap();
@ -6,11 +6,12 @@ use uint::*;
use secp256k1::{key, Secp256k1};
use rand::os::OsRng;

-/// TODO [Gav Wood] Please document me
+/// Secret key for secp256k1 EC operations. 256 bit generic "hash" data.
pub type Secret = H256;
-/// TODO [Gav Wood] Please document me
+/// Public key for secp256k1 EC operations. 512 bit generic "hash" data.
pub type Public = H512;
-/// TODO [Gav Wood] Please document me
+/// Signature for secp256k1 EC operations; encodes two 256-bit curve points
+/// and a third sign bit. 520 bit generic "hash" data.
pub type Signature = H520;

lazy_static! {
@ -38,17 +39,17 @@ impl Signature {
}

#[derive(Debug)]
-/// TODO [arkpar] Please document me
+/// Crypto error
pub enum CryptoError {
-	/// TODO [arkpar] Please document me
+	/// Invalid secret key
	InvalidSecret,
-	/// TODO [arkpar] Please document me
+	/// Invalid public key
	InvalidPublic,
-	/// TODO [arkpar] Please document me
+	/// Invalid EC signature
	InvalidSignature,
-	/// TODO [arkpar] Please document me
+	/// Invalid AES message
	InvalidMessage,
-	/// TODO [arkpar] Please document me
+	/// IO Error
	Io(::std::io::Error),
}
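The new doc comments pin down the sizes involved: Secret is 256 bits, Public 512 bits, and Signature 520 bits (two 256-bit curve points plus a recovery/sign byte, hence the H520 alias). A trivial self-contained check of that arithmetic:

fn main() {
    let (r_bits, s_bits, v_bits) = (256, 256, 8);
    // Two 256-bit curve points plus the sign/recovery byte = 520 bits, i.e. the H520 alias.
    assert_eq!(r_bits + s_bits + v_bits, 520);
    assert_eq!((r_bits + s_bits + v_bits) / 8, 65); // 65 bytes on the wire
}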
@ -133,7 +134,7 @@ impl KeyPair {
	pub fn sign(&self, message: &H256) -> Result<Signature, CryptoError> { ec::sign(&self.secret, message) }
}

-/// TODO [arkpar] Please document me
+/// EC functions
pub mod ec {
	use hash::*;
	use uint::*;
@ -210,12 +211,12 @@ pub mod ec {
	}
}

-/// TODO [arkpar] Please document me
+/// ECDH functions
pub mod ecdh {
	use crypto::*;
	use crypto::{self};

-	/// TODO [arkpar] Please document me
+	/// Agree on a shared secret
	pub fn agree(secret: &Secret, public: &Public, ) -> Result<Secret, CryptoError> {
		use secp256k1::*;
		let context = &crypto::SECP256K1;
@ -231,13 +232,13 @@ pub mod ecdh {
	}
}

-/// TODO [arkpar] Please document me
+/// ECIES function
pub mod ecies {
	use hash::*;
	use bytes::*;
	use crypto::*;

-	/// TODO [arkpar] Please document me
+	/// Encrypt a message with a public key
	pub fn encrypt(public: &Public, plain: &[u8]) -> Result<Bytes, CryptoError> {
		use ::rcrypto::digest::Digest;
		use ::rcrypto::sha2::Sha256;
@ -273,7 +274,7 @@ pub mod ecies {
		Ok(msg)
	}

-	/// TODO [arkpar] Please document me
+	/// Decrypt a message with a secret key
	pub fn decrypt(secret: &Secret, encrypted: &[u8]) -> Result<Bytes, CryptoError> {
		use ::rcrypto::digest::Digest;
		use ::rcrypto::sha2::Sha256;
@ -339,20 +340,20 @@ pub mod ecies {
	}
}

-/// TODO [arkpar] Please document me
+/// AES encryption
pub mod aes {
	use ::rcrypto::blockmodes::*;
	use ::rcrypto::aessafe::*;
	use ::rcrypto::symmetriccipher::*;
	use ::rcrypto::buffer::*;

-	/// TODO [arkpar] Please document me
+	/// Encrypt a message
	pub fn encrypt(k: &[u8], iv: &[u8], plain: &[u8], dest: &mut [u8]) {
		let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
		encryptor.encrypt(&mut RefReadBuffer::new(plain), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
	}

-	/// TODO [arkpar] Please document me
+	/// Decrypt a message
	pub fn decrypt(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) {
		let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
		encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
@ -6,36 +6,36 @@ use rlp::DecoderError;
use io;

#[derive(Debug)]
-/// TODO [Gav Wood] Please document me
+/// Error in database subsystem.
pub enum BaseDataError {
-	/// TODO [Gav Wood] Please document me
+	/// An entry was removed more times than inserted.
	NegativelyReferencedHash,
}

#[derive(Debug)]
/// General error type which should be capable of representing all errors in ethcore.
pub enum UtilError {
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the crypto utility subsystem.
	Crypto(::crypto::CryptoError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the Rust standard library's IO subsystem.
	StdIo(::std::io::Error),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning our IO utility subsystem.
	Io(io::IoError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the network address parsing subsystem.
	AddressParse(::std::net::AddrParseError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the network address resolution subsystem.
	AddressResolve(Option<::std::io::Error>),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the hex conversion logic.
	FromHex(FromHexError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the database abstraction logic.
	BaseData(BaseDataError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the network subsystem.
	Network(NetworkError),
-	/// TODO [Gav Wood] Please document me
+	/// Error concerning the RLP decoder.
	Decoder(DecoderError),
-	/// TODO [Gav Wood] Please document me
+	/// Miscellaneous error described by a string.
	SimpleString(String),
-	/// TODO [Gav Wood] Please document me
+	/// Error from a bad input size being given for the needed output.
	BadSize,
}
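The variants above each wrap a subsystem-specific error. The usual way such a catch-all enum is consumed is through From conversions so callers can bubble errors up with try!/?. A self-contained sketch of that pattern with a reduced two-variant enum (not the crate's real UtilError):

#[derive(Debug)]
enum UtilError {
    StdIo(std::io::Error),
    SimpleString(String),
}

// The From impl is what lets `?` lift an io::Error into the catch-all enum.
impl From<std::io::Error> for UtilError {
    fn from(e: std::io::Error) -> Self { UtilError::StdIo(e) }
}

fn read_config(path: &str) -> Result<String, UtilError> {
    let s = std::fs::read_to_string(path)?; // io::Error converted automatically
    if s.is_empty() {
        return Err(UtilError::SimpleString("empty config".into()));
    }
    Ok(s)
}

fn main() {
    match read_config("/definitely/not/here") {
        Err(UtilError::StdIo(e)) => println!("io error, as expected: {}", e),
        other => println!("unexpected: {:?}", other),
    }
}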
@ -9,8 +9,8 @@ macro_rules! xjson {
	}
}

-/// TODO [Gav Wood] Please document me
+/// Trait allowing conversion from a JSON value.
pub trait FromJson {
-	/// TODO [Gav Wood] Please document me
+	/// Convert a JSON value to an instance of this type.
	fn from_json(json: &Json) -> Self;
}
@ -15,35 +15,35 @@ use serde;
///
/// Note: types implementing `FixedHash` must be also `BytesConvertable`.
pub trait FixedHash: Sized + BytesConvertable + Populatable + FromStr + Default {
-	/// TODO [Gav Wood] Please document me
+	/// Create a new, zero-initialised, instance.
	fn new() -> Self;
	/// Synonym for `new()`. Prefer to new as it's more readable.
	fn zero() -> Self;
-	/// TODO [debris] Please document me
+	/// Create a new, cryptographically random, instance.
	fn random() -> Self;
-	/// TODO [debris] Please document me
+	/// Assign self have a cryptographically random value.
	fn randomize(&mut self);
-	/// TODO [arkpar] Please document me
+	/// Get the size of this object in bytes.
-	fn size() -> usize;
+	fn len() -> usize;
-	/// TODO [arkpar] Please document me
+	/// Convert a slice of bytes of length `len()` to an instance of this type.
	fn from_slice(src: &[u8]) -> Self;
-	/// TODO [arkpar] Please document me
+	/// Assign self to be of the same value as a slice of bytes of length `len()`.
	fn clone_from_slice(&mut self, src: &[u8]) -> usize;
-	/// TODO [Gav Wood] Please document me
+	/// Copy the data of this object into some mutable slice of length `len()`.
	fn copy_to(&self, dest: &mut [u8]);
-	/// TODO [Gav Wood] Please document me
+	/// When interpreting self as a bloom output, augment (bit-wise OR) with the a bloomed version of `b`.
	fn shift_bloomed<'a, T>(&'a mut self, b: &T) -> &'a mut Self where T: FixedHash;
-	/// TODO [debris] Please document me
+	/// Same as `shift_bloomed` except that `self` is consumed and a new value returned.
	fn with_bloomed<T>(mut self, b: &T) -> Self where T: FixedHash { self.shift_bloomed(b); self }
-	/// TODO [Gav Wood] Please document me
+	/// Bloom the current value using the bloom parameter `m`.
	fn bloom_part<T>(&self, m: usize) -> T where T: FixedHash;
-	/// TODO [debris] Please document me
+	/// Check to see whether this hash, interpreted as a bloom, contains the value `b` when bloomed.
	fn contains_bloomed<T>(&self, b: &T) -> bool where T: FixedHash;
-	/// TODO [arkpar] Please document me
+	/// Returns `true` if all bits set in `b` are also set in `self`.
	fn contains<'a>(&'a self, b: &'a Self) -> bool;
-	/// TODO [debris] Please document me
+	/// Returns `true` if no bits are set.
	fn is_zero(&self) -> bool;
-	/// Return the lowest 8 bytes interpreted as a BigEndian integer.
+	/// Returns the lowest 8 bytes interpreted as a BigEndian integer.
	fn low_u64(&self) -> u64;
}
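The bloom-related docs above (shift_bloomed, contains_bloomed, contains) describe OR-ing bloomed values into an accumulator and then testing whether all bits of a candidate are present. A self-contained toy illustration, with a u64 standing in for the fixed-size hash types and a made-up three-bit "bloom" derivation (an assumption for the toy, not the real scheme):

fn bloom_part(x: u64) -> u64 {
    // Three pseudo-derived bit positions; purely illustrative.
    (1 << (x % 64)) | (1 << ((x / 7) % 64)) | (1 << ((x / 131) % 64))
}

// "contains": every bit set in b must also be set in the accumulator.
fn contains(acc: u64, b: u64) -> bool { acc & b == b }

fn main() {
    let items = [42u64, 1337, 99999];
    let mut acc = 0u64;
    for &i in &items {
        acc |= bloom_part(i); // shift_bloomed
    }
    assert!(items.iter().all(|&i| contains(acc, bloom_part(i)))); // contains_bloomed
    // False positives are possible, but a miss is definitive:
    let absent = 7u64;
    if !contains(acc, bloom_part(absent)) {
        println!("7 definitely was not inserted");
    }
}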
@ -58,7 +58,7 @@ fn clean_0x(s: &str) -> &str {
|
|||||||
macro_rules! impl_hash {
|
macro_rules! impl_hash {
|
||||||
($from: ident, $size: expr) => {
|
($from: ident, $size: expr) => {
|
||||||
#[derive(Eq)]
|
#[derive(Eq)]
|
||||||
/// TODO [Gav Wood] Please document me
|
/// Unformatted binary data of fixed length.
|
||||||
pub struct $from (pub [u8; $size]);
|
pub struct $from (pub [u8; $size]);
|
||||||
|
|
||||||
impl BytesConvertable for $from {
|
impl BytesConvertable for $from {
|
||||||
@ -103,7 +103,7 @@ macro_rules! impl_hash {
|
|||||||
rng.fill_bytes(&mut self.0);
|
rng.fill_bytes(&mut self.0);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn size() -> usize {
|
fn len() -> usize {
|
||||||
$size
|
$size
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -457,12 +457,12 @@ macro_rules! impl_hash {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl $from {
|
impl $from {
|
||||||
/// TODO [Gav Wood] Please document me
|
/// Get a hex representation.
|
||||||
pub fn hex(&self) -> String {
|
pub fn hex(&self) -> String {
|
||||||
format!("{:?}", self)
|
format!("{:?}", self)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// TODO [Gav Wood] Please document me
|
/// Construct new instance equal to the bloomed value of `b`.
|
||||||
pub fn from_bloomed<T>(b: &T) -> Self where T: FixedHash { b.bloom_part($size) }
|
pub fn from_bloomed<T>(b: &T) -> Self where T: FixedHash { b.bloom_part($size) }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -578,25 +578,27 @@ impl<'_> From<&'_ Address> for H256 {
 }
 }

-/// TODO [Gav Wood] Please document me
+/// Convert string `s` to an `H256`. Will panic if `s` is not 64 characters long or if any of
+/// those characters are not 0-9, a-z or A-Z.
 pub fn h256_from_hex(s: &str) -> H256 {
 use std::str::FromStr;
 H256::from_str(s).unwrap()
 }

-/// TODO [Gav Wood] Please document me
+/// Convert `n` to an `H256`, setting the rightmost 8 bytes.
 pub fn h256_from_u64(n: u64) -> H256 {
 use uint::U256;
 H256::from(&U256::from(n))
 }

-/// TODO [Gav Wood] Please document me
+/// Convert string `s` to an `Address`. Will panic if `s` is not 40 characters long or if any of
+/// those characters are not 0-9, a-z or A-Z.
 pub fn address_from_hex(s: &str) -> Address {
 use std::str::FromStr;
 Address::from_str(s).unwrap()
 }

-/// TODO [Gav Wood] Please document me
+/// Convert `n` to an `Address`, setting the rightmost 8 bytes.
 pub fn address_from_u64(n: u64) -> Address {
 let h256 = h256_from_u64(n);
 From::from(h256)
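As a quick illustration of the conversion helpers above (values arbitrary): both calls below produce the same `H256`, since `h256_from_u64` writes the big-endian integer into the rightmost 8 bytes:

    let from_hex = h256_from_hex(&format!("{:064x}", 0x45u64));  // 64 zero-padded hex chars
    let from_int = h256_from_u64(0x45);
    assert_eq!(from_hex, from_int);                              // only the last byte is non-zero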
@@ -42,9 +42,9 @@ mod worker;
 use mio::{EventLoop, Token};

 #[derive(Debug)]
-/// TODO [arkpar] Please document me
+/// IO Error
 pub enum IoError {
-/// TODO [arkpar] Please document me
+/// Low level error from mio crate
 Mio(::std::io::Error),
 }

@@ -78,19 +78,12 @@ pub trait IoHandler<Message>: Send + Sync where Message: Send + Sync + Clone + '
 fn deregister_stream(&self, _stream: StreamToken, _event_loop: &mut EventLoop<IoManager<Message>>) {}
 }

-/// TODO [arkpar] Please document me
 pub use io::service::TimerToken;
-/// TODO [arkpar] Please document me
 pub use io::service::StreamToken;
-/// TODO [arkpar] Please document me
 pub use io::service::IoContext;
-/// TODO [arkpar] Please document me
 pub use io::service::IoService;
-/// TODO [arkpar] Please document me
 pub use io::service::IoChannel;
-/// TODO [arkpar] Please document me
 pub use io::service::IoManager;
-/// TODO [arkpar] Please document me
 pub use io::service::TOKENS_PER_HANDLER;

 #[cfg(test)]
@@ -1,6 +1,6 @@
 use common::*;

-/// TODO [Gav Wood] Please document me
+/// Remove the `"0x"`, if present, from the left of `s`, returning the remaining slice.
 pub fn clean(s: &str) -> &str {
 if s.len() >= 2 && &s[0..2] == "0x" {
 &s[2..]
@@ -107,14 +107,17 @@ impl MemoryDB {
 self.data.get(key)
 }

-/// TODO [Gav Wood] Please document me
+/// Return the internal map of hashes to data, clearing the current state.
 pub fn drain(&mut self) -> HashMap<H256, (Bytes, i32)> {
 let mut data = HashMap::new();
 mem::swap(&mut self.data, &mut data);
 data
 }

-/// TODO [Gav Wood] Please document me
+/// Denote that an existing value has the given key. Used when a key gets removed without
+/// a prior insert and thus has a negative reference with no value.
+///
+/// May safely be called even if the key's value is known, in which case it will be a no-op.
 pub fn denote(&self, key: &H256, value: Bytes) -> &(Bytes, i32) {
 if self.raw(key) == None {
 unsafe {
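A small usage sketch for `drain` above; it hands back the whole hash-to-(value, refcount) map and leaves the database empty. `MemoryDB::new` and the `HashDB`-style `insert` returning the value's hash are assumptions taken from elsewhere in this crate, not from this hunk:

    let mut db = MemoryDB::new();            // assumed constructor
    let key = db.insert(b"dog");             // assumed: insert returns the H256 of the value
    let contents = db.drain();               // take the map, clearing the DB's state
    assert_eq!(contents[&key].1, 1);         // (value bytes, reference count)
    assert!(db.raw(&key).is_none());         // nothing left behind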
@@ -5,13 +5,13 @@ use common::*;
 #[derive(Debug,Clone,PartialEq,Eq)]
 /// Diff type for specifying a change (or not).
 pub enum Diff<T> where T: Eq {
-/// TODO [Gav Wood] Please document me
+/// Both sides are the same.
 Same,
-/// TODO [Gav Wood] Please document me
+/// Left (pre, source) side doesn't include value, right side (post, destination) does.
 Born(T),
-/// TODO [Gav Wood] Please document me
+/// Both sides include data; it changed value between them.
 Changed(T, T),
-/// TODO [Gav Wood] Please document me
+/// Left (pre, source) side does include value, right side (post, destination) does not.
 Died(T),
 }

@@ -32,8 +32,8 @@ impl<T> Diff<T> where T: Eq {
 #[derive(PartialEq,Eq,Clone,Copy)]
 /// Boolean type for clean/dirty status.
 pub enum Filth {
-/// TODO [Gav Wood] Please document me
+/// Data has not been changed.
 Clean,
-/// TODO [Gav Wood] Please document me
+/// Data has been changed.
 Dirty,
 }
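To make the variant semantics above concrete, here is one way a `Diff` could be derived from an optional pre/post pair. This helper is illustrative only and not part of the crate:

    fn diff_of<T: Eq>(pre: Option<T>, post: Option<T>) -> Diff<T> {
        match (pre, post) {
            (None, Some(x)) => Diff::Born(x),                    // only the destination has it
            (Some(x), None) => Diff::Died(x),                    // only the source had it
            (Some(a), Some(b)) if a != b => Diff::Changed(a, b),
            _ => Diff::Same,                                     // equal on both sides
        }
    }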
@@ -216,6 +216,12 @@ pub struct EncryptedConnection {
 }

 impl EncryptedConnection {
+
+/// Get socket token
+pub fn token(&self) -> StreamToken {
+self.connection.token
+}
+
 /// Create an encrypted connection out of the handshake. Consumes a handshake object.
 pub fn new(mut handshake: Handshake) -> Result<EncryptedConnection, UtilError> {
 let shared = try!(crypto::ecdh::agree(handshake.ecdhe.secret(), &handshake.remote_public));
@@ -5,17 +5,17 @@ use rlp::*;
 pub enum DisconnectReason
 {
 DisconnectRequested,
-//TCPError,
-//BadProtocol,
+_TCPError,
+_BadProtocol,
 UselessPeer,
-//TooManyPeers,
-//DuplicatePeer,
-//IncompatibleProtocol,
-//NullIdentity,
-//ClientQuit,
-//UnexpectedIdentity,
-//LocalIdentity,
-//PingTimeout,
+_TooManyPeers,
+_DuplicatePeer,
+_IncompatibleProtocol,
+_NullIdentity,
+_ClientQuit,
+_UnexpectedIdentity,
+_LocalIdentity,
+PingTimeout,
 }

 #[derive(Debug)]
@@ -19,6 +19,7 @@ use io::*;
 use network::NetworkProtocolHandler;
 use network::node::*;
 use network::stats::NetworkStats;
+use network::error::DisconnectReason;

 type Slab<T> = ::slab::Slab<T, usize>;

@@ -108,6 +109,8 @@ pub enum NetworkIoMessage<Message> where Message: Send + Sync + Clone {
 /// Timer delay in milliseconds.
 delay: u64,
 },
+/// Disconnect a peer
+Disconnect(PeerId),
 /// User message
 User(Message),
 }

@@ -181,8 +184,14 @@ impl<'s, Message> NetworkContext<'s, Message> where Message: Send + Sync + Clone
 }

 /// Disable current protocol capability for given peer. If no capabilities left peer gets disconnected.
-pub fn disable_peer(&self, _peer: PeerId) {
+pub fn disable_peer(&self, peer: PeerId) {
 //TODO: remove capability, disconnect if no capabilities left
+self.disconnect_peer(peer);
+}
+
+/// Disconnect peer. Reconnect can be attempted later.
+pub fn disconnect_peer(&self, peer: PeerId) {
+self.io.message(NetworkIoMessage::Disconnect(peer));
 }

 /// Register a new IO timer. 'IoHandler::timeout' will be called with the token.
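From a protocol handler's point of view, `disable_peer` is the polite way to drop a misbehaving peer: it is intended to remove the current capability and, once none remain, fall back to `disconnect_peer`, which simply queues a `NetworkIoMessage::Disconnect` for the host. A minimal sketch (the generic bound mirrors the one shown above; the handler wiring itself is assumed):

    // Illustrative only: called from inside a protocol handler callback.
    fn kick_peer<Message>(io: &NetworkContext<Message>, peer: PeerId)
        where Message: Send + Sync + Clone {
        io.disable_peer(peer);   // currently forwards straight to disconnect_peer(peer)
    }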
@@ -332,6 +341,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
 }

 fn maintain_network(&self, io: &IoContext<NetworkIoMessage<Message>>) {
+self.keep_alive(io);
 self.connect_peers(io);
 }

@@ -343,6 +353,21 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
 self.connections.read().unwrap().iter().any(|e| match *e.lock().unwrap().deref() { ConnectionEntry::Handshake(ref h) => h.id.eq(&id), _ => false })
 }

+fn keep_alive(&self, io: &IoContext<NetworkIoMessage<Message>>) {
+let mut to_kill = Vec::new();
+for e in self.connections.write().unwrap().iter_mut() {
+if let ConnectionEntry::Session(ref mut s) = *e.lock().unwrap().deref_mut() {
+if !s.keep_alive() {
+s.disconnect(DisconnectReason::PingTimeout);
+to_kill.push(s.token());
+}
+}
+}
+for p in to_kill {
+self.kill_connection(p, io);
+}
+}
+
 fn connect_peers(&self, io: &IoContext<NetworkIoMessage<Message>>) {
 struct NodeInfo {
 id: NodeId,
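The host-side `keep_alive` above only collects sessions that report a timeout and kills them; the per-session rule it relies on (added to `Session` later in this diff) is plain nanosecond arithmetic against two 30-second constants. A standalone sketch of that decision:

    const PING_TIMEOUT_SEC: u64 = 30;
    const PING_INTERVAL_SEC: u64 = 30;

    /// Returns (timed_out, should_ping) given the time of the last ping, the time of
    /// the last pong (if any) and the current monotonic time, all in nanoseconds.
    fn keep_alive_decision(ping_ns: u64, pong_ns: Option<u64>, now_ns: u64) -> (bool, bool) {
        let timed_out = match pong_ns {
            Some(pong) => pong - ping_ns > PING_TIMEOUT_SEC * 1_000_000_000,
            None => now_ns - ping_ns > PING_TIMEOUT_SEC * 1_000_000_000,
        };
        let should_ping = !timed_out && now_ns - ping_ns > PING_INTERVAL_SEC * 1_000_000_000;
        (timed_out, should_ping)
    }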
@@ -684,6 +709,15 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
 self.timers.write().unwrap().insert(handler_token, ProtocolTimer { protocol: protocol, token: *token });
 io.register_timer(handler_token, *delay).expect("Error registering timer");
 },
+NetworkIoMessage::Disconnect(ref peer) => {
+if let Some(connection) = self.connections.read().unwrap().get(*peer).cloned() {
+match *connection.lock().unwrap().deref_mut() {
+ConnectionEntry::Handshake(_) => {},
+ConnectionEntry::Session(ref mut s) => { s.disconnect(DisconnectReason::DisconnectRequested); }
+}
+}
+self.kill_connection(*peer, io);
+},
 NetworkIoMessage::User(ref message) => {
 for (p, h) in self.handlers.read().unwrap().iter() {
 h.message(&NetworkContext::new(io, p, None, self.connections.clone()), &message);
@@ -21,7 +21,7 @@ impl<Message> NetworkService<Message> where Message: Send + Sync + Clone + 'stat
 let host = Arc::new(Host::new(config));
 let stats = host.stats().clone();
 let host_info = host.client_version();
-info!("NetworkService::start(): id={:?}", host.client_id());
+info!("Host ID={:?}", host.client_id());
 try!(io_service.register_handler(host));
 Ok(NetworkService {
 io_service: io_service,
@@ -4,10 +4,14 @@ use rlp::*;
 use network::connection::{EncryptedConnection, Packet};
 use network::handshake::Handshake;
 use error::*;
-use io::{IoContext};
+use io::{IoContext, StreamToken};
 use network::error::{NetworkError, DisconnectReason};
 use network::host::*;
 use network::node::NodeId;
+use time;

+const PING_TIMEOUT_SEC: u64 = 30;
+const PING_INTERVAL_SEC: u64 = 30;
+
 /// Peer session over encrypted connection.
 /// When created waits for Hello packet exchange and signals ready state.

@@ -19,6 +23,8 @@ pub struct Session {
 connection: EncryptedConnection,
 /// Session ready flag. Set after successfull Hello packet exchange
 had_hello: bool,
+ping_time_ns: u64,
+pong_time_ns: Option<u64>,
 }

 /// Structure used to report various session events.

@@ -47,6 +53,8 @@ pub struct SessionInfo {
 pub protocol_version: u32,
 /// Peer protocol capabilities
 capabilities: Vec<SessionCapabilityInfo>,
+/// Peer ping delay in milliseconds
+pub ping_ms: Option<u64>,
 }

 #[derive(Debug, PartialEq, Eq)]
@@ -95,10 +103,13 @@ impl Session {
 client_version: String::new(),
 protocol_version: 0,
 capabilities: Vec::new(),
+ping_ms: None,
 },
+ping_time_ns: 0,
+pong_time_ns: None,
 };
 try!(session.write_hello(host));
-try!(session.write_ping());
+try!(session.send_ping());
 Ok(session)
 }

@@ -141,7 +152,7 @@ impl Session {
 while protocol != self.info.capabilities[i].protocol {
 i += 1;
 if i == self.info.capabilities.len() {
-debug!(target: "net", "Unkown protocol: {:?}", protocol);
+debug!(target: "net", "Unknown protocol: {:?}", protocol);
 return Ok(())
 }
 }

@@ -152,6 +163,26 @@ impl Session {
 self.connection.send_packet(&rlp.out())
 }

+/// Keep this session alive. Returns false if ping timeout happened
+pub fn keep_alive(&mut self) -> bool {
+let timed_out = if let Some(pong) = self.pong_time_ns {
+pong - self.ping_time_ns > PING_TIMEOUT_SEC * 1000_000_000
+} else {
+time::precise_time_ns() - self.ping_time_ns > PING_TIMEOUT_SEC * 1000_000_000
+};
+
+if !timed_out && time::precise_time_ns() - self.ping_time_ns > PING_INTERVAL_SEC * 1000_000_000 {
+if let Err(e) = self.send_ping() {
+debug!("Error sending ping message: {:?}", e);
+}
+}
+!timed_out
+}
+
+pub fn token(&self) -> StreamToken {
+self.connection.token()
+}
+
 fn read_packet(&mut self, packet: Packet, host: &HostInfo) -> Result<SessionData, UtilError> {
 if packet.data.len() < 2 {
 return Err(From::from(NetworkError::BadProtocol));
@@ -168,7 +199,12 @@ impl Session {
 },
 PACKET_DISCONNECT => Err(From::from(NetworkError::Disconnect(DisconnectReason::DisconnectRequested))),
 PACKET_PING => {
-try!(self.write_pong());
+try!(self.send_pong());
+Ok(SessionData::None)
+},
+PACKET_PONG => {
+self.pong_time_ns = Some(time::precise_time_ns());
+self.info.ping_ms = Some((self.pong_time_ns.unwrap() - self.ping_time_ns) / 1000_000);
 Ok(SessionData::None)
 },
 PACKET_GET_PEERS => Ok(SessionData::None), //TODO;

@@ -178,7 +214,7 @@
 while packet_id < self.info.capabilities[i].id_offset {
 i += 1;
 if i == self.info.capabilities.len() {
-debug!(target: "net", "Unkown packet: {:?}", packet_id);
+debug!(target: "net", "Unknown packet: {:?}", packet_id);
 return Ok(SessionData::None)
 }
 }

@@ -189,7 +225,7 @@
 Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
 },
 _ => {
-debug!(target: "net", "Unkown packet: {:?}", packet_id);
+debug!(target: "net", "Unknown packet: {:?}", packet_id);
 Ok(SessionData::None)
 }
 }
@@ -255,15 +291,20 @@ impl Session {
 Ok(())
 }

-fn write_ping(&mut self) -> Result<(), UtilError> {
-self.send(try!(Session::prepare(PACKET_PING)))
+/// Send ping packet
+pub fn send_ping(&mut self) -> Result<(), UtilError> {
+try!(self.send(try!(Session::prepare(PACKET_PING))));
+self.ping_time_ns = time::precise_time_ns();
+self.pong_time_ns = None;
+Ok(())
 }

-fn write_pong(&mut self) -> Result<(), UtilError> {
+fn send_pong(&mut self) -> Result<(), UtilError> {
 self.send(try!(Session::prepare(PACKET_PONG)))
 }

-fn disconnect(&mut self, reason: DisconnectReason) -> NetworkError {
+/// Disconnect this session
+pub fn disconnect(&mut self, reason: DisconnectReason) -> NetworkError {
 let mut rlp = RlpStream::new();
 rlp.append(&(PACKET_DISCONNECT as u32));
 rlp.begin_list(1);
@@ -34,7 +34,7 @@ pub struct NibbleSlice<'a> {
 offset_encode_suffix: usize,
 }

-/// TODO [Gav Wood] Please document me
+/// Iterator type for a nibble slice.
 pub struct NibbleSliceIterator<'a> {
 p: &'a NibbleSlice<'a>,
 i: usize,

@@ -77,7 +77,7 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 (r, a.len() + b.len())
 }*/

-/// TODO [Gav Wood] Please document me
+/// Get an iterator for the series of nibbles.
 pub fn iter(&'a self) -> NibbleSliceIterator<'a> {
 NibbleSliceIterator { p: self, i: 0 }
 }

@@ -132,7 +132,7 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 i
 }

-/// TODO [Gav Wood] Please document me
+/// Encode the whole nibble slice in prefixed hex notation, noting whether it `is_leaf`.
 pub fn encoded(&self, is_leaf: bool) -> Bytes {
 let l = self.len();
 let mut r = Bytes::with_capacity(l / 2 + 1);

@@ -145,7 +145,8 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 r
 }

-/// TODO [Gav Wood] Please document me
+/// Encode only the leftmost `n` bytes of the nibble slice in prefixed hex notation,
+/// noting whether it `is_leaf`.
 pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> Bytes {
 let l = min(self.len(), n);
 let mut r = Bytes::with_capacity(l / 2 + 1);
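For orientation, `encoded` above emits the hex-prefix notation used throughout the trie code: the first byte carries the leaf flag (0x20) and, for odd lengths, the first nibble, followed by the packed nibbles. An illustrative sketch; `NibbleSlice::new` is assumed from elsewhere in this module, and the byte values follow the standard hex-prefix scheme rather than anything shown in this hunk:

    let ns = NibbleSlice::new(&[0x01, 0x23]);               // nibbles 0,1,2,3 (even length)
    assert_eq!(ns.encoded(false), vec![0x00, 0x01, 0x23]);  // extension (non-leaf) prefix
    assert_eq!(ns.encoded(true),  vec![0x20, 0x01, 0x23]);  // leaf prefix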
@@ -140,9 +140,9 @@ impl <T>ToBytes for T where T: FixedHash {
 /// Error returned when FromBytes conversation goes wrong
 #[derive(Debug, PartialEq, Eq)]
 pub enum FromBytesError {
-/// TODO [debris] Please document me
+/// Expected more RLP data
 DataIsTooShort,
-/// TODO [debris] Please document me
+/// Extra bytes after the end of the last item
 DataIsTooLong,
 /// Integer-representation is non-canonically prefixed with zero byte(s).
 ZeroPrefixedInt,

@@ -165,7 +165,7 @@ pub type FromBytesResult<T> = Result<T, FromBytesError>;
 ///
 /// TODO: check size of bytes before conversation and return appropriate error
 pub trait FromBytes: Sized {
-/// TODO [debris] Please document me
+/// Create a value from bytes
 fn from_bytes(bytes: &[u8]) -> FromBytesResult<Self>;
 }

@@ -236,7 +236,7 @@ impl_uint_from_bytes!(U128);

 impl <T>FromBytes for T where T: FixedHash {
 fn from_bytes(bytes: &[u8]) -> FromBytesResult<T> {
-match bytes.len().cmp(&T::size()) {
+match bytes.len().cmp(&T::len()) {
 Ordering::Less => return Err(FromBytesError::DataIsTooShort),
 Ordering::Greater => return Err(FromBytesError::DataIsTooLong),
 Ordering::Equal => ()

@@ -246,7 +246,7 @@ impl <T>FromBytes for T where T: FixedHash {
 use std::{mem, ptr};

 let mut res: T = mem::uninitialized();
-ptr::copy(bytes.as_ptr(), res.as_slice_mut().as_mut_ptr(), T::size());
+ptr::copy(bytes.as_ptr(), res.as_slice_mut().as_mut_ptr(), T::len());

 Ok(res)
 }
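The blanket `FixedHash` implementation above reduces to a straight length check, so the error variants documented earlier fall out directly; for example:

    assert_eq!(H256::from_bytes(&[0u8; 31]), Err(FromBytesError::DataIsTooShort));  // one byte short
    assert_eq!(H256::from_bytes(&[0u8; 33]), Err(FromBytesError::DataIsTooLong));   // one byte over
    assert!(H256::from_bytes(&[0u8; 32]).is_ok());                                  // exact fit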
@@ -48,13 +48,13 @@ pub use self::rlpstream::{RlpStream};
 pub use elastic_array::ElasticArray1024;
 use super::hash::H256;

-/// TODO [arkpar] Please document me
+/// The RLP encoded empty data (used to mean "null value").
 pub const NULL_RLP: [u8; 1] = [0x80; 1];
-/// TODO [Gav Wood] Please document me
+/// The RLP encoded empty list.
 pub const EMPTY_LIST_RLP: [u8; 1] = [0xC0; 1];
-/// TODO [arkpar] Please document me
+/// The SHA3 of the RLP encoding of empty data.
 pub const SHA3_NULL_RLP: H256 = H256( [0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21] );
-/// TODO [debris] Please document me
+/// The SHA3 of the RLP encoding of empty list.
 pub const SHA3_EMPTY_LIST_RLP: H256 = H256( [0x1d, 0xcc, 0x4d, 0xe8, 0xde, 0xc7, 0x5d, 0x7a, 0xab, 0x85, 0xb5, 0x67, 0xb6, 0xcc, 0xd4, 0x1a, 0xd3, 0x12, 0x45, 0x1b, 0x94, 0x8a, 0x74, 0x13, 0xf0, 0xa1, 0x42, 0xfd, 0x40, 0xd4, 0x93, 0x47] );

 /// Shortcut function to decode trusted rlp
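The two hash constants are just the Keccak-256 digests of the one-byte encodings above them; a sketch of the relationship, assuming the crate's `sha3()` helper on byte slices (not shown in this hunk):

    assert_eq!(NULL_RLP[0], 0x80);                                  // RLP of empty data
    assert_eq!(EMPTY_LIST_RLP[0], 0xC0);                            // RLP of the empty list
    assert_eq!((&NULL_RLP[..]).sha3(), SHA3_NULL_RLP);              // assumed Hashable-style sha3()
    assert_eq!((&EMPTY_LIST_RLP[..]).sha3(), SHA3_EMPTY_LIST_RLP);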
@@ -3,27 +3,27 @@ use std::error::Error as StdError;
 use rlp::bytes::FromBytesError;

 #[derive(Debug, PartialEq, Eq)]
-/// TODO [debris] Please document me
+/// Error concerning the RLP decoder.
 pub enum DecoderError {
-/// TODO [debris] Please document me
+/// Couldn't convert given bytes to an instance of required type.
 FromBytesError(FromBytesError),
-/// Given data has additional bytes at the end of the valid RLP fragment.
+/// Data has additional bytes at the end of the valid RLP fragment.
 RlpIsTooBig,
-/// TODO [debris] Please document me
+/// Data has too few bytes for valid RLP.
 RlpIsTooShort,
-/// TODO [debris] Please document me
+/// Expect an encoded list, RLP was something else.
 RlpExpectedToBeList,
-/// TODO [Gav Wood] Please document me
+/// Expect encoded data, RLP was something else.
 RlpExpectedToBeData,
-/// TODO [Gav Wood] Please document me
+/// Expected a different size list.
 RlpIncorrectListLen,
-/// TODO [Gav Wood] Please document me
+/// Data length number has a prefixed zero byte, invalid for numbers.
 RlpDataLenWithZeroPrefix,
-/// TODO [Gav Wood] Please document me
+/// List length number has a prefixed zero byte, invalid for numbers.
 RlpListLenWithZeroPrefix,
-/// TODO [debris] Please document me
+/// Non-canonical (longer than necessary) representation used for data or list.
 RlpInvalidIndirection,
-/// Returned when declared length is inconsistent with data specified after
+/// Declared length is inconsistent with data specified after.
 RlpInconsistentLengthAndData
 }

@@ -103,12 +103,12 @@ impl <'a, 'view> Rlp<'a> where 'a: 'view {
 res.unwrap_or_else(|_| panic!())
 }

-/// TODO [debris] Please document me
+/// Decode into an object
 pub fn as_val<T>(&self) -> T where T: RlpDecodable {
 Self::view_as_val(self)
 }

-/// TODO [debris] Please document me
+/// Decode list item at given index into an object
 pub fn val_at<T>(&self, index: usize) -> T where T: RlpDecodable {
 Self::view_as_val(&self.at(index))
 }
@@ -7,15 +7,15 @@ use elastic_array::ElasticArray1024;
 use hash::H256;
 use sha3::*;

-/// TODO [debris] Please document me
+/// Type is able to decode RLP.
 pub trait Decoder: Sized {
-/// TODO [debris] Please document me
+/// Read a value from the RLP into a given type.
 fn read_value<T, F>(&self, f: F) -> Result<T, DecoderError>
 where F: FnOnce(&[u8]) -> Result<T, DecoderError>;

-/// TODO [Gav Wood] Please document me
+/// Get underlying `UntrustedRLP` object.
 fn as_rlp(&self) -> &UntrustedRlp;
-/// TODO [debris] Please document me
+/// Get underlying raw bytes slice.
 fn as_raw(&self) -> &[u8];
 }

@@ -31,17 +31,17 @@ pub trait RlpDecodable: Sized {
 fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder;
 }

-/// TODO [debris] Please document me
+/// A view into RLP encoded data
 pub trait View<'a, 'view>: Sized {
-/// TODO [debris] Please document me
+/// RLP prototype type
 type Prototype;
-/// TODO [debris] Please document me
+/// Payload info type
 type PayloadInfo;
-/// TODO [debris] Please document me
+/// Data type
 type Data;
-/// TODO [debris] Please document me
+/// Item type
 type Item;
-/// TODO [debris] Please document me
+/// Iterator type
 type Iter;

 /// Creates a new instance of `Rlp` reader

@@ -65,10 +65,10 @@ pub trait View<'a, 'view>: Sized {
 /// Get the prototype of the RLP.
 fn prototype(&self) -> Self::Prototype;

-/// TODO [debris] Please document me
+/// Get payload info.
 fn payload_info(&self) -> Self::PayloadInfo;

-/// TODO [debris] Please document me
+/// Get underlying data.
 fn data(&'view self) -> Self::Data;

 /// Returns number of RLP items.

@@ -205,18 +205,18 @@ pub trait View<'a, 'view>: Sized {
 /// ```
 fn iter(&'view self) -> Self::Iter;

-/// TODO [debris] Please document me
+/// Decode data into an object
 fn as_val<T>(&self) -> Result<T, DecoderError> where T: RlpDecodable;

-/// TODO [debris] Please document me
+/// Decode data at given list index into an object
 fn val_at<T>(&self, index: usize) -> Result<T, DecoderError> where T: RlpDecodable;
 }

-/// TODO [debris] Please document me
+/// Raw RLP encoder
 pub trait Encoder {
-/// TODO [debris] Please document me
+/// Write a value represented as bytes
 fn emit_value<E: ByteEncodable>(&mut self, value: &E);
-/// TODO [debris] Please document me
+/// Write raw preencoded data to the output
 fn emit_raw(&mut self, bytes: &[u8]) -> ();
 }
@@ -250,7 +250,7 @@ pub trait RlpEncodable {
 fn rlp_append(&self, s: &mut RlpStream);
 }

-/// TODO [debris] Please document me
+/// RLP encoding stream
 pub trait Stream: Sized {

 /// Initializes instance of empty `Stream`.

@@ -341,7 +341,7 @@ pub trait Stream: Sized {
 /// }
 fn is_finished(&self) -> bool;

-/// TODO [debris] Please document me
+/// Get raw encoded bytes
 fn as_raw(&self) -> &[u8];

 /// Streams out encoded bytes.
@@ -21,21 +21,21 @@ impl OffsetCache {
 }

 #[derive(Debug)]
-/// TODO [debris] Please document me
+/// RLP prototype
 pub enum Prototype {
-/// TODO [debris] Please document me
+/// Empty
 Null,
-/// TODO [debris] Please document me
+/// Value
 Data(usize),
-/// TODO [debris] Please document me
+/// List
 List(usize),
 }

 /// Stores basic information about item
 pub struct PayloadInfo {
-/// TODO [debris] Please document me
+/// Header length in bytes
 pub header_len: usize,
-/// TODO [debris] Please document me
+/// Value length in bytes
 pub value_len: usize,
 }
@@ -6,7 +6,7 @@ use bytes::{BytesConvertable, Populatable};
 use hash::{H256, FixedHash};
 use self::sha3_ext::*;

-/// TODO [Gav Wood] Please document me
+/// Get the SHA3 (i.e. Keccak) hash of the empty bytes string.
 pub const SHA3_EMPTY: H256 = H256( [0xc5, 0xd2, 0x46, 0x01, 0x86, 0xf7, 0x23, 0x3c, 0x92, 0x7e, 0x7d, 0xb2, 0xdc, 0xc7, 0x03, 0xc0, 0xe5, 0x00, 0xb6, 0x53, 0xca, 0x82, 0x27, 0x3b, 0x7b, 0xfa, 0xd8, 0x04, 0x5d, 0x85, 0xa4, 0x70] );

@@ -36,7 +36,7 @@ use heapsize::HeapSizeOf;

 /// Should be used to squeeze collections to certain size in bytes
 pub trait Squeeze {
-/// TODO [debris] Please document me
+/// Try to reduce collection size to `size` bytes
 fn squeeze(&mut self, size: usize);
 }
@@ -1,19 +1,20 @@
 //! Trie interface and implementation.

-/// TODO [Gav Wood] Please document me
+/// Export the trietraits module.
 pub mod trietraits;
+/// Export the standardmap module.
 pub mod standardmap;
-/// TODO [Gav Wood] Please document me
+/// Export the journal module.
 pub mod journal;
-/// TODO [Gav Wood] Please document me
+/// Export the node module.
 pub mod node;
-/// TODO [Gav Wood] Please document me
+/// Export the triedb module.
 pub mod triedb;
-/// TODO [Gav Wood] Please document me
+/// Export the triedbmut module.
 pub mod triedbmut;
-/// TODO [Gav Wood] Please document me
+/// Export the sectriedb module.
 pub mod sectriedb;
-/// TODO [Gav Wood] Please document me
+/// Export the sectriedbmut module.
 pub mod sectriedbmut;

 pub use self::trietraits::*;
@@ -7,13 +7,13 @@ use super::journal::*;
 /// Type of node in the trie and essential information thereof.
 #[derive(Clone, Eq, PartialEq, Debug)]
 pub enum Node<'a> {
-/// TODO [Gav Wood] Please document me
+/// Null trie node; could be an empty root or an empty branch entry.
 Empty,
-/// TODO [Gav Wood] Please document me
+/// Leaf node; has key slice and value. Value may not be empty.
 Leaf(NibbleSlice<'a>, &'a[u8]),
-/// TODO [Gav Wood] Please document me
+/// Extension node; has key slice and node data. Data may not be null.
 Extension(NibbleSlice<'a>, &'a[u8]),
-/// TODO [Gav Wood] Please document me
+/// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data.
 Branch([&'a[u8]; 16], Option<&'a [u8]>)
 }

@@ -7,13 +7,13 @@ use hash::*;

 /// Alphabet to use when creating words for insertion into tries.
 pub enum Alphabet {
-/// TODO [Gav Wood] Please document me
+/// All values are allowed in each byte of the key.
 All,
-/// TODO [Gav Wood] Please document me
+/// Only 6 values ('a' - 'f') are chosen to compose the key.
 Low,
-/// TODO [Gav Wood] Please document me
+/// Quite a few values (around 32) are chosen to compose the key.
 Mid,
-/// TODO [Gav Wood] Please document me
+/// A set of bytes given is used to compose the key.
 Custom(Bytes),
 }
@@ -34,7 +34,7 @@ use super::node::*;
 pub struct TrieDB<'db> {
 db: &'db HashDB,
 root: &'db H256,
-/// TODO [Gav Wood] Please document me
+/// The number of hashes performed so far in operations on this trie.
 pub hash_count: usize,
 }

@@ -40,7 +40,7 @@ use super::trietraits::*;
 pub struct TrieDBMut<'db> {
 db: &'db mut HashDB,
 root: &'db mut H256,
-/// TODO [Gav Wood] Please document me
+/// The number of hashes performed so far in operations on this trie.
 pub hash_count: usize,
 }

@@ -72,7 +72,7 @@ impl<'db> TrieDBMut<'db> {
 // TODO: return Result<Self, TrieError>
 pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> Self {
 if !db.exists(root) {
-flush(format!("Trie root not found {}", root));
+flushln!("Trie root not found {}", root);
 panic!("Trie root not found!");
 }
 TrieDBMut {
@@ -60,20 +60,20 @@ macro_rules! panic_on_overflow {
 }
 }

-/// TODO [Gav Wood] Please document me
+/// Large, fixed-length unsigned integer type.
 pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug + fmt::Display + PartialOrd + Ord + PartialEq + Eq + Hash {

 /// Size of this type.
 const SIZE: usize;

-/// TODO [Gav Wood] Please document me
+/// Returns new instance equalling zero.
 fn zero() -> Self;
-/// TODO [Gav Wood] Please document me
+/// Returns new instance equalling one.
 fn one() -> Self;

-/// TODO [Gav Wood] Please document me
+/// Error type for converting from a decimal string.
 type FromDecStrErr;
-/// TODO [Gav Wood] Please document me
+/// Convert from a decimal string.
 fn from_dec_str(value: &str) -> Result<Self, Self::FromDecStrErr>;

 /// Conversion to u32

@@ -104,26 +104,25 @@ pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug +
 /// Return wrapped eponentation `self**other` and flag if there was an overflow
 fn overflowing_pow(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Add this `Uint` to other returning result and possible overflow
 fn overflowing_add(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Subtract another `Uint` from this returning result and possible overflow
 fn overflowing_sub(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Multiply this `Uint` with other returning result and possible overflow
 fn overflowing_mul(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Divide this `Uint` by other returning result and possible overflow
 fn overflowing_div(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Returns remainder of division of this `Uint` by other and possible overflow
 fn overflowing_rem(self, other: Self) -> (Self, bool);

-/// TODO [debris] Please document me
+/// Returns negation of this `Uint` and overflow (always true)
 fn overflowing_neg(self) -> (Self, bool);

-/// TODO [Gav Wood] Please document me
+/// Shifts this `Uint` and returns overflow
 fn overflowing_shl(self, shift: u32) -> (Self, bool);
 }
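A quick illustration of the overflowing arithmetic documented above, using `U256` (values arbitrary; only trait items shown in this hunk are used):

    let (sum, overflow) = U256::from(10u64).overflowing_add(U256::from(32u64));
    assert_eq!(sum, U256::from(42u64));
    assert!(!overflow);

    // Going below zero wraps around and reports the flag instead of panicking.
    let (_wrapped, underflow) = U256::zero().overflowing_sub(U256::one());
    assert!(underflow);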
@@ -939,12 +938,10 @@ impl From<U256> for u32 {
 }
 }

-/// TODO [Gav Wood] Please document me
+/// Constant value of `U256::zero()` that can be used for a reference saving an additional instance creation.
 pub const ZERO_U256: U256 = U256([0x00u64; 4]);
-/// TODO [Gav Wood] Please document me
+/// Constant value of `U256::one()` that can be used for a reference saving an additional instance creation.
 pub const ONE_U256: U256 = U256([0x01u64, 0x00u64, 0x00u64, 0x00u64]);
-/// TODO [Gav Wood] Please document me
-pub const BAD_U256: U256 = U256([0xffffffffffffffffu64; 4]);

 #[cfg(test)]
 mod tests {
@@ -13,7 +13,7 @@
 /// }
 /// ```
 pub trait SharedPrefix <T> {
-/// TODO [debris] Please document me
+/// Get common prefix length
 fn shared_prefix_len(&self, elem: &[T]) -> usize;
 }
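Concretely, `shared_prefix_len` counts how many leading elements two sequences have in common; for example (assuming the implementation this module provides for byte vectors):

    let v = vec![1u8, 2, 3, 4];
    assert_eq!(v.shared_prefix_len(&[1u8, 2, 9]), 2);   // diverges at the third element
    assert_eq!(v.shared_prefix_len(&[5u8]), 0);         // nothing in common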