commit 0f97edad7c
Merge branch 'master' of github.com:ethcore/parity into ethash
.travis.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
+language: rust
+
+rust:
+- nightly
+
+os:
+- osx
+
+before_script:
+- brew update
+- brew install rocksdb
+
+cache:
+  directories:
+  - $TRAVIS_BUILD_DIR/target
+  - $HOME/.cargo
@@ -24,3 +24,7 @@ num_cpus = "0.2"
 [features]
 jit = ["evmjit"]
 evm_debug = []
+
+[[bin]]
+name = "client"
+path = "src/bin/client/main.rs"
cov.sh (new executable file, 21 lines)
@@ -0,0 +1,21 @@
+#!/bin/sh
+# Installing KCOV under ubuntu
+# https://users.rust-lang.org/t/tutorial-how-to-collect-test-coverages-for-rust-project/650#
+### Install deps
+# $ sudo apt-get install libcurl4-openssl-dev libelf-dev libdw-dev cmake gcc binutils-dev libiberty-dev
+#
+### Compile kcov
+# $ wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz && tar xf master.tar.gz
+# $ cd kcov-master && mkdir build && cd build
+# $ cmake .. && make && sudo make install
+
+### Running coverage
+if ! type kcov > /dev/null; then
+	echo "Install kcov first (details inside this file). Aborting."
+	exit 1
+fi
+
+cargo test --no-run || exit $?
+mkdir -p target/coverage
+kcov --exclude-pattern ~/.multirust --include-pattern src --verify target/coverage target/debug/ethcore*
+xdg-open target/coverage/index.html
@@ -236,7 +236,7 @@ mod tests {
 
 	#[test]
 	fn storage_at() {
-		let mut db = OverlayDB::new_temp();
+		let mut db = MemoryDB::new();
 		let rlp = {
 			let mut a = Account::new_contract(U256::from(69u8));
 			a.set_storage(H256::from(&U256::from(0x00u64)), H256::from(&U256::from(0x1234u64)));
@@ -254,7 +254,7 @@ mod tests {
 
 	#[test]
 	fn note_code() {
-		let mut db = OverlayDB::new_temp();
+		let mut db = MemoryDB::new();
 
 		let rlp = {
 			let mut a = Account::new_contract(U256::from(69u8));
@@ -273,7 +273,7 @@ mod tests {
 	#[test]
 	fn commit_storage() {
 		let mut a = Account::new_contract(U256::from(69u8));
-		let mut db = OverlayDB::new_temp();
+		let mut db = MemoryDB::new();
 		a.set_storage(x!(0), x!(0x1234));
 		assert_eq!(a.storage_root(), None);
 		a.commit_storage(&mut db);
@@ -283,7 +283,7 @@ mod tests {
 	#[test]
 	fn commit_remove_commit_storage() {
 		let mut a = Account::new_contract(U256::from(69u8));
-		let mut db = OverlayDB::new_temp();
+		let mut db = MemoryDB::new();
 		a.set_storage(x!(0), x!(0x1234));
 		a.commit_storage(&mut db);
 		a.set_storage(x!(1), x!(0x1234));
@@ -296,7 +296,7 @@ mod tests {
 	#[test]
 	fn commit_code() {
 		let mut a = Account::new_contract(U256::from(69u8));
-		let mut db = OverlayDB::new_temp();
+		let mut db = MemoryDB::new();
 		a.init_code(vec![0x55, 0x44, 0xffu8]);
 		assert_eq!(a.code_hash(), SHA3_EMPTY);
 		a.commit_code(&mut db);
@@ -5,8 +5,11 @@ use pod_account::*;
 /// Change in existance type.
 // TODO: include other types of change.
 pub enum Existance {
+	/// TODO [Gav Wood] Please document me
 	Born,
+	/// TODO [Gav Wood] Please document me
 	Alive,
+	/// TODO [Gav Wood] Please document me
 	Died,
 }
 
@@ -22,14 +25,20 @@ impl fmt::Display for Existance {
 }
 
 #[derive(Debug,Clone,PartialEq,Eq)]
+/// TODO [Gav Wood] Please document me
 pub struct AccountDiff {
+	/// TODO [Gav Wood] Please document me
 	pub balance: Diff<U256>,	// Allowed to be Same
+	/// TODO [Gav Wood] Please document me
 	pub nonce: Diff<U256>,	// Allowed to be Same
+	/// TODO [Gav Wood] Please document me
 	pub code: Diff<Bytes>,	// Allowed to be Same
+	/// TODO [Gav Wood] Please document me
 	pub storage: BTreeMap<H256, Diff<H256>>,// Not allowed to be Same
 }
 
 impl AccountDiff {
+	/// TODO [Gav Wood] Please document me
 	pub fn existance(&self) -> Existance {
 		match self.balance {
 			Diff::Born(_) => Existance::Born,
@@ -38,6 +47,7 @@ impl AccountDiff {
 		}
 	}
 
+	/// TODO [Gav Wood] Please document me
 	pub fn diff_pod(pre: Option<&PodAccount>, post: Option<&PodAccount>) -> Option<AccountDiff> {
 		match (pre, post) {
 			(None, Some(x)) => Some(AccountDiff {
@@ -30,6 +30,7 @@ pub struct ActionParams {
 }
 
 impl ActionParams {
+	/// TODO [Gav Wood] Please document me
 	pub fn new() -> ActionParams {
 		ActionParams {
 			code_address: Address::new(),
@@ -6,7 +6,10 @@ pub type LogBloom = H2048;
 /// Constant 2048-bit datum for 0. Often used as a default.
 pub static ZERO_LOGBLOOM: LogBloom = H2048([0x00; 256]);
 
+/// TODO [Gav Wood] Please document me
 pub enum Seal {
+	/// TODO [Gav Wood] Please document me
 	With,
+	/// TODO [Gav Wood] Please document me
 	Without,
 }
@@ -1,59 +0,0 @@
-extern crate ethcore_util as util;
-extern crate ethcore;
-extern crate rustc_serialize;
-extern crate log;
-extern crate env_logger;
-
-use std::io::stdin;
-use std::env;
-use log::{LogLevelFilter};
-use env_logger::LogBuilder;
-use util::*;
-use ethcore::client::*;
-use ethcore::service::ClientService;
-use ethcore::ethereum;
-use ethcore::sync::*;
-
-fn setup_log() {
-	let mut builder = LogBuilder::new();
-	builder.filter(None, LogLevelFilter::Info);
-
-	if env::var("RUST_LOG").is_ok() {
-		builder.parse(&env::var("RUST_LOG").unwrap());
-	}
-
-	builder.init().unwrap();
-}
-
-fn main() {
-	setup_log();
-	let spec = ethereum::new_frontier();
-	let mut service = ClientService::start(spec).unwrap();
-	let io_handler = Arc::new(ClientIoHandler { client: service.client() });
-	service.io().register_handler(io_handler).expect("Error registering IO handler");
-	loop {
-		let mut cmd = String::new();
-		stdin().read_line(&mut cmd).unwrap();
-		if cmd == "quit\n" || cmd == "exit\n" || cmd == "q\n" {
-			break;
-		}
-	}
-}
-
-
-struct ClientIoHandler {
-	client: Arc<RwLock<Client>>,
-}
-
-impl IoHandler<NetSyncMessage> for ClientIoHandler {
-	fn initialize(&self, io: &IoContext<NetSyncMessage>) {
-		io.register_timer(0, 5000).expect("Error registering timer");
-	}
-
-	fn timeout(&self, _io: &IoContext<NetSyncMessage>, timer: TimerToken) {
-		if timer == 0 {
-			println!("Chain info: {:?}", self.client.read().unwrap().deref().chain_info());
-		}
-	}
-}
-
src/bin/client/main.rs (new file, 109 lines)
@@ -0,0 +1,109 @@
+extern crate ethcore_util as util;
+extern crate ethcore;
+extern crate rustc_serialize;
+extern crate log;
+extern crate env_logger;
+
+use std::io::stdin;
+use std::env;
+use log::{LogLevelFilter};
+use env_logger::LogBuilder;
+use util::*;
+use ethcore::client::*;
+use ethcore::service::ClientService;
+use ethcore::ethereum;
+use ethcore::blockchain::CacheSize;
+use ethcore::sync::*;
+
+fn setup_log() {
+	let mut builder = LogBuilder::new();
+	builder.filter(None, LogLevelFilter::Info);
+
+	if env::var("RUST_LOG").is_ok() {
+		builder.parse(&env::var("RUST_LOG").unwrap());
+	}
+
+	builder.init().unwrap();
+}
+
+fn main() {
+	setup_log();
+	let spec = ethereum::new_frontier();
+	let mut service = ClientService::start(spec).unwrap();
+	let io_handler = Arc::new(ClientIoHandler { client: service.client(), info: Default::default() });
+	service.io().register_handler(io_handler).expect("Error registering IO handler");
+	loop {
+		let mut cmd = String::new();
+		stdin().read_line(&mut cmd).unwrap();
+		if cmd == "quit\n" || cmd == "exit\n" || cmd == "q\n" {
+			break;
+		}
+	}
+}
+
+struct Informant {
+	chain_info: RwLock<Option<BlockChainInfo>>,
+	cache_info: RwLock<Option<CacheSize>>,
+	report: RwLock<Option<ClientReport>>,
+}
+
+impl Default for Informant {
+	fn default() -> Self {
+		Informant {
+			chain_info: RwLock::new(None),
+			cache_info: RwLock::new(None),
+			report: RwLock::new(None),
+		}
+	}
+}
+
+impl Informant {
+	pub fn tick(&self, client: &Client) {
+		// 5 seconds betwen calls. TODO: calculate this properly.
+		let dur = 5usize;
+
+		let chain_info = client.chain_info();
+		let cache_info = client.cache_info();
+		let report = client.report();
+
+		if let (_, &Some(ref last_cache_info), &Some(ref last_report)) = (self.chain_info.read().unwrap().deref(), self.cache_info.read().unwrap().deref(), self.report.read().unwrap().deref()) {
+			println!("[ {} {} ]---[ {} blk/s | {} tx/s | {} gas/s //···{}···// {} ({}) bl {} ({}) ex ]",
+				chain_info.best_block_number,
+				chain_info.best_block_hash,
+				(report.blocks_imported - last_report.blocks_imported) / dur,
+				(report.transactions_applied - last_report.transactions_applied) / dur,
+				(report.gas_processed - last_report.gas_processed) / From::from(dur),
+				0, // TODO: peers
+				cache_info.blocks,
+				cache_info.blocks as isize - last_cache_info.blocks as isize,
+				cache_info.block_details,
+				cache_info.block_details as isize - last_cache_info.block_details as isize
+			);
+		}
+
+		*self.chain_info.write().unwrap().deref_mut() = Some(chain_info);
+		*self.cache_info.write().unwrap().deref_mut() = Some(cache_info);
+		*self.report.write().unwrap().deref_mut() = Some(report);
+	}
+}
+
+const INFO_TIMER: TimerToken = 0;
+
+struct ClientIoHandler {
+	client: Arc<RwLock<Client>>,
+	info: Informant,
+}
+
+impl IoHandler<NetSyncMessage> for ClientIoHandler {
+	fn initialize(&self, io: &IoContext<NetSyncMessage>) {
+		io.register_timer(INFO_TIMER, 5000).expect("Error registering timer");
+	}
+
+	fn timeout(&self, _io: &IoContext<NetSyncMessage>, timer: TimerToken) {
+		if INFO_TIMER == timer {
+			let client = self.client.read().unwrap();
+			self.info.tick(client.deref());
+		}
+	}
+}
+
src/block.rs (24 lines changed)
@@ -24,9 +24,13 @@ pub struct Block {
 
 /// A set of references to `Block` fields that are publicly accessible.
 pub struct BlockRefMut<'a> {
+	/// TODO [Gav Wood] Please document me
 	pub header: &'a Header,
+	/// TODO [Gav Wood] Please document me
 	pub state: &'a mut State,
+	/// TODO [Gav Wood] Please document me
 	pub archive: &'a Vec<Entry>,
+	/// TODO [Gav Wood] Please document me
 	pub uncles: &'a Vec<Header>,
 }
 
@@ -104,7 +108,7 @@ pub struct SealedBlock {
 
 impl<'x, 'y> OpenBlock<'x, 'y> {
 	/// Create a new OpenBlock ready for transaction pushing.
-	pub fn new<'a, 'b>(engine: &'a Engine, db: OverlayDB, parent: &Header, last_hashes: &'b LastHashes, author: Address, extra_data: Bytes) -> OpenBlock<'a, 'b> {
+	pub fn new<'a, 'b>(engine: &'a Engine, db: JournalDB, parent: &Header, last_hashes: &'b LastHashes, author: Address, extra_data: Bytes) -> OpenBlock<'a, 'b> {
 		let mut r = OpenBlock {
 			block: Block::new(State::from_existing(db, parent.state_root().clone(), engine.account_start_nonce())),
 			engine: engine,
@@ -242,7 +246,7 @@ impl<'x, 'y> ClosedBlock<'x, 'y> {
 	pub fn reopen(self) -> OpenBlock<'x, 'y> { self.open_block }
 
 	/// Drop this object and return the underlieing database.
-	pub fn drain(self) -> OverlayDB { self.open_block.block.state.drop().1 }
+	pub fn drain(self) -> JournalDB { self.open_block.block.state.drop().1 }
 }
 
 impl SealedBlock {
@@ -257,7 +261,7 @@ impl SealedBlock {
 	}
 
 	/// Drop this object and return the underlieing database.
-	pub fn drain(self) -> OverlayDB { self.block.state.drop().1 }
+	pub fn drain(self) -> JournalDB { self.block.state.drop().1 }
 }
 
 impl IsBlock for SealedBlock {
@@ -265,7 +269,7 @@ impl IsBlock for SealedBlock {
 }
 
 /// Enact the block given by block header, transactions and uncles
-pub fn enact<'x, 'y>(header: &Header, transactions: &[Transaction], uncles: &[Header], engine: &'x Engine, db: OverlayDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
+pub fn enact<'x, 'y>(header: &Header, transactions: &[Transaction], uncles: &[Header], engine: &'x Engine, db: JournalDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
 	{
 		let s = State::from_existing(db.clone(), parent.state_root().clone(), engine.account_start_nonce());
 		trace!("enact(): root={}, author={}, author_balance={}\n", s.root(), header.author(), s.balance(&header.author()));
@@ -281,20 +285,20 @@ pub fn enact<'x, 'y>(header: &Header, transactions: &[Transaction], uncles: &[He
 }
 
 /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
-pub fn enact_bytes<'x, 'y>(block_bytes: &[u8], engine: &'x Engine, db: OverlayDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
+pub fn enact_bytes<'x, 'y>(block_bytes: &[u8], engine: &'x Engine, db: JournalDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
 	let block = BlockView::new(block_bytes);
 	let header = block.header();
 	enact(&header, &block.transactions(), &block.uncles(), engine, db, parent, last_hashes)
 }
 
 /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
-pub fn enact_verified<'x, 'y>(block: &PreVerifiedBlock, engine: &'x Engine, db: OverlayDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
+pub fn enact_verified<'x, 'y>(block: &PreVerifiedBlock, engine: &'x Engine, db: JournalDB, parent: &Header, last_hashes: &'y LastHashes) -> Result<ClosedBlock<'x, 'y>, Error> {
 	let view = BlockView::new(&block.bytes);
 	enact(&block.header, &block.transactions, &view.uncles(), engine, db, parent, last_hashes)
 }
 
 /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block aferwards
-pub fn enact_and_seal(block_bytes: &[u8], engine: &Engine, db: OverlayDB, parent: &Header, last_hashes: &LastHashes) -> Result<SealedBlock, Error> {
+pub fn enact_and_seal(block_bytes: &[u8], engine: &Engine, db: JournalDB, parent: &Header, last_hashes: &LastHashes) -> Result<SealedBlock, Error> {
 	let header = BlockView::new(block_bytes).header_view();
 	Ok(try!(try!(enact_bytes(block_bytes, engine, db, parent, last_hashes)).seal(header.seal())))
 }
@@ -304,7 +308,7 @@ fn open_block() {
 	use spec::*;
 	let engine = Spec::new_test().to_engine().unwrap();
 	let genesis_header = engine.spec().genesis_header();
-	let mut db = OverlayDB::new_temp();
+	let mut db = JournalDB::new_temp();
 	engine.spec().ensure_db_good(&mut db);
 	let last_hashes = vec![genesis_header.hash()];
 	let b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
@@ -318,13 +322,13 @@ fn enact_block() {
 	let engine = Spec::new_test().to_engine().unwrap();
 	let genesis_header = engine.spec().genesis_header();
 
-	let mut db = OverlayDB::new_temp();
+	let mut db = JournalDB::new_temp();
 	engine.spec().ensure_db_good(&mut db);
 	let b = OpenBlock::new(engine.deref(), db, &genesis_header, &vec![genesis_header.hash()], Address::zero(), vec![]).close().seal(vec![]).unwrap();
 	let orig_bytes = b.rlp_bytes();
 	let orig_db = b.drain();
 
-	let mut db = OverlayDB::new_temp();
+	let mut db = JournalDB::new_temp();
 	engine.spec().ensure_db_good(&mut db);
 	let e = enact_and_seal(&orig_bytes, engine.deref(), db, &genesis_header, &vec![genesis_header.hash()]).unwrap();
 
@@ -15,21 +15,34 @@ use views::*;
 ///
 /// - `index` - an index where best common ancestor would be.
 pub struct TreeRoute {
+	/// TODO [debris] Please document me
 	pub blocks: Vec<H256>,
+	/// TODO [debris] Please document me
 	pub ancestor: H256,
+	/// TODO [debris] Please document me
 	pub index: usize
 }
 
 /// Represents blockchain's in-memory cache size in bytes.
 #[derive(Debug)]
 pub struct CacheSize {
+	/// TODO [debris] Please document me
 	pub blocks: usize,
+	/// TODO [debris] Please document me
 	pub block_details: usize,
+	/// TODO [debris] Please document me
 	pub transaction_addresses: usize,
+	/// TODO [debris] Please document me
 	pub block_logs: usize,
+	/// TODO [debris] Please document me
 	pub blocks_blooms: usize
 }
 
+impl CacheSize {
+	/// Total amount used by the cache.
+	fn total(&self) -> usize { self.blocks + self.block_details + self.transaction_addresses + self.block_logs + self.blocks_blooms }
+}
+
 /// Information about best block gathered together
 struct BestBlock {
 	pub hash: H256,
@@ -96,6 +109,17 @@ pub trait BlockProvider {
 	}
 }
 
+#[derive(Debug, Hash, Eq, PartialEq, Clone)]
+enum CacheID {
+	Block(H256),
+	Extras(ExtrasIndex, H256),
+}
+
+struct CacheManager {
+	cache_usage: VecDeque<HashSet<CacheID>>,
+	in_use: HashSet<CacheID>,
+}
+
 /// Structure providing fast access to blockchain data.
 ///
 /// **Does not do input data verification.**
@@ -113,7 +137,9 @@ pub struct BlockChain {
 	blocks_blooms: RwLock<HashMap<H256, BlocksBlooms>>,
 
 	extras_db: DB,
-	blocks_db: DB
+	blocks_db: DB,
+
+	cache_man: RwLock<CacheManager>,
 }
 
 impl BlockProvider for BlockChain {
@@ -136,6 +162,8 @@ impl BlockProvider for BlockChain {
 		let opt = self.blocks_db.get(hash)
 			.expect("Low level database error. Some issue with disk?");
 
+		self.note_used(CacheID::Block(hash.clone()));
+
 		match opt {
 			Some(b) => {
 				let bytes: Bytes = b.to_vec();
@@ -158,6 +186,10 @@ impl BlockProvider for BlockChain {
 	}
 }
 
+const COLLECTION_QUEUE_SIZE: usize = 2;
+const MIN_CACHE_SIZE: usize = 1;
+const MAX_CACHE_SIZE: usize = 1024 * 1024 * 1;
+
 impl BlockChain {
 	/// Create new instance of blockchain from given Genesis
 	///
@@ -197,6 +229,9 @@ impl BlockChain {
 		blocks_path.push("blocks");
 		let blocks_db = DB::open_default(blocks_path.to_str().unwrap()).unwrap();
 
+		let mut cache_man = CacheManager{cache_usage: VecDeque::new(), in_use: HashSet::new()};
+		(0..COLLECTION_QUEUE_SIZE).foreach(|_| cache_man.cache_usage.push_back(HashSet::new()));
+
 		let bc = BlockChain {
 			best_block: RwLock::new(BestBlock::new()),
 			blocks: RwLock::new(HashMap::new()),
@@ -206,7 +241,8 @@ impl BlockChain {
 			block_logs: RwLock::new(HashMap::new()),
 			blocks_blooms: RwLock::new(HashMap::new()),
 			extras_db: extras_db,
-			blocks_db: blocks_db
+			blocks_db: blocks_db,
+			cache_man: RwLock::new(cache_man),
 		};
 
 		// load best block
@@ -251,7 +287,7 @@ impl BlockChain {
 	/// Ensure that the best block does indeed have a state_root in the state DB.
 	/// If it doesn't, then rewind down until we find one that does and delete data to ensure that
 	/// later blocks will be reimported.
-	pub fn ensure_good(&mut self, _state: &OverlayDB) {
+	pub fn ensure_good(&mut self, _state: &JournalDB) {
 		unimplemented!();
 	}
 
@@ -497,6 +533,10 @@ impl BlockChain {
 			}
 		}
 
+		if let Some(h) = hash.as_h256() {
+			self.note_used(CacheID::Extras(T::extras_index(), h.clone()));
+		}
+
 		self.extras_db.get_extras(hash).map(| t: T | {
 			let mut write = cache.write().unwrap();
 			write.insert(hash.clone(), t.clone());
@@ -537,6 +577,56 @@ impl BlockChain {
 		self.block_logs.write().unwrap().squeeze(size.block_logs);
 		self.blocks_blooms.write().unwrap().squeeze(size.blocks_blooms);
 	}
+
+	/// Let the cache system know that a cacheable item has been used.
+	fn note_used(&self, id: CacheID) {
+		let mut cache_man = self.cache_man.write().unwrap();
+		if !cache_man.cache_usage[0].contains(&id) {
+			cache_man.cache_usage[0].insert(id.clone());
+			if cache_man.in_use.contains(&id) {
+				if let Some(c) = cache_man.cache_usage.iter_mut().skip(1).find(|e|e.contains(&id)) {
+					c.remove(&id);
+				}
+			} else {
+				cache_man.in_use.insert(id);
+			}
+		}
+	}
+
+	/// Ticks our cache system and throws out any old data.
+	pub fn collect_garbage(&self, force: bool) {
+		// TODO: check time.
+		let timeout = true;
+
+		let t = self.cache_size().total();
+		if t < MIN_CACHE_SIZE || (!timeout && (!force || t < MAX_CACHE_SIZE)) { return; }
+
+		let mut cache_man = self.cache_man.write().unwrap();
+		let mut blocks = self.blocks.write().unwrap();
+		let mut block_details = self.block_details.write().unwrap();
+		let mut block_hashes = self.block_hashes.write().unwrap();
+		let mut transaction_addresses = self.transaction_addresses.write().unwrap();
+		let mut block_logs = self.block_logs.write().unwrap();
+		let mut blocks_blooms = self.blocks_blooms.write().unwrap();
+
+		for id in cache_man.cache_usage.pop_back().unwrap().into_iter() {
+			cache_man.in_use.remove(&id);
+			match id {
+				CacheID::Block(h) => { blocks.remove(&h); },
+				CacheID::Extras(ExtrasIndex::BlockDetails, h) => { block_details.remove(&h); },
+				CacheID::Extras(ExtrasIndex::TransactionAddress, h) => { transaction_addresses.remove(&h); },
+				CacheID::Extras(ExtrasIndex::BlockLogBlooms, h) => { block_logs.remove(&h); },
+				CacheID::Extras(ExtrasIndex::BlocksBlooms, h) => { blocks_blooms.remove(&h); },
+				_ => panic!(),
+			}
+		}
+		cache_man.cache_usage.push_front(HashSet::new());
+
+		// TODO: handle block_hashes properly.
+		block_hashes.clear();
+
+		// TODO: m_lastCollection = chrono::system_clock::now();
+	}
 }
 
 #[cfg(test)]
@@ -63,6 +63,7 @@ impl Builtin {
 	}
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn copy_to(src: &[u8], dest: &mut[u8]) {
 	// NICE: optimise
 	for i in 0..min(src.len(), dest.len()) {
@@ -1,6 +1,6 @@
 use util::*;
 use rocksdb::{Options, DB};
-use blockchain::{BlockChain, BlockProvider};
+use blockchain::{BlockChain, BlockProvider, CacheSize};
 use views::BlockView;
 use error::*;
 use header::BlockNumber;
@@ -40,12 +40,20 @@ pub struct BlockChainInfo {
 	pub best_block_number: BlockNumber
 }
 
+impl fmt::Display for BlockChainInfo {
+	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+		write!(f, "#{}.{}", self.best_block_number, self.best_block_hash)
+	}
+}
+
 /// Block queue status
 #[derive(Debug)]
 pub struct BlockQueueStatus {
+	/// TODO [arkpar] Please document me
 	pub full: bool,
 }
 
+/// TODO [arkpar] Please document me
 pub type TreeRoute = ::blockchain::TreeRoute;
 
 /// Blockchain database client. Owns and manages a blockchain and a block queue.
@@ -99,14 +107,37 @@ pub trait BlockChainClient : Sync + Send {
 	fn chain_info(&self) -> BlockChainInfo;
 }
 
+#[derive(Default, Clone, Debug, Eq, PartialEq)]
+/// TODO [Gav Wood] Please document me
+pub struct ClientReport {
+	/// TODO [Gav Wood] Please document me
+	pub blocks_imported: usize,
+	/// TODO [Gav Wood] Please document me
+	pub transactions_applied: usize,
+	/// TODO [Gav Wood] Please document me
+	pub gas_processed: U256,
+}
+
+impl ClientReport {
+	/// TODO [Gav Wood] Please document me
+	pub fn accrue_block(&mut self, block: &PreVerifiedBlock) {
+		self.blocks_imported += 1;
+		self.transactions_applied += block.transactions.len();
+		self.gas_processed += block.header.gas_used;
+	}
+}
+
 /// Blockchain database client backed by a persistent database. Owns and manages a blockchain and a block queue.
 pub struct Client {
 	chain: Arc<RwLock<BlockChain>>,
 	engine: Arc<Box<Engine>>,
-	state_db: OverlayDB,
+	state_db: JournalDB,
 	queue: BlockQueue,
+	report: ClientReport,
 }
 
+const HISTORY: u64 = 1000;
+
 impl Client {
 	/// Create a new client with given spec and DB path.
 	pub fn new(spec: Spec, path: &Path, message_channel: IoChannel<NetSyncMessage> ) -> Result<Client, Error> {
@@ -114,9 +145,7 @@ impl Client {
 		let mut opts = Options::new();
 		opts.set_max_open_files(256);
 		opts.create_if_missing(true);
-		/*
-		opts.set_max_open_files(256);
-		opts.set_use_fsync(false);
+		/*opts.set_use_fsync(false);
 		opts.set_bytes_per_sync(8388608);
 		opts.set_disable_data_sync(false);
 		opts.set_block_cache_size_mb(1024);
@@ -131,17 +160,17 @@ impl Client {
 		opts.set_max_background_compactions(4);
 		opts.set_max_background_flushes(4);
 		opts.set_filter_deletes(false);
-		opts.set_disable_auto_compactions(true);
-		*/
+		opts.set_disable_auto_compactions(false);*/
 
 		let mut state_path = path.to_path_buf();
 		state_path.push("state");
 		let db = DB::open(&opts, state_path.to_str().unwrap()).unwrap();
-		let mut state_db = OverlayDB::new(db);
+		let mut state_db = JournalDB::new(db);
 
 		let engine = Arc::new(try!(spec.to_engine()));
-		engine.spec().ensure_db_good(&mut state_db);
-		state_db.commit().expect("Error commiting genesis state to state DB");
+		if engine.spec().ensure_db_good(&mut state_db) {
+			state_db.commit(0, &engine.spec().genesis_header().hash(), None).expect("Error commiting genesis state to state DB");
+		}
 
 		// chain.write().unwrap().ensure_good(&state_db);
 
@@ -150,6 +179,7 @@ impl Client {
 			engine: engine.clone(),
 			state_db: state_db,
 			queue: BlockQueue::new(engine, message_channel),
+			report: Default::default(),
 		})
 	}
 
@@ -214,16 +244,33 @@ impl Client {
 			}
 
 			self.chain.write().unwrap().insert_block(&block.bytes); //TODO: err here?
-			match result.drain().commit() {
+			let ancient = if header.number() >= HISTORY { Some(header.number() - HISTORY) } else { None };
+			match result.drain().commit(header.number(), &header.hash(), ancient.map(|n|(n, self.chain.read().unwrap().block_hash(n).unwrap()))) {
 				Ok(_) => (),
 				Err(e) => {
 					warn!(target: "client", "State DB commit failed: {:?}", e);
 					return;
 				}
 			}
-			//info!(target: "client", "Imported #{} ({})", header.number(), header.hash());
+			self.report.accrue_block(&block);
+			trace!(target: "client", "Imported #{} ({})", header.number(), header.hash());
 		}
 	}
+
+	/// Get info on the cache.
+	pub fn cache_info(&self) -> CacheSize {
+		self.chain.read().unwrap().cache_size()
+	}
+
+	/// Get the report.
+	pub fn report(&self) -> ClientReport {
+		self.report.clone()
+	}
+
+	/// Tick the client.
+	pub fn tick(&self) {
+		self.chain.read().unwrap().collect_garbage(false);
+	}
 }
 
 impl BlockChainClient for Client {
@@ -31,11 +31,14 @@ pub trait Engine : Sync + Send {
 
 	/// Some intrinsic operation parameters; by default they take their value from the `spec()`'s `engine_params`.
 	fn maximum_extra_data_size(&self) -> usize { decode(&self.spec().engine_params.get("maximumExtraDataSize").unwrap()) }
+	/// TODO [Gav Wood] Please document me
 	fn maximum_uncle_count(&self) -> usize { 2 }
+	/// TODO [Gav Wood] Please document me
 	fn account_start_nonce(&self) -> U256 { decode(&self.spec().engine_params.get("accountStartNonce").unwrap()) }
 
 	/// Block transformation functions, before and after the transactions.
 	fn on_new_block(&self, _block: &mut Block) {}
+	/// TODO [Gav Wood] Please document me
 	fn on_close_block(&self, _block: &mut Block) {}
 
 	// TODO: consider including State in the params for verification functions.
@@ -55,6 +58,7 @@ pub trait Engine : Sync + Send {
 	// TODO: Add flags for which bits of the transaction to check.
 	// TODO: consider including State in the params.
 	fn verify_transaction_basic(&self, _t: &Transaction, _header: &Header) -> Result<(), Error> { Ok(()) }
+	/// TODO [Gav Wood] Please document me
 	fn verify_transaction(&self, _t: &Transaction, _header: &Header) -> Result<(), Error> { Ok(()) }
 
 	/// Don't forget to call Super::populateFromParent when subclassing & overriding.
@@ -63,8 +67,11 @@ pub trait Engine : Sync + Send {
 
 	// TODO: builtin contract routing - to do this properly, it will require removing the built-in configuration-reading logic
 	// from Spec into here and removing the Spec::builtins field.
+	/// TODO [Gav Wood] Please document me
 	fn is_builtin(&self, a: &Address) -> bool { self.spec().builtins.contains_key(a) }
+	/// TODO [Gav Wood] Please document me
 	fn cost_of_builtin(&self, a: &Address, input: &[u8]) -> U256 { self.spec().builtins.get(a).unwrap().cost(input.len()) }
+	/// TODO [Gav Wood] Please document me
 	fn execute_builtin(&self, a: &Address, input: &[u8], output: &mut [u8]) { self.spec().builtins.get(a).unwrap().execute(input, output); }
 
 	// TODO: sealing stuff - though might want to leave this for later.
@@ -25,6 +25,7 @@ pub struct EnvInfo {
 }
 
 impl EnvInfo {
+	/// TODO [debris] Please document me
 	pub fn new() -> EnvInfo {
 		EnvInfo {
 			number: 0,
src/error.rs (74 lines changed)
@@ -5,15 +5,22 @@ use header::BlockNumber;
 use basic_types::LogBloom;
 
 #[derive(Debug, PartialEq, Eq)]
+/// TODO [Gav Wood] Please document me
 pub struct Mismatch<T: fmt::Debug> {
+	/// TODO [Gav Wood] Please document me
 	pub expected: T,
+	/// TODO [Gav Wood] Please document me
 	pub found: T,
 }
 
 #[derive(Debug, PartialEq, Eq)]
+/// TODO [Gav Wood] Please document me
 pub struct OutOfBounds<T: fmt::Debug> {
+	/// TODO [Gav Wood] Please document me
 	pub min: Option<T>,
+	/// TODO [Gav Wood] Please document me
 	pub max: Option<T>,
+	/// TODO [Gav Wood] Please document me
 	pub found: T,
 }
 
@@ -22,58 +29,112 @@ pub struct OutOfBounds<T: fmt::Debug> {
 pub enum ExecutionError {
 	/// Returned when there gas paid for transaction execution is
 	/// lower than base gas required.
-	NotEnoughBaseGas { required: U256, got: U256 },
+	/// TODO [Gav Wood] Please document me
+	NotEnoughBaseGas {
+		/// TODO [Gav Wood] Please document me
+		required: U256,
+		/// TODO [Gav Wood] Please document me
+		got: U256
+	},
 	/// Returned when block (gas_used + gas) > gas_limit.
 	///
 	/// If gas =< gas_limit, upstream may try to execute the transaction
 	/// in next block.
-	BlockGasLimitReached { gas_limit: U256, gas_used: U256, gas: U256 },
+	BlockGasLimitReached {
+		/// TODO [Gav Wood] Please document me
+		gas_limit: U256,
+		/// TODO [Gav Wood] Please document me
+		gas_used: U256,
+		/// TODO [Gav Wood] Please document me
+		gas: U256
+	},
 	/// Returned when transaction nonce does not match state nonce.
-	InvalidNonce { expected: U256, got: U256 },
+	InvalidNonce {
+		/// TODO [Gav Wood] Please document me
+		expected: U256,
+		/// TODO [Gav Wood] Please document me
+		got: U256
+	},
 	/// Returned when cost of transaction (value + gas_price * gas) exceeds
 	/// current sender balance.
-	NotEnoughCash { required: U512, got: U512 },
+	NotEnoughCash {
+		/// TODO [Gav Wood] Please document me
+		required: U512,
+		/// TODO [Gav Wood] Please document me
+		got: U512
+	},
 	/// Returned when internal evm error occurs.
 	Internal
 }
 
 #[derive(Debug)]
+/// TODO [Gav Wood] Please document me
 pub enum TransactionError {
+	/// TODO [Gav Wood] Please document me
 	InvalidGasLimit(OutOfBounds<U256>),
 }
 
 #[derive(Debug, PartialEq, Eq)]
+/// TODO [arkpar] Please document me
 pub enum BlockError {
+	/// TODO [Gav Wood] Please document me
 	TooManyUncles(OutOfBounds<usize>),
+	/// TODO [Gav Wood] Please document me
 	UncleWrongGeneration,
+	/// TODO [Gav Wood] Please document me
 	ExtraDataOutOfBounds(OutOfBounds<usize>),
+	/// TODO [arkpar] Please document me
 	InvalidSealArity(Mismatch<usize>),
+	/// TODO [arkpar] Please document me
 	TooMuchGasUsed(OutOfBounds<U256>),
+	/// TODO [arkpar] Please document me
 	InvalidUnclesHash(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	UncleTooOld(OutOfBounds<BlockNumber>),
+	/// TODO [arkpar] Please document me
 	UncleIsBrother(OutOfBounds<BlockNumber>),
+	/// TODO [arkpar] Please document me
 	UncleInChain(H256),
+	/// TODO [arkpar] Please document me
 	UncleParentNotInChain(H256),
+	/// TODO [arkpar] Please document me
 	InvalidStateRoot(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	InvalidGasUsed(Mismatch<U256>),
+	/// TODO [arkpar] Please document me
 	InvalidTransactionsRoot(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	InvalidDifficulty(Mismatch<U256>),
+	/// TODO [arkpar] Please document me
 	InvalidGasLimit(OutOfBounds<U256>),
+	/// TODO [arkpar] Please document me
 	InvalidReceiptsStateRoot(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	InvalidTimestamp(OutOfBounds<u64>),
+	/// TODO [arkpar] Please document me
 	InvalidLogBloom(Mismatch<LogBloom>),
+	/// TODO [arkpar] Please document me
 	InvalidEthashDifficulty(Mismatch<U256>),
+	/// TODO [arkpar] Please document me
 	InvalidBlockNonce(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	InvalidParentHash(Mismatch<H256>),
+	/// TODO [arkpar] Please document me
 	InvalidNumber(OutOfBounds<BlockNumber>),
+	/// TODO [arkpar] Please document me
 	UnknownParent(H256),
+	/// TODO [Gav Wood] Please document me
 	UnknownUncleParent(H256),
 }
 
 #[derive(Debug)]
+/// TODO [arkpar] Please document me
 pub enum ImportError {
+	/// TODO [arkpar] Please document me
 	Bad(Option<Error>),
+	/// TODO [arkpar] Please document me
 	AlreadyInChain,
+	/// TODO [arkpar] Please document me
 	AlreadyQueued,
}
 
@@ -89,10 +150,15 @@ pub type ImportResult = Result<(), ImportError>;
 #[derive(Debug)]
 /// General error type which should be capable of representing all errors in ethcore.
 pub enum Error {
+	/// TODO [Gav Wood] Please document me
 	Util(UtilError),
+	/// TODO [Gav Wood] Please document me
 	Block(BlockError),
+	/// TODO [Gav Wood] Please document me
 	UnknownEngineName(String),
+	/// TODO [Gav Wood] Please document me
 	Execution(ExecutionError),
+	/// TODO [Gav Wood] Please document me
 	Transaction(TransactionError),
 }
 
@@ -1,17 +1,22 @@
 use util::*;
 
 #[inline]
+/// TODO [debris] Please document me
 pub fn ether() -> U256 { U256::exp10(18) }
 
 #[inline]
+/// TODO [debris] Please document me
 pub fn finney() -> U256 { U256::exp10(15) }
 
 #[inline]
+/// TODO [debris] Please document me
 pub fn szabo() -> U256 { U256::exp10(12) }
 
 #[inline]
+/// TODO [debris] Please document me
 pub fn shannon() -> U256 { U256::exp10(9) }
 
 #[inline]
+/// TODO [debris] Please document me
 pub fn wei() -> U256 { U256::exp10(0) }
 
@@ -19,6 +19,7 @@ pub struct Ethash {
 }
 
 impl Ethash {
+	/// TODO [arkpar] Please document me
 	pub fn new_boxed(spec: Spec) -> Box<Engine> {
 		Box::new(Ethash {
 			spec: spec,
@@ -215,7 +216,7 @@ fn on_close_block() {
 	use super::*;
 	let engine = new_morden().to_engine().unwrap();
 	let genesis_header = engine.spec().genesis_header();
-	let mut db = OverlayDB::new_temp();
+	let mut db = JournalDB::new_temp();
 	engine.spec().ensure_db_good(&mut db);
 	let last_hashes = vec![genesis_header.hash()];
 	let b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
@@ -228,7 +229,7 @@ fn on_close_block_with_uncle() {
 	use super::*;
 	let engine = new_morden().to_engine().unwrap();
 	let genesis_header = engine.spec().genesis_header();
-	let mut db = OverlayDB::new_temp();
+	let mut db = JournalDB::new_temp();
 	engine.spec().ensure_db_good(&mut db);
 	let last_hashes = vec![genesis_header.hash()];
 	let mut b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
@@ -3,7 +3,9 @@
 //! Contains all Ethereum network specific stuff, such as denominations and
 //! consensus specifications.
 
+/// TODO [Gav Wood] Please document me
 pub mod ethash;
+/// TODO [Gav Wood] Please document me
 pub mod denominations;
 
 pub use self::ethash::*;
@@ -40,7 +42,7 @@ mod tests {
 	fn ensure_db_good() {
 		let engine = new_morden().to_engine().unwrap();
 		let genesis_header = engine.spec().genesis_header();
-		let mut db = OverlayDB::new_temp();
+		let mut db = JournalDB::new_temp();
 		engine.spec().ensure_db_good(&mut db);
 		let s = State::from_existing(db.clone(), genesis_header.state_root.clone(), engine.account_start_nonce());
 		assert_eq!(s.balance(&address_from_hex("0000000000000000000000000000000000000001")), U256::from(1u64));
@@ -15,23 +15,31 @@ pub enum Error {
 	/// `BadJumpDestination` is returned when execution tried to move
 	/// to position that wasn't marked with JUMPDEST instruction
 	BadJumpDestination {
+		/// TODO [Tomusdrw] Please document me
 		destination: usize
 	},
 	/// `BadInstructions` is returned when given instruction is not supported
 	BadInstruction {
+		/// TODO [Tomusdrw] Please document me
 		instruction: u8,
 	},
 	/// `StackUnderflow` when there is not enough stack elements to execute instruction
 	/// First parameter says how many elements were needed and the second how many were actually on Stack
 	StackUnderflow {
+		/// TODO [Tomusdrw] Please document me
 		instruction: &'static str,
+		/// TODO [Tomusdrw] Please document me
 		wanted: usize,
+		/// TODO [Tomusdrw] Please document me
 		on_stack: usize
 	},
 	/// When execution would exceed defined Stack Limit
 	OutOfStack {
+		/// TODO [Tomusdrw] Please document me
 		instruction: &'static str,
+		/// TODO [Tomusdrw] Please document me
 		wanted: usize,
+		/// TODO [Tomusdrw] Please document me
 		limit: usize
 	},
 	/// Returned on evm internal error. Should never be ignored during development.
@ -26,6 +26,7 @@ pub enum MessageCallResult {
|
|||||||
Failed
|
Failed
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// TODO [debris] Please document me
|
||||||
pub trait Ext {
|
pub trait Ext {
|
||||||
/// Returns a value for given key.
|
/// Returns a value for given key.
|
||||||
fn storage_at(&self, key: &H256) -> H256;
|
fn storage_at(&self, key: &H256) -> H256;
|
||||||
|
@ -3,8 +3,11 @@ use std::fmt;
use evm::Evm;

#[derive(Clone)]
/// TODO [Tomusdrw] Please document me
pub enum VMType {
/// TODO [Tomusdrw] Please document me
Jit,
/// TODO [Tomusdrw] Please document me
Interpreter
}

@ -4,7 +4,6 @@ use common::*;
use evm;
use super::instructions as instructions;
use super::instructions::Instruction;
use std::num::wrapping::OverflowingOps;
use std::marker::Copy;
use evm::{MessageCallResult, ContractCreateResult};

@ -2,6 +2,7 @@

pub mod ext;
pub mod evm;
/// TODO [Tomusdrw] Please document me
pub mod interpreter;
#[macro_use]
pub mod factory;
@ -2,36 +2,67 @@

/// Definition of the cost schedule and other parameterisations for the EVM.
pub struct Schedule {
/// TODO [Gav Wood] Please document me
pub exceptional_failed_code_deposit: bool,
/// TODO [Gav Wood] Please document me
pub have_delegate_call: bool,
/// TODO [Tomusdrw] Please document me
pub stack_limit: usize,
/// TODO [Gav Wood] Please document me
pub max_depth: usize,
/// TODO [Gav Wood] Please document me
pub tier_step_gas: [usize; 8],
/// TODO [Gav Wood] Please document me
pub exp_gas: usize,
/// TODO [Gav Wood] Please document me
pub exp_byte_gas: usize,
/// TODO [Gav Wood] Please document me
pub sha3_gas: usize,
/// TODO [Gav Wood] Please document me
pub sha3_word_gas: usize,
/// TODO [Gav Wood] Please document me
pub sload_gas: usize,
/// TODO [Gav Wood] Please document me
pub sstore_set_gas: usize,
/// TODO [Gav Wood] Please document me
pub sstore_reset_gas: usize,
/// TODO [Gav Wood] Please document me
pub sstore_refund_gas: usize,
/// TODO [Gav Wood] Please document me
pub jumpdest_gas: usize,
/// TODO [Gav Wood] Please document me
pub log_gas: usize,
/// TODO [Gav Wood] Please document me
pub log_data_gas: usize,
/// TODO [Gav Wood] Please document me
pub log_topic_gas: usize,
/// TODO [Gav Wood] Please document me
pub create_gas: usize,
/// TODO [Gav Wood] Please document me
pub call_gas: usize,
/// TODO [Gav Wood] Please document me
pub call_stipend: usize,
/// TODO [Gav Wood] Please document me
pub call_value_transfer_gas: usize,
/// TODO [Gav Wood] Please document me
pub call_new_account_gas: usize,
/// TODO [Gav Wood] Please document me
pub suicide_refund_gas: usize,
/// TODO [Gav Wood] Please document me
pub memory_gas: usize,
/// TODO [Gav Wood] Please document me
pub quad_coeff_div: usize,
/// TODO [Gav Wood] Please document me
pub create_data_gas: usize,
/// TODO [Gav Wood] Please document me
pub tx_gas: usize,
/// TODO [Gav Wood] Please document me
pub tx_create_gas: usize,
/// TODO [Gav Wood] Please document me
pub tx_data_zero_gas: usize,
/// TODO [Gav Wood] Please document me
pub tx_data_non_zero_gas: usize,
/// TODO [Gav Wood] Please document me
pub copy_gas: usize,
}

@ -3,17 +3,23 @@ use header::BlockNumber;
use rocksdb::{DB, Writable};

/// Represents index of extra data in database
#[derive(Copy, Clone)]
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub enum ExtrasIndex {
/// TODO [debris] Please document me
BlockDetails = 0,
/// TODO [debris] Please document me
BlockHash = 1,
/// TODO [debris] Please document me
TransactionAddress = 2,
/// TODO [debris] Please document me
BlockLogBlooms = 3,
/// TODO [debris] Please document me
BlocksBlooms = 4
}

/// trait used to write Extras data to db
pub trait ExtrasWritable {
/// TODO [debris] Please document me
fn put_extras<K, T>(&self, hash: &K, value: &T) where
T: ExtrasIndexable + Encodable,
K: ExtrasSliceConvertable;
@ -21,10 +27,12 @@ pub trait ExtrasWritable {

/// trait used to read Extras data from db
pub trait ExtrasReadable {
/// TODO [debris] Please document me
fn get_extras<K, T>(&self, hash: &K) -> Option<T> where
T: ExtrasIndexable + Decodable,
K: ExtrasSliceConvertable;

/// TODO [debris] Please document me
fn extras_exists<K, T>(&self, hash: &K) -> bool where
T: ExtrasIndexable,
K: ExtrasSliceConvertable;
@ -58,7 +66,10 @@ impl ExtrasReadable for DB {

/// Implementations should convert arbitrary type to database key slice
pub trait ExtrasSliceConvertable {
/// TODO [Gav Wood] Please document me
fn to_extras_slice(&self, i: ExtrasIndex) -> H264;
/// TODO [debris] Please document me
fn as_h256(&self) -> Option<&H256> { None }
}

impl ExtrasSliceConvertable for H256 {
@ -67,6 +78,7 @@ impl ExtrasSliceConvertable for H256 {
slice[32] = i as u8;
slice
}
fn as_h256(&self) -> Option<&H256> { Some(self) }
}

impl ExtrasSliceConvertable for U256 {
@ -84,6 +96,7 @@ impl ExtrasSliceConvertable for BlockNumber {

/// Types implementing this trait can be indexed in extras database
pub trait ExtrasIndexable {
/// TODO [debris] Please document me
fn extras_index() -> ExtrasIndex;
}

@ -96,9 +109,13 @@ impl ExtrasIndexable for H256 {
/// Familial details concerning a block
#[derive(Debug, Clone)]
pub struct BlockDetails {
/// TODO [debris] Please document me
pub number: BlockNumber,
/// TODO [debris] Please document me
pub total_difficulty: U256,
/// TODO [debris] Please document me
pub parent: H256,
/// TODO [debris] Please document me
pub children: Vec<H256>
}

@ -141,6 +158,7 @@ impl Encodable for BlockDetails {
/// Log blooms of certain block
#[derive(Clone)]
pub struct BlockLogBlooms {
/// TODO [debris] Please document me
pub blooms: Vec<H2048>
}

@ -174,6 +192,7 @@ impl Encodable for BlockLogBlooms {

/// Neighboring log blooms on certain level
pub struct BlocksBlooms {
/// TODO [debris] Please document me
pub blooms: [H2048; 16]
}

@ -221,7 +240,9 @@ impl Encodable for BlocksBlooms {
/// Represents address of certain transaction within block
#[derive(Clone)]
pub struct TransactionAddress {
/// TODO [debris] Please document me
pub block_hash: H256,
/// TODO [debris] Please document me
pub index: u64
}

@ -2,6 +2,7 @@ use util::*;
use basic_types::*;
use time::now_utc;

/// TODO [Gav Wood] Please document me
pub type BlockNumber = u64;

/// A block header.
@ -13,25 +14,41 @@ pub type BlockNumber = u64;
#[derive(Debug, Clone)]
pub struct Header {
// TODO: make all private.
/// TODO [Gav Wood] Please document me
pub parent_hash: H256,
/// TODO [arkpar] Please document me
pub timestamp: u64,
/// TODO [debris] Please document me
pub number: BlockNumber,
/// TODO [Gav Wood] Please document me
pub author: Address,

/// TODO [debris] Please document me
pub transactions_root: H256,
/// TODO [debris] Please document me
pub uncles_hash: H256,
/// TODO [Gav Wood] Please document me
pub extra_data: Bytes,

/// TODO [debris] Please document me
pub state_root: H256,
/// TODO [debris] Please document me
pub receipts_root: H256,
/// TODO [debris] Please document me
pub log_bloom: LogBloom,
/// TODO [debris] Please document me
pub gas_used: U256,
/// TODO [Gav Wood] Please document me
pub gas_limit: U256,

/// TODO [debris] Please document me
pub difficulty: U256,
/// TODO [arkpar] Please document me
pub seal: Vec<Bytes>,

/// TODO [arkpar] Please document me
pub hash: RefCell<Option<H256>>,
/// TODO [Gav Wood] Please document me
pub bare_hash: RefCell<Option<H256>>,
}

@ -61,32 +78,50 @@ impl Header {
}
}

/// TODO [Gav Wood] Please document me
pub fn number(&self) -> BlockNumber { self.number }
/// TODO [Gav Wood] Please document me
pub fn timestamp(&self) -> u64 { self.timestamp }
/// TODO [Gav Wood] Please document me
pub fn author(&self) -> &Address { &self.author }

/// TODO [Gav Wood] Please document me
pub fn extra_data(&self) -> &Bytes { &self.extra_data }

/// TODO [Gav Wood] Please document me
pub fn state_root(&self) -> &H256 { &self.state_root }
/// TODO [Gav Wood] Please document me
pub fn receipts_root(&self) -> &H256 { &self.receipts_root }
/// TODO [Gav Wood] Please document me
pub fn gas_limit(&self) -> &U256 { &self.gas_limit }

/// TODO [Gav Wood] Please document me
pub fn difficulty(&self) -> &U256 { &self.difficulty }
/// TODO [Gav Wood] Please document me
pub fn seal(&self) -> &Vec<Bytes> { &self.seal }

// TODO: seal_at, set_seal_at &c.

/// TODO [Gav Wood] Please document me
pub fn set_number(&mut self, a: BlockNumber) { self.number = a; self.note_dirty(); }
/// TODO [Gav Wood] Please document me
pub fn set_timestamp(&mut self, a: u64) { self.timestamp = a; self.note_dirty(); }
/// TODO [Gav Wood] Please document me
pub fn set_timestamp_now(&mut self) { self.timestamp = now_utc().to_timespec().sec as u64; self.note_dirty(); }
/// TODO [Gav Wood] Please document me
pub fn set_author(&mut self, a: Address) { if a != self.author { self.author = a; self.note_dirty(); } }

/// TODO [Gav Wood] Please document me
pub fn set_extra_data(&mut self, a: Bytes) { if a != self.extra_data { self.extra_data = a; self.note_dirty(); } }

/// TODO [Gav Wood] Please document me
pub fn set_gas_used(&mut self, a: U256) { self.gas_used = a; self.note_dirty(); }
/// TODO [Gav Wood] Please document me
pub fn set_gas_limit(&mut self, a: U256) { self.gas_limit = a; self.note_dirty(); }

/// TODO [Gav Wood] Please document me
pub fn set_difficulty(&mut self, a: U256) { self.difficulty = a; self.note_dirty(); }
/// TODO [Gav Wood] Please document me
pub fn set_seal(&mut self, a: Vec<Bytes>) { self.seal = a; self.note_dirty(); }

/// Get the hash of this header (sha3 of the RLP).
@ -120,6 +155,7 @@ impl Header {
}

// TODO: make these functions traity
/// TODO [Gav Wood] Please document me
pub fn stream_rlp(&self, s: &mut RlpStream, with_seal: Seal) {
s.append_list(13 + match with_seal { Seal::With => self.seal.len(), _ => 0 });
s.append(&self.parent_hash);
@ -141,12 +177,14 @@ impl Header {
}
}

/// TODO [Gav Wood] Please document me
pub fn rlp(&self, with_seal: Seal) -> Bytes {
let mut s = RlpStream::new();
self.stream_rlp(&mut s, with_seal);
s.out()
}

/// TODO [debris] Please document me
pub fn rlp_sha3(&self, with_seal: Seal) -> H256 { self.rlp(with_seal).sha3() }
}

27
src/lib.rs
@ -1,6 +1,6 @@
#![warn(missing_docs)]
#![feature(cell_extras)]
#![feature(augmented_assignments)]
#![feature(wrapping)]
//#![feature(plugin)]
//#![plugin(interpolate_idents)]
//! Ethcore's ethereum implementation
@ -89,31 +89,51 @@ extern crate evmjit;
#[macro_use]
extern crate ethcore_util as util;

/// TODO [Gav Wood] Please document me
pub mod common;
/// TODO [Tomusdrw] Please document me
pub mod basic_types;
#[macro_use]
pub mod evm;
pub mod error;
/// TODO [Gav Wood] Please document me
pub mod log_entry;
/// TODO [Gav Wood] Please document me
pub mod env_info;
/// TODO [Gav Wood] Please document me
pub mod pod_account;
/// TODO [Gav Wood] Please document me
pub mod pod_state;
/// TODO [Gav Wood] Please document me
pub mod account_diff;
/// TODO [Gav Wood] Please document me
pub mod state_diff;
/// TODO [Gav Wood] Please document me
pub mod engine;
/// TODO [Gav Wood] Please document me
pub mod state;
/// TODO [Gav Wood] Please document me
pub mod account;
pub mod action_params;
/// TODO [debris] Please document me
pub mod header;
/// TODO [Gav Wood] Please document me
pub mod transaction;
/// TODO [Gav Wood] Please document me
pub mod receipt;
/// TODO [Gav Wood] Please document me
pub mod null_engine;
/// TODO [Gav Wood] Please document me
pub mod builtin;
/// TODO [debris] Please document me
pub mod spec;
pub mod views;
pub mod blockchain;
/// TODO [Gav Wood] Please document me
pub mod extras;
/// TODO [arkpar] Please document me
pub mod substate;
/// TODO [Gav Wood] Please document me
pub mod service;
pub mod executive;
pub mod externalities;
@ -121,9 +141,14 @@ pub mod externalities;
#[cfg(test)]
mod tests;

/// TODO [arkpar] Please document me
pub mod client;
/// TODO [arkpar] Please document me
pub mod sync;
/// TODO [arkpar] Please document me
pub mod block;
/// TODO [arkpar] Please document me
pub mod verification;
/// TODO [debris] Please document me
pub mod queue;
pub mod ethereum;
@ -4,8 +4,11 @@ use basic_types::LogBloom;
/// A single log's entry.
#[derive(Debug,PartialEq,Eq)]
pub struct LogEntry {
/// TODO [Gav Wood] Please document me
pub address: Address,
/// TODO [Gav Wood] Please document me
pub topics: Vec<H256>,
/// TODO [Gav Wood] Please document me
pub data: Bytes,
}

@ -11,6 +11,7 @@ pub struct NullEngine {
}

impl NullEngine {
/// TODO [Tomusdrw] Please document me
pub fn new_boxed(spec: Spec) -> Box<Engine> {
Box::new(NullEngine{
spec: spec,
@ -4,9 +4,13 @@ use account::*;
#[derive(Debug,Clone,PartialEq,Eq)]
/// Genesis account data. Does not have a DB overlay cache.
pub struct PodAccount {
/// TODO [Gav Wood] Please document me
pub balance: U256,
/// TODO [Gav Wood] Please document me
pub nonce: U256,
/// TODO [Gav Wood] Please document me
pub code: Bytes,
/// TODO [Gav Wood] Please document me
pub storage: BTreeMap<H256, H256>,
}

@ -27,6 +31,7 @@ impl PodAccount {
}
}

/// TODO [Gav Wood] Please document me
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
@ -2,6 +2,7 @@ use util::*;
use pod_account::*;

#[derive(Debug,Clone,PartialEq,Eq)]
/// TODO [Gav Wood] Please document me
pub struct PodState (BTreeMap<Address, PodAccount>);

impl PodState {
@ -204,6 +204,7 @@ impl BlockQueue {
verification.verified = new_verified;
}

/// TODO [arkpar] Please document me
pub fn drain(&mut self, max: usize) -> Vec<PreVerifiedBlock> {
let mut verification = self.verification.lock().unwrap();
let count = min(max, verification.verified.len());
@ -5,13 +5,18 @@ use log_entry::LogEntry;
/// Information describing execution of a transaction.
#[derive(Debug)]
pub struct Receipt {
/// TODO [Gav Wood] Please document me
pub state_root: H256,
/// TODO [Gav Wood] Please document me
pub gas_used: U256,
/// TODO [Gav Wood] Please document me
pub log_bloom: LogBloom,
/// TODO [Gav Wood] Please document me
pub logs: Vec<LogEntry>,
}

impl Receipt {
/// TODO [Gav Wood] Please document me
pub fn new(state_root: H256, gas_used: U256, logs: Vec<LogEntry>) -> Receipt {
Receipt {
state_root: state_root,
@ -33,10 +33,12 @@ impl ClientService {
})
}

/// TODO [arkpar] Please document me
pub fn io(&mut self) -> &mut IoService<NetSyncMessage> {
self.net_service.io()
}

/// TODO [arkpar] Please document me
pub fn client(&self) -> Arc<RwLock<Client>> {
self.client.clone()
}
@ -47,8 +49,18 @@ struct ClientIoHandler {
client: Arc<RwLock<Client>>
}

const CLIENT_TICK_TIMER: TimerToken = 0;
const CLIENT_TICK_MS: u64 = 5000;

impl IoHandler<NetSyncMessage> for ClientIoHandler {
fn initialize(&self, _io: &IoContext<NetSyncMessage>) {
fn initialize(&self, io: &IoContext<NetSyncMessage>) {
io.register_timer(CLIENT_TICK_TIMER, CLIENT_TICK_MS).expect("Error registering client timer");
}

fn timeout(&self, _io: &IoContext<NetSyncMessage>, timer: TimerToken) {
if timer == CLIENT_TICK_TIMER {
self.client.read().unwrap().tick();
}
}

fn message(&self, _io: &IoContext<NetSyncMessage>, net_message: &NetSyncMessage) {
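Taken together, the additions above give the client I/O handler a periodic tick: a timer token and interval are defined, initialize registers the timer, and timeout forwards each firing to the client. For readability, this is simply the new side of that hunk assembled in one place (no code beyond what the diff adds):

    // Consolidated view of the added lines: a 5-second timer drives Client::tick().
    const CLIENT_TICK_TIMER: TimerToken = 0;
    const CLIENT_TICK_MS: u64 = 5000;

    impl IoHandler<NetSyncMessage> for ClientIoHandler {
        fn initialize(&self, io: &IoContext<NetSyncMessage>) {
            io.register_timer(CLIENT_TICK_TIMER, CLIENT_TICK_MS).expect("Error registering client timer");
        }

        fn timeout(&self, _io: &IoContext<NetSyncMessage>, timer: TimerToken) {
            if timer == CLIENT_TICK_TIMER {
                self.client.read().unwrap().tick();
            }
        }
    }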
21
src/spec.rs
@ -51,6 +51,7 @@ pub struct GenesisAccount {
}

impl GenesisAccount {
/// TODO [arkpar] Please document me
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
@ -66,27 +67,41 @@ impl GenesisAccount {
#[derive(Debug)]
pub struct Spec {
// User friendly spec name
/// TODO [Gav Wood] Please document me
pub name: String,
// What engine are we using for this?
/// TODO [Gav Wood] Please document me
pub engine_name: String,

// Parameters concerning operation of the specific engine we're using.
// Name -> RLP-encoded value
/// TODO [Gav Wood] Please document me
pub engine_params: HashMap<String, Bytes>,

// Builtin-contracts are here for now but would like to abstract into Engine API eventually.
/// TODO [Gav Wood] Please document me
pub builtins: HashMap<Address, Builtin>,

// Genesis params.
/// TODO [Gav Wood] Please document me
pub parent_hash: H256,
/// TODO [Gav Wood] Please document me
pub author: Address,
/// TODO [Gav Wood] Please document me
pub difficulty: U256,
/// TODO [Gav Wood] Please document me
pub gas_limit: U256,
/// TODO [Gav Wood] Please document me
pub gas_used: U256,
/// TODO [Gav Wood] Please document me
pub timestamp: u64,
/// TODO [arkpar] Please document me
pub extra_data: Bytes,
/// TODO [Gav Wood] Please document me
pub genesis_state: HashMap<Address, GenesisAccount>,
/// TODO [Gav Wood] Please document me
pub seal_fields: usize,
/// TODO [Gav Wood] Please document me
pub seal_rlp: Bytes,

// May be prepopulated if we know this in advance.
@ -112,6 +127,7 @@ impl Spec {
self.state_root_memo.read().unwrap().as_ref().unwrap().clone()
}

/// TODO [Gav Wood] Please document me
pub fn genesis_header(&self) -> Header {
Header {
parent_hash: self.parent_hash.clone(),
@ -220,7 +236,7 @@ impl FromJson for Spec {

impl Spec {
/// Ensure that the given state DB has the trie nodes in for the genesis state.
pub fn ensure_db_good(&self, db: &mut HashDB) {
pub fn ensure_db_good(&self, db: &mut HashDB) -> bool {
if !db.contains(&self.state_root()) {
info!("Populating genesis state...");
let mut root = H256::new();
@ -232,7 +248,8 @@ impl Spec {
}
assert!(db.contains(&self.state_root()));
info!("Genesis state is ready");
}
true
} else { false }
}

/// Create a new Spec from a JSON UTF-8 data resource `data`.
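With the signature change above, ensure_db_good now reports whether it actually populated the genesis state (true) or found it already present (false). A small illustrative sketch, built only from calls that appear in the test hunks earlier in this diff; it is not part of the commit:

    // Illustrative only: a fresh temporary DB gets populated on the first call,
    // a second call finds the genesis state root already present and is a no-op.
    let engine = new_morden().to_engine().unwrap();
    let mut db = JournalDB::new_temp();
    assert!(engine.spec().ensure_db_good(&mut db));   // inserted the genesis trie nodes
    assert!(!engine.spec().ensure_db_good(&mut db));  // already good: nothing to do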
14
src/state.rs
@ -5,12 +5,13 @@ use pod_account::*;
use pod_state::*;
use state_diff::*;

/// TODO [Gav Wood] Please document me
pub type ApplyResult = Result<Receipt, Error>;

/// Representation of the entire state of all accounts in the system.
#[derive(Clone)]
pub struct State {
db: OverlayDB,
db: JournalDB,
root: H256,
cache: RefCell<HashMap<Address, Option<Account>>>,

@ -19,7 +20,7 @@ pub struct State {

impl State {
/// Creates new state with empty state root
pub fn new(mut db: OverlayDB, account_start_nonce: U256) -> State {
pub fn new(mut db: JournalDB, account_start_nonce: U256) -> State {
let mut root = H256::new();
{
// init trie and reset root too null
@ -35,7 +36,7 @@ impl State {
}

/// Creates new state with existing state root
pub fn from_existing(db: OverlayDB, root: H256, account_start_nonce: U256) -> State {
pub fn from_existing(db: JournalDB, root: H256, account_start_nonce: U256) -> State {
{
// trie should panic! if root does not exist
let _ = SecTrieDB::new(&db, &root);
@ -51,11 +52,11 @@ impl State {

/// Create temporary state object
pub fn new_temp() -> State {
Self::new(OverlayDB::new_temp(), U256::from(0u8))
Self::new(JournalDB::new_temp(), U256::from(0u8))
}

/// Destroy the current object and return root and database.
pub fn drop(self) -> (H256, OverlayDB) {
pub fn drop(self) -> (H256, JournalDB) {
(self.root, self.db)
}

@ -65,7 +66,7 @@ impl State {
}

/// Expose the underlying database; good to use for calling `state.db().commit()`.
pub fn db(&mut self) -> &mut OverlayDB {
pub fn db(&mut self) -> &mut JournalDB {
&mut self.db
}

@ -157,6 +158,7 @@ impl State {
Ok(receipt)
}

/// TODO [debris] Please document me
pub fn revert(&mut self, backup: State) {
self.cache = backup.cache;
}
@ -3,6 +3,7 @@ use pod_state::*;
use account_diff::*;

#[derive(Debug,Clone,PartialEq,Eq)]
/// TODO [Gav Wood] Please document me
pub struct StateDiff (BTreeMap<Address, AccountDiff>);

impl StateDiff {
@ -25,6 +25,7 @@ impl Substate {
}
}

/// TODO [Gav Wood] Please document me
pub fn accrue(&mut self, s: Substate) {
self.suicides.extend(s.suicides.into_iter());
self.logs.extend(s.logs.into_iter());
@ -475,7 +475,7 @@ impl ChainSync {
pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: PeerId) {
trace!(target: "sync", "== Disconnecting {}", peer);
if self.peers.contains_key(&peer) {
info!(target: "sync", "Disconneced {}:{}", peer, io.peer_info(peer));
info!(target: "sync", "Disconnected {}:{}", peer, io.peer_info(peer));
self.clear_peer_download(peer);
self.peers.remove(&peer);
self.continue_sync(io);
@ -48,6 +48,7 @@ pub enum SyncMessage {
BlockVerified,
}

/// TODO [arkpar] Please document me
pub type NetSyncMessage = NetworkIoMessage<SyncMessage>;

/// Ethereum network protocol handler
@ -4,8 +4,11 @@ use error::*;
use evm::Schedule;

#[derive(Debug,Clone)]
/// TODO [Gav Wood] Please document me
pub enum Action {
/// TODO [Gav Wood] Please document me
Create,
/// TODO [debris] Please document me
Call(Address),
}

@ -13,16 +16,25 @@ pub enum Action {
/// or contract creation operation.
#[derive(Debug,Clone)]
pub struct Transaction {
/// TODO [debris] Please document me
pub nonce: U256,
/// TODO [debris] Please document me
pub gas_price: U256,
/// TODO [debris] Please document me
pub gas: U256,
/// TODO [debris] Please document me
pub action: Action,
/// TODO [debris] Please document me
pub value: U256,
/// TODO [Gav Wood] Please document me
pub data: Bytes,

// signature
/// TODO [Gav Wood] Please document me
pub v: u8,
/// TODO [Gav Wood] Please document me
pub r: U256,
/// TODO [debris] Please document me
pub s: U256,

hash: RefCell<Option<H256>>,
@ -30,6 +42,7 @@ pub struct Transaction {
}

impl Transaction {
/// TODO [Gav Wood] Please document me
pub fn new() -> Self {
Transaction {
nonce: x!(0),
1
util/cov.sh
Symbolic link
@ -0,0 +1 @@
../cov.sh
@ -43,6 +43,7 @@ use std::ops::{Deref, DerefMut};
use uint::{Uint, U128, U256};
use hash::FixedHash;

/// TODO [Gav Wood] Please document me
pub struct PrettySlice<'a> (&'a [u8]);

impl<'a> fmt::Debug for PrettySlice<'a> {
@ -66,8 +67,11 @@ impl<'a> fmt::Display for PrettySlice<'a> {
}
}

/// TODO [Gav Wood] Please document me
pub trait ToPretty {
/// TODO [Gav Wood] Please document me
fn pretty(&self) -> PrettySlice;
/// TODO [Gav Wood] Please document me
fn to_hex(&self) -> String {
format!("{}", self.pretty())
}
@ -90,8 +94,11 @@ impl ToPretty for Bytes {
}
}

/// TODO [debris] Please document me
pub enum BytesRef<'a> {
/// TODO [debris] Please document me
Flexible(&'a mut Bytes),
/// TODO [debris] Please document me
Fixed(&'a mut [u8])
}

@ -121,8 +128,11 @@ pub type Bytes = Vec<u8>;
/// Slice of bytes to underlying memory
pub trait BytesConvertable {
// TODO: rename to as_slice
/// TODO [Gav Wood] Please document me
fn bytes(&self) -> &[u8];
/// TODO [Gav Wood] Please document me
fn as_slice(&self) -> &[u8] { self.bytes() }
/// TODO [Gav Wood] Please document me
fn to_bytes(&self) -> Bytes { self.as_slice().to_vec() }
}

@ -160,8 +170,11 @@ fn bytes_convertable() {
///
/// TODO: optimise some conversations
pub trait ToBytes {
/// TODO [Gav Wood] Please document me
fn to_bytes(&self) -> Vec<u8>;
/// TODO [Gav Wood] Please document me
fn to_bytes_len(&self) -> usize { self.to_bytes().len() }
/// TODO [debris] Please document me
fn first_byte(&self) -> Option<u8> { self.to_bytes().first().map(|&x| { x })}
}

@ -257,7 +270,9 @@ impl <T>ToBytes for T where T: FixedHash {
/// Error returned when FromBytes conversation goes wrong
#[derive(Debug, PartialEq, Eq)]
pub enum FromBytesError {
/// TODO [debris] Please document me
DataIsTooShort,
/// TODO [debris] Please document me
DataIsTooLong
}

@ -278,6 +293,7 @@ pub type FromBytesResult<T> = Result<T, FromBytesError>;
///
/// TODO: check size of bytes before conversation and return appropriate error
pub trait FromBytes: Sized {
/// TODO [debris] Please document me
fn from_bytes(bytes: &[u8]) -> FromBytesResult<Self>;
}

@ -49,7 +49,9 @@ use sha3::*;
/// index. Their `BloomIndex` can be created from block number and given level.
#[derive(Eq, PartialEq, Hash, Clone, Debug)]
pub struct BloomIndex {
/// TODO [debris] Please document me
pub level: u8,
/// TODO [debris] Please document me
pub index: usize,
}

@ -46,6 +46,7 @@ macro_rules! flushln {
($fmt:expr, $($arg:tt)*) => (flush!(concat!($fmt, "\n"), $($arg)*));
}

/// TODO [Gav Wood] Please document me
pub fn flush(s: String) {
::std::io::stdout().write(s.as_bytes()).unwrap();
::std::io::stdout().flush().unwrap();
@ -4,8 +4,11 @@ use uint::*;
use secp256k1::{key, Secp256k1};
use rand::os::OsRng;

/// TODO [Gav Wood] Please document me
pub type Secret = H256;
/// TODO [Gav Wood] Please document me
pub type Public = H512;
/// TODO [Gav Wood] Please document me
pub type Signature = H520;

lazy_static! {
@ -33,11 +36,17 @@ impl Signature {
}

#[derive(Debug)]
/// TODO [arkpar] Please document me
pub enum CryptoError {
/// TODO [arkpar] Please document me
InvalidSecret,
/// TODO [arkpar] Please document me
InvalidPublic,
/// TODO [arkpar] Please document me
InvalidSignature,
/// TODO [arkpar] Please document me
InvalidMessage,
/// TODO [arkpar] Please document me
Io(::std::io::Error),
}

@ -122,6 +131,7 @@ impl KeyPair {
pub fn sign(&self, message: &H256) -> Result<Signature, CryptoError> { ec::sign(&self.secret, message) }
}

/// TODO [arkpar] Please document me
pub mod ec {
use hash::*;
use uint::*;
@ -151,6 +161,12 @@ pub mod ec {
let mut signature: crypto::Signature = unsafe { ::std::mem::uninitialized() };
signature.clone_from_slice(&data);
signature[64] = rec_id.to_i32() as u8;

let (_, s, v) = signature.to_rsv();
let secp256k1n = U256::from_str("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141").unwrap();
if !is_low_s(&s) {
signature = super::Signature::from_rsv(&H256::from_slice(&signature[0..32]), &H256::from(secp256k1n - s), v ^ 1);
}
Ok(signature)
}
/// Verify signature.
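The block added to ec::sign above canonicalises signatures to the "low-s" form: with n the secp256k1 group order (the secp256k1n constant), any (r, s, v) whose s falls in the upper half of the order is replaced by (r, n - s, v ^ 1), which is an equally valid signature for the same key and message; the "7fff...20a0" constant used by is_low below is that half-order threshold. A hedged sketch of the rule, using only constants and operations visible in this hunk:

    // Illustrative sketch, not code from the commit: mirror s into the lower
    // half of the group order and flip the recovery id parity accordingly.
    fn canonicalise_s(s: U256, v: u8) -> (U256, u8) {
        let n = U256::from_str("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141").unwrap();
        let half_n = U256::from_str("7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0").unwrap();
        if s <= half_n { (s, v) } else { (n - s, v ^ 1) } // mirrored s still verifies
    }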
@ -174,7 +190,7 @@ pub mod ec {

/// Check if this is a "low" signature.
pub fn is_low(sig: &Signature) -> bool {
H256::from_slice(&sig[32..64]) <= h256_from_hex("7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0")
H256::from_slice(&sig[32..64]) <= h256_from_hex("7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0")
}

/// Check if this is a "low" signature.
@ -192,10 +208,12 @@ pub mod ec {
}
}

/// TODO [arkpar] Please document me
pub mod ecdh {
use crypto::*;
use crypto::{self};

/// TODO [arkpar] Please document me
pub fn agree(secret: &Secret, public: &Public, ) -> Result<Secret, CryptoError> {
use secp256k1::*;
let context = &crypto::SECP256K1;
@ -211,11 +229,13 @@ pub mod ecdh {
}
}

/// TODO [arkpar] Please document me
pub mod ecies {
use hash::*;
use bytes::*;
use crypto::*;

/// TODO [arkpar] Please document me
pub fn encrypt(public: &Public, plain: &[u8]) -> Result<Bytes, CryptoError> {
use ::rcrypto::digest::Digest;
use ::rcrypto::sha2::Sha256;
@ -251,6 +271,7 @@ pub mod ecies {
Ok(msg)
}

/// TODO [arkpar] Please document me
pub fn decrypt(secret: &Secret, encrypted: &[u8]) -> Result<Bytes, CryptoError> {
use ::rcrypto::digest::Digest;
use ::rcrypto::sha2::Sha256;
@ -316,17 +337,20 @@ pub mod ecies {
}
}

/// TODO [arkpar] Please document me
pub mod aes {
use ::rcrypto::blockmodes::*;
use ::rcrypto::aessafe::*;
use ::rcrypto::symmetriccipher::*;
use ::rcrypto::buffer::*;

/// TODO [arkpar] Please document me
pub fn encrypt(k: &[u8], iv: &[u8], plain: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.encrypt(&mut RefReadBuffer::new(plain), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
}

/// TODO [arkpar] Please document me
pub fn decrypt(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
@ -6,22 +6,36 @@ use rlp::DecoderError;
use io;

#[derive(Debug)]
/// TODO [Gav Wood] Please document me
pub enum BaseDataError {
/// TODO [Gav Wood] Please document me
NegativelyReferencedHash,
}

#[derive(Debug)]
/// General error type which should be capable of representing all errors in ethcore.
pub enum UtilError {
/// TODO [Gav Wood] Please document me
Crypto(::crypto::CryptoError),
/// TODO [Gav Wood] Please document me
StdIo(::std::io::Error),
/// TODO [Gav Wood] Please document me
Io(io::IoError),
/// TODO [Gav Wood] Please document me
AddressParse(::std::net::AddrParseError),
/// TODO [Gav Wood] Please document me
AddressResolve(Option<::std::io::Error>),
/// TODO [Gav Wood] Please document me
FromHex(FromHexError),
/// TODO [Gav Wood] Please document me
BaseData(BaseDataError),
/// TODO [Gav Wood] Please document me
Network(NetworkError),
/// TODO [Gav Wood] Please document me
Decoder(DecoderError),
/// TODO [Gav Wood] Please document me
SimpleString(String),
/// TODO [Gav Wood] Please document me
BadSize,
}

@ -73,6 +87,12 @@ impl From<::rlp::DecoderError> for UtilError {
}
}

impl From<String> for UtilError {
fn from(err: String) -> UtilError {
UtilError::SimpleString(err)
}
}

// TODO: uncomment below once https://github.com/rust-lang/rust/issues/27336 sorted.
/*#![feature(concat_idents)]
macro_rules! assimilate {
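The new SimpleString(String) variant together with the From<String> impl above lets plain String errors propagate into UtilError through try!. A hypothetical usage sketch (the parse_port helper is invented for illustration only and is not in the codebase):

    // Hypothetical example: try! converts the String error into
    // UtilError::SimpleString via the From<String> impl added above.
    fn parse_port(s: &str) -> Result<u16, String> {
        s.parse().map_err(|_| format!("invalid port: {}", s))
    }

    fn configure(port: &str) -> Result<u16, UtilError> {
        let port = try!(parse_port(port));
        Ok(port)
    }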
@ -7,6 +7,8 @@ macro_rules! xjson {
}
}

/// TODO [Gav Wood] Please document me
pub trait FromJson {
/// TODO [Gav Wood] Please document me
fn from_json(json: &Json) -> Self;
}
@@ -13,20 +13,33 @@ use uint::{Uint, U256};
 ///
 /// Note: types implementing `FixedHash` must be also `BytesConvertable`.
 pub trait FixedHash: Sized + BytesConvertable + Populatable + FromStr + Default {
+/// TODO [Gav Wood] Please document me
 fn new() -> Self;
 /// Synonym for `new()`. Prefer to new as it's more readable.
 fn zero() -> Self;
+/// TODO [debris] Please document me
 fn random() -> Self;
+/// TODO [debris] Please document me
 fn randomize(&mut self);
+/// TODO [arkpar] Please document me
 fn size() -> usize;
+/// TODO [arkpar] Please document me
 fn from_slice(src: &[u8]) -> Self;
+/// TODO [arkpar] Please document me
 fn clone_from_slice(&mut self, src: &[u8]) -> usize;
+/// TODO [Gav Wood] Please document me
 fn copy_to(&self, dest: &mut [u8]);
+/// TODO [Gav Wood] Please document me
 fn shift_bloomed<'a, T>(&'a mut self, b: &T) -> &'a mut Self where T: FixedHash;
+/// TODO [debris] Please document me
 fn with_bloomed<T>(mut self, b: &T) -> Self where T: FixedHash { self.shift_bloomed(b); self }
+/// TODO [Gav Wood] Please document me
 fn bloom_part<T>(&self, m: usize) -> T where T: FixedHash;
+/// TODO [debris] Please document me
 fn contains_bloomed<T>(&self, b: &T) -> bool where T: FixedHash;
+/// TODO [arkpar] Please document me
 fn contains<'a>(&'a self, b: &'a Self) -> bool;
+/// TODO [debris] Please document me
 fn is_zero(&self) -> bool;
 }
 
@@ -41,6 +54,7 @@ fn clean_0x(s: &str) -> &str {
 macro_rules! impl_hash {
 ($from: ident, $size: expr) => {
 #[derive(Eq)]
+/// TODO [Gav Wood] Please document me
 pub struct $from (pub [u8; $size]);
 
 impl BytesConvertable for $from {
@@ -396,10 +410,12 @@ macro_rules! impl_hash {
 }
 
 impl $from {
+/// TODO [Gav Wood] Please document me
 pub fn hex(&self) -> String {
 format!("{:?}", self)
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn from_bloomed<T>(b: &T) -> Self where T: FixedHash { b.bloom_part($size) }
 }
 
@@ -503,21 +519,25 @@ impl<'_> From<&'_ Address> for H256 {
 }
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn h256_from_hex(s: &str) -> H256 {
 use std::str::FromStr;
 H256::from_str(s).unwrap()
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn h256_from_u64(n: u64) -> H256 {
 use uint::U256;
 H256::from(&U256::from(n))
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn address_from_hex(s: &str) -> Address {
 use std::str::FromStr;
 Address::from_str(s).unwrap()
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn address_from_u64(n: u64) -> Address {
 let h256 = h256_from_u64(n);
 From::from(h256)
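A brief usage sketch of the conversion helpers documented above (the values are arbitrary and the snippet assumes the crate's `H256` and `Address` types):

```rust
let h: H256 = h256_from_u64(0x1234);
let a: Address = address_from_u64(69);
// address_from_u64 builds an H256 first and then converts, exactly as its body shows.
```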
@@ -41,7 +41,9 @@ mod worker;
 use mio::{EventLoop, Token};
 
 #[derive(Debug)]
+/// TODO [arkpar] Please document me
 pub enum IoError {
+/// TODO [arkpar] Please document me
 Mio(::std::io::Error),
 }
 
@@ -73,13 +75,20 @@ pub trait IoHandler<Message>: Send + Sync where Message: Send + Sync + Clone + '
 fn update_stream(&self, _stream: StreamToken, _reg: Token, _event_loop: &mut EventLoop<IoManager<Message>>) {}
 }
 
-pub type TimerToken = service::TimerToken;
-pub type StreamToken = service::StreamToken;
+/// TODO [arkpar] Please document me
+pub use io::service::TimerToken;
+/// TODO [arkpar] Please document me
+pub use io::service::StreamToken;
+/// TODO [arkpar] Please document me
 pub use io::service::IoContext;
-pub type IoService<Message> = service::IoService<Message>;
-pub type IoChannel<Message> = service::IoChannel<Message>;
-pub type IoManager<Message> = service::IoManager<Message>;
-pub const TOKENS_PER_HANDLER: usize = service::TOKENS_PER_HANDLER;
+/// TODO [arkpar] Please document me
+pub use io::service::IoService;
+/// TODO [arkpar] Please document me
+pub use io::service::IoChannel;
+/// TODO [arkpar] Please document me
+pub use io::service::IoManager;
+/// TODO [arkpar] Please document me
+pub use io::service::TOKENS_PER_HANDLER;
 
 #[cfg(test)]
 mod tests {
104
util/src/io/worker.rs
Normal file
@@ -0,0 +1,104 @@
+use std::sync::*;
+use std::mem;
+use std::thread::{JoinHandle, self};
+use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
+use crossbeam::sync::chase_lev;
+use io::service::{HandlerId, IoChannel, IoContext};
+use io::{IoHandler};
+
+pub enum WorkType<Message> {
+Readable,
+Writable,
+Hup,
+Timeout,
+Message(Message)
+}
+
+pub struct Work<Message> {
+pub work_type: WorkType<Message>,
+pub token: usize,
+pub handler_id: HandlerId,
+pub handler: Arc<IoHandler<Message>>,
+}
+
+/// An IO worker thread
+/// Sorts them ready for blockchain insertion.
+pub struct Worker {
+thread: Option<JoinHandle<()>>,
+wait: Arc<Condvar>,
+deleting: Arc<AtomicBool>,
+}
+
+impl Worker {
+/// Creates a new worker instance.
+pub fn new<Message>(index: usize,
+stealer: chase_lev::Stealer<Work<Message>>,
+channel: IoChannel<Message>,
+wait: Arc<Condvar>,
+wait_mutex: Arc<Mutex<bool>>) -> Worker
+where Message: Send + Sync + Clone + 'static {
+let deleting = Arc::new(AtomicBool::new(false));
+let mut worker = Worker {
+thread: None,
+wait: wait.clone(),
+deleting: deleting.clone(),
+};
+worker.thread = Some(thread::Builder::new().name(format!("IO Worker #{}", index)).spawn(
+move || Worker::work_loop(stealer, channel.clone(), wait, wait_mutex.clone(), deleting))
+.expect("Error creating worker thread"));
+worker
+}
+
+fn work_loop<Message>(stealer: chase_lev::Stealer<Work<Message>>,
+channel: IoChannel<Message>, wait: Arc<Condvar>,
+wait_mutex: Arc<Mutex<bool>>,
+deleting: Arc<AtomicBool>)
+where Message: Send + Sync + Clone + 'static {
+while !deleting.load(AtomicOrdering::Relaxed) {
+{
+let lock = wait_mutex.lock().unwrap();
+let _ = wait.wait(lock).unwrap();
+if deleting.load(AtomicOrdering::Relaxed) {
+return;
+}
+}
+loop {
+match stealer.steal() {
+chase_lev::Steal::Data(work) => {
+Worker::do_work(work, channel.clone());
+}
+_ => break
+}
+}
+}
+}
+
+fn do_work<Message>(work: Work<Message>, channel: IoChannel<Message>) where Message: Send + Sync + Clone + 'static {
+match work.work_type {
+WorkType::Readable => {
+work.handler.stream_readable(&mut IoContext::new(channel, work.handler_id), work.token);
+},
+WorkType::Writable => {
+work.handler.stream_writable(&mut IoContext::new(channel, work.handler_id), work.token);
+}
+WorkType::Hup => {
+work.handler.stream_hup(&mut IoContext::new(channel, work.handler_id), work.token);
+}
+WorkType::Timeout => {
+work.handler.timeout(&mut IoContext::new(channel, work.handler_id), work.token);
+}
+WorkType::Message(message) => {
+work.handler.message(&mut IoContext::new(channel, work.handler_id), &message);
+}
+}
+}
+}
+
+impl Drop for Worker {
+fn drop(&mut self) {
+self.deleting.store(true, AtomicOrdering::Relaxed);
+self.wait.notify_all();
+let thread = mem::replace(&mut self.thread, None).unwrap();
+thread.join().unwrap();
+}
+}
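The new worker combines a `Condvar` wake-up, an `AtomicBool` "deleting" flag and a join-on-drop. Below is a self-contained, std-only sketch of that shutdown pattern (the work-stealing deque is elided and `MiniWorker` is illustrative, not crate API); the wake signal is kept under the mutex so the example cannot miss a notification, a small tightening over the flag-then-notify approach above:

```rust
use std::sync::{Arc, Condvar, Mutex};
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread::{self, JoinHandle};

struct MiniWorker {
	thread: Option<JoinHandle<()>>,
	shared: Arc<(Mutex<bool>, Condvar)>, // the bool is a "work available" signal
	deleting: Arc<AtomicBool>,
}

impl MiniWorker {
	fn new() -> MiniWorker {
		let shared = Arc::new((Mutex::new(false), Condvar::new()));
		let deleting = Arc::new(AtomicBool::new(false));
		let (s, d) = (shared.clone(), deleting.clone());
		let thread = thread::spawn(move || {
			let (ref lock, ref cvar) = *s;
			loop {
				let mut signalled = lock.lock().unwrap();
				// Re-check the flags under the lock so a wake-up cannot be missed.
				while !*signalled && !d.load(Ordering::Relaxed) {
					signalled = cvar.wait(signalled).unwrap();
				}
				if d.load(Ordering::Relaxed) {
					return;
				}
				*signalled = false;
				drop(signalled);
				// A real worker would steal queued Work items and run them here.
			}
		});
		MiniWorker { thread: Some(thread), shared: shared, deleting: deleting }
	}
}

impl Drop for MiniWorker {
	fn drop(&mut self) {
		// Set the flag first, then wake the thread so it observes the flag and exits.
		self.deleting.store(true, Ordering::Relaxed);
		let (ref lock, ref cvar) = *self.shared;
		let guard = lock.lock().unwrap();
		cvar.notify_all();
		drop(guard);
		self.thread.take().unwrap().join().unwrap();
	}
}

fn main() {
	let _worker = MiniWorker::new(); // joins cleanly when dropped at the end of main
}
```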
214
util/src/journaldb.rs
Normal file
@@ -0,0 +1,214 @@
+//! Disk-backed HashDB implementation.
+
+use std::env;
+use common::*;
+use rlp::*;
+use hashdb::*;
+use overlaydb::*;
+use rocksdb::{DB, Writable};
+
+#[derive(Clone)]
+/// Implementation of the HashDB trait for a disk-backed database with a memory overlay
+/// and latent-removal semantics.
+///
+/// Like OverlayDB, there is a memory overlay; `commit()` must be called in order to
+/// write operations out to disk. Unlike OverlayDB, `remove()` operations do not take effect
+/// immediately. Rather some age (based on a linear but arbitrary metric) must pass before
+/// the removals actually take effect.
+pub struct JournalDB {
+forward: OverlayDB,
+backing: Arc<DB>,
+inserts: Vec<H256>,
+removes: Vec<H256>,
+}
+
+impl JournalDB {
+/// Create a new instance given a `backing` database.
+pub fn new(backing: DB) -> JournalDB {
+let db = Arc::new(backing);
+JournalDB {
+forward: OverlayDB::new_with_arc(db.clone()),
+backing: db,
+inserts: vec![],
+removes: vec![],
+}
+}
+
+/// Create a new instance with an anonymous temporary database.
+pub fn new_temp() -> JournalDB {
+let mut dir = env::temp_dir();
+dir.push(H32::random().hex());
+Self::new(DB::open_default(dir.to_str().unwrap()).unwrap())
+}
+
+/// Get a clone of the overlay db portion of this.
+pub fn to_overlaydb(&self) -> OverlayDB { self.forward.clone() }
+
+/// Commit all recent insert operations and historical removals from the old era
+/// to the backing database.
+pub fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
+// journal format:
+// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ]
+// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ]
+// [era, n] => [ ... ]
+
+// TODO: store last_era, reclaim_period.
+
+// when we make a new commit, we journal the inserts and removes.
+// for each end_era that we journaled that we are no passing by,
+// we remove all of its removes assuming it is canonical and all
+// of its inserts otherwise.
+
+// record new commit's details.
+{
+let mut index = 0usize;
+let mut last;
+
+while try!(self.backing.get({
+let mut r = RlpStream::new_list(2);
+r.append(&now);
+r.append(&index);
+last = r.drain();
+&last
+})).is_some() {
+index += 1;
+}
+
+let mut r = RlpStream::new_list(3);
+r.append(id);
+r.append(&self.inserts);
+r.append(&self.removes);
+try!(self.backing.put(&last, r.as_raw()));
+self.inserts.clear();
+self.removes.clear();
+}
+
+// apply old commits' details
+if let Some((end_era, canon_id)) = end {
+let mut index = 0usize;
+let mut last;
+while let Some(rlp_data) = try!(self.backing.get({
+let mut r = RlpStream::new_list(2);
+r.append(&end_era);
+r.append(&index);
+last = r.drain();
+&last
+})) {
+let rlp = Rlp::new(&rlp_data);
+let to_remove: Vec<H256> = rlp.val_at(if canon_id == rlp.val_at(0) {2} else {1});
+for i in to_remove.iter() {
+self.forward.remove(i);
+}
+try!(self.backing.delete(&last));
+trace!("JournalDB: delete journal for time #{}.{}, (canon was {}): {} entries", end_era, index, canon_id, to_remove.len());
+index += 1;
+}
+}
+
+self.forward.commit()
+}
+
+/// Revert all operations on this object (i.e. `insert()`s and `removes()`s) since the
+/// last `commit()`.
+pub fn revert(&mut self) { self.forward.revert(); self.removes.clear(); }
+}
+
+impl HashDB for JournalDB {
+fn keys(&self) -> HashMap<H256, i32> { self.forward.keys() }
+fn lookup(&self, key: &H256) -> Option<&[u8]> { self.forward.lookup(key) }
+fn exists(&self, key: &H256) -> bool { self.forward.exists(key) }
+fn insert(&mut self, value: &[u8]) -> H256 { let r = self.forward.insert(value); self.inserts.push(r.clone()); r }
+fn emplace(&mut self, key: H256, value: Bytes) { self.inserts.push(key.clone()); self.forward.emplace(key, value); }
+fn kill(&mut self, key: &H256) { self.removes.push(key.clone()); }
+}
+
+#[cfg(test)]
+mod tests {
+use common::*;
+use super::*;
+use hashdb::*;
+
+#[test]
+fn long_history() {
+// history is 3
+let mut jdb = JournalDB::new_temp();
+let h = jdb.insert(b"foo");
+jdb.commit(0, &b"0".sha3(), None).unwrap();
+assert!(jdb.exists(&h));
+jdb.remove(&h);
+jdb.commit(1, &b"1".sha3(), None).unwrap();
+assert!(jdb.exists(&h));
+jdb.commit(2, &b"2".sha3(), None).unwrap();
+assert!(jdb.exists(&h));
+jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
+assert!(jdb.exists(&h));
+jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
+assert!(!jdb.exists(&h));
+}
+
+#[test]
+fn complex() {
+// history is 1
+let mut jdb = JournalDB::new_temp();
+
+let foo = jdb.insert(b"foo");
+let bar = jdb.insert(b"bar");
+jdb.commit(0, &b"0".sha3(), None).unwrap();
+assert!(jdb.exists(&foo));
+assert!(jdb.exists(&bar));
+
+jdb.remove(&foo);
+jdb.remove(&bar);
+let baz = jdb.insert(b"baz");
+jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
+assert!(jdb.exists(&foo));
+assert!(jdb.exists(&bar));
+assert!(jdb.exists(&baz));
+
+let foo = jdb.insert(b"foo");
+jdb.remove(&baz);
+jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
+assert!(jdb.exists(&foo));
+assert!(!jdb.exists(&bar));
+assert!(jdb.exists(&baz));
+
+jdb.remove(&foo);
+jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
+assert!(jdb.exists(&foo));
+assert!(!jdb.exists(&bar));
+assert!(!jdb.exists(&baz));
+
+jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
+assert!(!jdb.exists(&foo));
+assert!(!jdb.exists(&bar));
+assert!(!jdb.exists(&baz));
+}
+
+#[test]
+fn fork() {
+// history is 1
+let mut jdb = JournalDB::new_temp();
+
+let foo = jdb.insert(b"foo");
+let bar = jdb.insert(b"bar");
+jdb.commit(0, &b"0".sha3(), None).unwrap();
+assert!(jdb.exists(&foo));
+assert!(jdb.exists(&bar));
+
+jdb.remove(&foo);
+let baz = jdb.insert(b"baz");
+jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
+
+jdb.remove(&bar);
+jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
+
+assert!(jdb.exists(&foo));
+assert!(jdb.exists(&bar));
+assert!(jdb.exists(&baz));
+
+jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
+assert!(jdb.exists(&foo));
+assert!(!jdb.exists(&baz));
+assert!(!jdb.exists(&bar));
+}
+}
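The heart of JournalDB is the latent-removal rule in `commit()`: removals are only journalled under `(era, id)` and applied once that era is finalised with a canonical id, while non-canonical siblings have their inserts rolled back. An in-memory sketch of that bookkeeping with String keys and a HashMap instead of RLP-keyed RocksDB records; `MiniJournal` and its methods are illustrative only, not crate API:

```rust
use std::collections::{HashMap, HashSet};

struct MiniJournal {
	data: HashSet<String>,
	// (era, id) -> (inserts, removes) recorded while that era was open
	journal: HashMap<(u64, String), (Vec<String>, Vec<String>)>,
	inserts: Vec<String>,
	removes: Vec<String>,
}

impl MiniJournal {
	fn new() -> MiniJournal {
		MiniJournal { data: HashSet::new(), journal: HashMap::new(), inserts: vec![], removes: vec![] }
	}

	fn insert(&mut self, k: &str) {
		self.data.insert(k.to_string());
		self.inserts.push(k.to_string());
	}

	// Latent: nothing is deleted yet, the removal is only journalled.
	fn remove(&mut self, k: &str) {
		self.removes.push(k.to_string());
	}

	fn commit(&mut self, now: u64, id: &str, end: Option<(u64, String)>) {
		// Journal this commit's inserts and removes under (era, id).
		let ins = std::mem::replace(&mut self.inserts, vec![]);
		let rem = std::mem::replace(&mut self.removes, vec![]);
		self.journal.insert((now, id.to_string()), (ins, rem));

		// Finalise an old era: apply the canonical branch's removes and
		// roll back the inserts of every non-canonical sibling.
		if let Some((era, canon_id)) = end {
			let keys: Vec<_> = self.journal.keys().filter(|k| k.0 == era).cloned().collect();
			for key in keys {
				let (ins, rem) = self.journal.remove(&key).unwrap();
				let dead = if key.1 == canon_id { rem } else { ins };
				for k in dead {
					self.data.remove(&k);
				}
			}
		}
	}

	fn exists(&self, k: &str) -> bool {
		self.data.contains(k)
	}
}

fn main() {
	let mut jdb = MiniJournal::new();
	jdb.insert("foo");
	jdb.commit(0, "0", None);
	jdb.remove("foo");
	jdb.commit(1, "1", None);
	assert!(jdb.exists("foo"));                      // removal is still latent
	jdb.commit(2, "2", Some((1, "1".to_string())));  // era 1 finalised as canonical
	assert!(!jdb.exists("foo"));                     // now it is gone
}
```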
@@ -1,5 +1,6 @@
 use common::*;
 
+/// TODO [Gav Wood] Please document me
 pub fn clean(s: &str) -> &str {
 if s.len() >= 2 && &s[0..2] == "0x" {
 &s[2..]
@@ -1,7 +1,7 @@
+#![warn(missing_docs)]
 #![feature(op_assign_traits)]
 #![feature(augmented_assignments)]
 #![feature(associated_consts)]
-#![feature(wrapping)]
 //! Ethcore-util library
 //!
 //! ### Rust version:
@@ -53,33 +53,46 @@ extern crate arrayvec;
 extern crate elastic_array;
 extern crate crossbeam;
 
+/// TODO [Gav Wood] Please document me
 pub mod standard;
 #[macro_use]
+/// TODO [Gav Wood] Please document me
 pub mod from_json;
 #[macro_use]
+/// TODO [Gav Wood] Please document me
 pub mod common;
 pub mod error;
 pub mod hash;
 pub mod uint;
 pub mod bytes;
 pub mod rlp;
+/// TODO [Gav Wood] Please document me
 pub mod misc;
+/// TODO [Gav Wood] Please document me
 pub mod json_aid;
 pub mod vector;
 pub mod sha3;
 pub mod hashdb;
 pub mod memorydb;
 pub mod overlaydb;
+pub mod journaldb;
+/// TODO [Gav Wood] Please document me
 pub mod math;
 pub mod chainfilter;
+/// TODO [Gav Wood] Please document me
 pub mod crypto;
 pub mod triehash;
+/// TODO [Gav Wood] Please document me
 pub mod trie;
 pub mod nibbleslice;
+/// TODO [Gav Wood] Please document me
 pub mod heapsizeof;
 pub mod squeeze;
+/// TODO [Gav Wood] Please document me
 pub mod semantic_version;
+/// TODO [Gav Wood] Please document me
 pub mod io;
+/// TODO [Gav Wood] Please document me
 pub mod network;
 
 pub use common::*;
@@ -89,6 +102,7 @@ pub use rlp::*;
 pub use hashdb::*;
 pub use memorydb::*;
 pub use overlaydb::*;
+pub use journaldb::*;
 pub use math::*;
 pub use chainfilter::*;
 pub use crypto::*;
@@ -107,12 +107,14 @@ impl MemoryDB {
 self.data.get(key)
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn drain(&mut self) -> HashMap<H256, (Bytes, i32)> {
 let mut data = HashMap::new();
 mem::swap(&mut self.data, &mut data);
 data
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn denote(&self, key: &H256, value: Bytes) -> &(Bytes, i32) {
 if self.raw(key) == None {
 unsafe {
@@ -3,9 +3,13 @@ use common::*;
 #[derive(Debug,Clone,PartialEq,Eq)]
 /// Diff type for specifying a change (or not).
 pub enum Diff<T> where T: Eq {
+/// TODO [Gav Wood] Please document me
 Same,
+/// TODO [Gav Wood] Please document me
 Born(T),
+/// TODO [Gav Wood] Please document me
 Changed(T, T),
+/// TODO [Gav Wood] Please document me
 Died(T),
 }
 
@@ -26,6 +30,8 @@ impl<T> Diff<T> where T: Eq {
 #[derive(PartialEq,Eq,Clone,Copy)]
 /// Boolean type for clean/dirty status.
 pub enum Filth {
+/// TODO [Gav Wood] Please document me
 Clean,
+/// TODO [Gav Wood] Please document me
 Dirty,
 }
@@ -56,12 +56,19 @@ mod service;
 mod error;
 mod node;
 
+/// TODO [arkpar] Please document me
 pub use network::host::PeerId;
+/// TODO [arkpar] Please document me
 pub use network::host::PacketId;
+/// TODO [arkpar] Please document me
 pub use network::host::NetworkContext;
+/// TODO [arkpar] Please document me
 pub use network::service::NetworkService;
+/// TODO [arkpar] Please document me
 pub use network::host::NetworkIoMessage;
+/// TODO [arkpar] Please document me
 pub use network::host::NetworkIoMessage::User as UserMessage;
+/// TODO [arkpar] Please document me
 pub use network::error::NetworkError;
 
 use io::TimerToken;
@@ -34,6 +34,7 @@ pub struct NibbleSlice<'a> {
 offset_encode_suffix: usize,
 }
 
+/// TODO [Gav Wood] Please document me
 pub struct NibbleSliceIterator<'a> {
 p: &'a NibbleSlice<'a>,
 i: usize,
@@ -76,6 +77,7 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 (r, a.len() + b.len())
 }*/
 
+/// TODO [Gav Wood] Please document me
 pub fn iter(&'a self) -> NibbleSliceIterator<'a> {
 NibbleSliceIterator { p: self, i: 0 }
 }
@@ -130,6 +132,7 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 i
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn encoded(&self, is_leaf: bool) -> Bytes {
 let l = self.len();
 let mut r = Bytes::with_capacity(l / 2 + 1);
@@ -142,6 +145,7 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 r
 }
 
+/// TODO [Gav Wood] Please document me
 pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> Bytes {
 let l = min(self.len(), n);
 let mut r = Bytes::with_capacity(l / 2 + 1);
@@ -15,11 +15,11 @@ use rocksdb::{DB, Writable, IteratorMode};
 #[derive(Clone)]
 /// Implementation of the HashDB trait for a disk-backed database with a memory overlay.
 ///
-/// The operations `insert()` and `kill()` take place on the memory overlay; batches of
+/// The operations `insert()` and `remove()` take place on the memory overlay; batches of
 /// such operations may be flushed to the disk-backed DB with `commit()` or discarded with
 /// `revert()`.
 ///
-/// `lookup()` and `exists()` maintain normal behaviour - all `insert()` and `kill()`
+/// `lookup()` and `contains()` maintain normal behaviour - all `insert()` and `remove()`
 /// queries have an immediate effect in terms of these functions.
 pub struct OverlayDB {
 overlay: MemoryDB,
@@ -28,8 +28,11 @@ pub struct OverlayDB {
 
 impl OverlayDB {
 /// Create a new instance of OverlayDB given a `backing` database.
-pub fn new(backing: DB) -> OverlayDB {
-OverlayDB{ overlay: MemoryDB::new(), backing: Arc::new(backing) }
+pub fn new(backing: DB) -> OverlayDB { Self::new_with_arc(Arc::new(backing)) }
+
+/// Create a new instance of OverlayDB given a `backing` database.
+pub fn new_with_arc(backing: Arc<DB>) -> OverlayDB {
+OverlayDB{ overlay: MemoryDB::new(), backing: backing }
 }
 
 /// Create a new instance of OverlayDB with an anonymous temporary database.
@@ -68,11 +71,10 @@ impl OverlayDB {
 /// ```
 pub fn commit(&mut self) -> Result<u32, UtilError> {
 let mut ret = 0u32;
+let mut deletes = 0usize;
 for i in self.overlay.drain().into_iter() {
 let (key, (value, rc)) = i;
-// until we figure out state trie pruning, only commit stuff when it has a strictly positive delkta of RCs -
-// this prevents RCs being reduced to 0 where the DB would pretent that the node had been removed.
-if rc > 0 {
+if rc != 0 {
 match self.payload(&key) {
 Some(x) => {
 let (back_value, back_rc) = x;
@@ -80,7 +82,7 @@ impl OverlayDB {
 if total_rc < 0 {
 return Err(From::from(BaseDataError::NegativelyReferencedHash));
 }
-self.put_payload(&key, (back_value, total_rc as u32));
+deletes += if self.put_payload(&key, (back_value, total_rc as u32)) {1} else {0};
 }
 None => {
 if rc < 0 {
@@ -92,6 +94,7 @@ impl OverlayDB {
 ret += 1;
 }
 }
+trace!("OverlayDB::commit() deleted {} nodes", deletes);
 Ok(ret)
 }
 
@@ -129,11 +132,18 @@ impl OverlayDB {
 }
 
 /// Get the refs and value of the given key.
-fn put_payload(&self, key: &H256, payload: (Bytes, u32)) {
-let mut s = RlpStream::new_list(2);
-s.append(&payload.1);
-s.append(&payload.0);
-self.backing.put(&key.bytes(), &s.out()).expect("Low-level database error. Some issue with your hard disk?");
+fn put_payload(&self, key: &H256, payload: (Bytes, u32)) -> bool {
+if payload.1 > 0 {
+let mut s = RlpStream::new_list(2);
+s.append(&payload.1);
+s.append(&payload.0);
+self.backing.put(&key.bytes(), s.as_raw()).expect("Low-level database error. Some issue with your hard disk?");
+false
+} else {
+self.backing.delete(&key.bytes()).expect("Low-level database error. Some issue with your hard disk?");
+true
+}
+
 }
 }
 
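The changed `commit()`/`put_payload()` pair above enforces a simple reference-count rule: merge the overlay's delta into the backing count, fail on a negative total, and delete the backing entry once the count reaches zero. A std-only sketch of that rule over a plain HashMap (`apply_delta` is illustrative and simplifies away the RLP payload handling):

```rust
use std::collections::HashMap;

fn apply_delta(
	backing: &mut HashMap<String, (Vec<u8>, i32)>,
	key: &str,
	value: Vec<u8>,
	rc_delta: i32,
) -> Result<(), String> {
	if rc_delta == 0 {
		return Ok(()); // nothing to do, mirrors the `rc != 0` guard in commit()
	}
	let total = backing.get(key).map(|&(_, rc)| rc).unwrap_or(0) + rc_delta;
	if total < 0 {
		return Err("negatively referenced hash".into());
	}
	if total == 0 {
		backing.remove(key); // mirrors the new delete branch in put_payload()
	} else {
		backing.insert(key.into(), (value, total));
	}
	Ok(())
}

fn main() {
	let mut backing = HashMap::new();
	apply_delta(&mut backing, "node", b"payload".to_vec(), 1).unwrap();
	apply_delta(&mut backing, "node", b"payload".to_vec(), -1).unwrap();
	assert!(backing.get("node").is_none()); // refcount hit zero, entry deleted
	assert!(apply_delta(&mut backing, "node", vec![], -1).is_err());
}
```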
@@ -30,10 +30,15 @@
 //! * You want to get view onto rlp-slice.
 //! * You don't want to decode whole rlp at once.
 
+/// TODO [Gav Wood] Please document me
 pub mod rlptraits;
+/// TODO [Gav Wood] Please document me
 pub mod rlperrors;
+/// TODO [debris] Please document me
 pub mod rlpin;
+/// TODO [debris] Please document me
 pub mod untrusted_rlp;
+/// TODO [debris] Please document me
 pub mod rlpstream;
 
 #[cfg(test)]
@@ -46,9 +51,13 @@ pub use self::rlpin::{Rlp, RlpIterator};
 pub use self::rlpstream::{RlpStream,RlpStandard};
 use super::hash::H256;
 
+/// TODO [arkpar] Please document me
 pub const NULL_RLP: [u8; 1] = [0x80; 1];
+/// TODO [Gav Wood] Please document me
 pub const EMPTY_LIST_RLP: [u8; 1] = [0xC0; 1];
+/// TODO [arkpar] Please document me
 pub const SHA3_NULL_RLP: H256 = H256( [0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21] );
+/// TODO [debris] Please document me
 pub const SHA3_EMPTY_LIST_RLP: H256 = H256( [0x1d, 0xcc, 0x4d, 0xe8, 0xde, 0xc7, 0x5d, 0x7a, 0xab, 0x85, 0xb5, 0x67, 0xb6, 0xcc, 0xd4, 0x1a, 0xd3, 0x12, 0x45, 0x1b, 0x94, 0x8a, 0x74, 0x13, 0xf0, 0xa1, 0x42, 0xfd, 0x40, 0xd4, 0x93, 0x47] );
 
 /// Shortcut function to decode trusted rlp
@@ -3,14 +3,23 @@ use std::error::Error as StdError;
 use bytes::FromBytesError;
 
 #[derive(Debug, PartialEq, Eq)]
+/// TODO [debris] Please document me
 pub enum DecoderError {
+/// TODO [debris] Please document me
 FromBytesError(FromBytesError),
+/// TODO [debris] Please document me
 RlpIsTooShort,
+/// TODO [debris] Please document me
 RlpExpectedToBeList,
+/// TODO [Gav Wood] Please document me
 RlpExpectedToBeData,
+/// TODO [Gav Wood] Please document me
 RlpIncorrectListLen,
+/// TODO [Gav Wood] Please document me
 RlpDataLenWithZeroPrefix,
+/// TODO [Gav Wood] Please document me
 RlpListLenWithZeroPrefix,
+/// TODO [debris] Please document me
 RlpInvalidIndirection,
 }
 
@@ -103,10 +103,12 @@ impl <'a, 'view> Rlp<'a> where 'a: 'view {
 res.unwrap_or_else(|_| panic!())
 }
 
+/// TODO [debris] Please document me
 pub fn as_val<T>(&self) -> T where T: Decodable {
 Self::view_as_val(self)
 }
 
+/// TODO [debris] Please document me
 pub fn val_at<T>(&self, index: usize) -> T where T: Decodable {
 Self::view_as_val(&self.at(index))
 }
@@ -142,6 +142,14 @@ impl RlpStream {
 self.note_appended(1);
 }
 }
 
+/// Drain the object and return the underlying ElasticArray.
+pub fn drain(self) -> ElasticArray1024<u8> {
+match self.is_finished() {
+true => self.encoder.bytes,
+false => panic!()
+}
+}
 }
 
 struct BasicEncoder {
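The new `drain()` is consumed elsewhere in this commit: `JournalDB::commit()` builds its `[era, index]` journal keys with it. A small fragment restating that use; it assumes the crate's `RlpStream` as shown in this diff and is not a standalone program:

```rust
fn journal_key(era: u64, index: usize) -> ElasticArray1024<u8> {
	let mut r = RlpStream::new_list(2);
	r.append(&era);
	r.append(&index);
	// drain() hands back the encoder's buffer directly; it panics if the list is unfinished.
	r.drain()
}
```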
@@ -215,15 +223,19 @@ impl Encoder for BasicEncoder {
 }
 }
 
+/// TODO [Gav Wood] Please document me
 pub trait RlpStandard {
+/// TODO [Gav Wood] Please document me
 fn rlp_append(&self, s: &mut RlpStream);
 
+/// TODO [Gav Wood] Please document me
 fn rlp_bytes(&self) -> Bytes {
 let mut s = RlpStream::new();
 self.rlp_append(&mut s);
 s.out()
 }
 
+/// TODO [Gav Wood] Please document me
 fn rlp_sha3(&self) -> H256 { self.rlp_bytes().sha3() }
 }
 
@@ -1,23 +1,36 @@
 use rlp::{DecoderError, UntrustedRlp};
 
+/// TODO [debris] Please document me
 pub trait Decoder: Sized {
+/// TODO [debris] Please document me
 fn read_value<T, F>(&self, f: F) -> Result<T, DecoderError>
 where F: FnOnce(&[u8]) -> Result<T, DecoderError>;
 
+/// TODO [arkpar] Please document me
 fn as_list(&self) -> Result<Vec<Self>, DecoderError>;
+/// TODO [Gav Wood] Please document me
 fn as_rlp<'a>(&'a self) -> &'a UntrustedRlp<'a>;
+/// TODO [debris] Please document me
 fn as_raw(&self) -> &[u8];
 }
 
+/// TODO [debris] Please document me
 pub trait Decodable: Sized {
+/// TODO [debris] Please document me
 fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder;
 }
 
+/// TODO [debris] Please document me
 pub trait View<'a, 'view>: Sized {
+/// TODO [debris] Please document me
 type Prototype;
+/// TODO [debris] Please document me
 type PayloadInfo;
+/// TODO [debris] Please document me
 type Data;
+/// TODO [debris] Please document me
 type Item;
+/// TODO [debris] Please document me
 type Iter;
 
 /// Creates a new instance of `Rlp` reader
@@ -41,8 +54,10 @@ pub trait View<'a, 'view>: Sized {
 /// Get the prototype of the RLP.
 fn prototype(&self) -> Self::Prototype;
 
+/// TODO [debris] Please document me
 fn payload_info(&self) -> Self::PayloadInfo;
 
+/// TODO [debris] Please document me
 fn data(&'view self) -> Self::Data;
 
 /// Returns number of RLP items.
@@ -179,21 +194,30 @@ pub trait View<'a, 'view>: Sized {
 /// ```
 fn iter(&'view self) -> Self::Iter;
 
+/// TODO [debris] Please document me
 fn as_val<T>(&self) -> Result<T, DecoderError> where T: Decodable;
 
+/// TODO [debris] Please document me
 fn val_at<T>(&self, index: usize) -> Result<T, DecoderError> where T: Decodable;
 }
 
+/// TODO [debris] Please document me
 pub trait Encoder {
+/// TODO [debris] Please document me
 fn emit_value(&mut self, bytes: &[u8]) -> ();
+/// TODO [Gav Wood] Please document me
 fn emit_list<F>(&mut self, f: F) -> () where F: FnOnce(&mut Self) -> ();
+/// TODO [debris] Please document me
 fn emit_raw(&mut self, bytes: &[u8]) -> ();
 }
 
+/// TODO [debris] Please document me
 pub trait Encodable {
+/// TODO [debris] Please document me
 fn encode<E>(&self, encoder: &mut E) -> () where E: Encoder;
 }
 
+/// TODO [debris] Please document me
 pub trait Stream: Sized {
 
 /// Initializes instance of empty `Stream`.
@@ -284,6 +308,7 @@ pub trait Stream: Sized {
 /// }
 fn is_finished(&self) -> bool;
 
+/// TODO [debris] Please document me
 fn as_raw(&self) -> &[u8];
 
 /// Streams out encoded bytes.
@@ -21,15 +21,21 @@ impl OffsetCache {
 }
 
 #[derive(Debug)]
+/// TODO [debris] Please document me
 pub enum Prototype {
+/// TODO [debris] Please document me
 Null,
+/// TODO [debris] Please document me
 Data(usize),
+/// TODO [debris] Please document me
 List(usize),
 }
 
 /// Stores basic information about item
 pub struct PayloadInfo {
+/// TODO [debris] Please document me
 pub header_len: usize,
+/// TODO [debris] Please document me
 pub value_len: usize,
 }
 
@@ -6,6 +6,7 @@ use bytes::{BytesConvertable, Populatable};
 use hash::{H256, FixedHash};
 use self::sha3_ext::*;
 
+/// TODO [Gav Wood] Please document me
 pub const SHA3_EMPTY: H256 = H256( [0xc5, 0xd2, 0x46, 0x01, 0x86, 0xf7, 0x23, 0x3c, 0x92, 0x7e, 0x7d, 0xb2, 0xdc, 0xc7, 0x03, 0xc0, 0xe5, 0x00, 0xb6, 0x53, 0xca, 0x82, 0x27, 0x3b, 0x7b, 0xfa, 0xd8, 0x04, 0x5d, 0x85, 0xa4, 0x70] );
 
 
@@ -36,6 +36,7 @@ use heapsize::HeapSizeOf;
 
 /// Should be used to squeeze collections to certain size in bytes
 pub trait Squeeze {
+/// TODO [debris] Please document me
 fn squeeze(&mut self, size: usize);
 }
 
@@ -1,10 +1,17 @@
+/// TODO [Gav Wood] Please document me
 pub mod trietraits;
 pub mod standardmap;
+/// TODO [Gav Wood] Please document me
 pub mod journal;
+/// TODO [Gav Wood] Please document me
 pub mod node;
+/// TODO [Gav Wood] Please document me
 pub mod triedb;
+/// TODO [Gav Wood] Please document me
 pub mod triedbmut;
+/// TODO [Gav Wood] Please document me
 pub mod sectriedb;
+/// TODO [Gav Wood] Please document me
 pub mod sectriedbmut;
 
 pub use self::trietraits::*;
@@ -7,9 +7,13 @@ use super::journal::*;
 /// Type of node in the trie and essential information thereof.
 #[derive(Clone, Eq, PartialEq, Debug)]
 pub enum Node<'a> {
+/// TODO [Gav Wood] Please document me
 Empty,
+/// TODO [Gav Wood] Please document me
 Leaf(NibbleSlice<'a>, &'a[u8]),
+/// TODO [Gav Wood] Please document me
 Extension(NibbleSlice<'a>, &'a[u8]),
+/// TODO [Gav Wood] Please document me
 Branch([&'a[u8]; 16], Option<&'a [u8]>)
 }
 
@@ -7,9 +7,13 @@ use hash::*;
 
 /// Alphabet to use when creating words for insertion into tries.
 pub enum Alphabet {
+/// TODO [Gav Wood] Please document me
 All,
+/// TODO [Gav Wood] Please document me
 Low,
+/// TODO [Gav Wood] Please document me
 Mid,
+/// TODO [Gav Wood] Please document me
 Custom(Bytes),
 }
 
@@ -34,6 +34,7 @@ use super::node::*;
 pub struct TrieDB<'db> {
 db: &'db HashDB,
 root: &'db H256,
+/// TODO [Gav Wood] Please document me
 pub hash_count: usize,
 }
 
@@ -40,6 +40,7 @@ use super::trietraits::*;
 pub struct TrieDBMut<'db> {
 db: &'db mut HashDB,
 root: &'db mut H256,
+/// TODO [Gav Wood] Please document me
 pub hash_count: usize,
 }
 
245
util/src/uint.rs
@@ -23,7 +23,6 @@
 
 use standard::*;
 use from_json::*;
-use std::num::wrapping::OverflowingOps;
 
 macro_rules! impl_map_from {
 ($thing:ident, $from:ty, $to:ty) => {
@@ -59,15 +58,20 @@ macro_rules! panic_on_overflow {
 }
 }
 
+/// TODO [Gav Wood] Please document me
 pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug + fmt::Display + PartialOrd + Ord + PartialEq + Eq + Hash {
 
 /// Size of this type.
 const SIZE: usize;
 
+/// TODO [Gav Wood] Please document me
 fn zero() -> Self;
+/// TODO [Gav Wood] Please document me
 fn one() -> Self;
 
+/// TODO [Gav Wood] Please document me
 type FromDecStrErr;
+/// TODO [Gav Wood] Please document me
 fn from_dec_str(value: &str) -> Result<Self, Self::FromDecStrErr>;
 
 /// Conversion to u32
@@ -97,6 +101,28 @@ pub trait Uint: Sized + Default + FromStr + From<u64> + FromJson + fmt::Debug +
 fn pow(self, other: Self) -> Self;
 /// Return wrapped eponentation `self**other` and flag if there was an overflow
 fn overflowing_pow(self, other: Self) -> (Self, bool);
+
+
+/// TODO [debris] Please document me
+fn overflowing_add(self, other: Self) -> (Self, bool);
+
+/// TODO [debris] Please document me
+fn overflowing_sub(self, other: Self) -> (Self, bool);
+
+/// TODO [debris] Please document me
+fn overflowing_mul(self, other: Self) -> (Self, bool);
+
+/// TODO [debris] Please document me
+fn overflowing_div(self, other: Self) -> (Self, bool);
+
+/// TODO [debris] Please document me
+fn overflowing_rem(self, other: Self) -> (Self, bool);
+
+/// TODO [debris] Please document me
+fn overflowing_neg(self) -> (Self, bool);
+
+/// TODO [Gav Wood] Please document me
+fn overflowing_shl(self, shift: u32) -> (Self, bool);
 }
 
 macro_rules! construct_uint {
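The `overflowing_*` methods added to `Uint` follow the same contract as the standard integer methods: return the wrapped result together with an overflow flag. A quick std-only illustration of that contract (U256 behaves the same way, just across four 64-bit words):

```rust
fn main() {
	let (wrapped, overflowed) = u64::max_value().overflowing_add(1);
	assert_eq!(wrapped, 0);
	assert!(overflowed);

	let (wrapped, overflowed) = 10u64.overflowing_sub(3);
	assert_eq!(wrapped, 7);
	assert!(!overflowed);
}
```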
@@ -259,6 +285,98 @@ macro_rules! construct_uint {
 let res = overflowing!(x.overflowing_mul(y), overflow);
 (res, overflow)
 }
+
+fn overflowing_add(self, other: $name) -> ($name, bool) {
+let $name(ref me) = self;
+let $name(ref you) = other;
+let mut ret = [0u64; $n_words];
+let mut carry = [0u64; $n_words];
+let mut b_carry = false;
+let mut overflow = false;
+
+for i in 0..$n_words {
+ret[i] = me[i].wrapping_add(you[i]);
+
+if ret[i] < me[i] {
+if i < $n_words - 1 {
+carry[i + 1] = 1;
+b_carry = true;
+} else {
+overflow = true;
+}
+}
+}
+if b_carry {
+let ret = overflowing!($name(ret).overflowing_add($name(carry)), overflow);
+(ret, overflow)
+} else {
+($name(ret), overflow)
+}
+}
+
+fn overflowing_sub(self, other: $name) -> ($name, bool) {
+let res = overflowing!((!other).overflowing_add(From::from(1u64)));
+let res = overflowing!(self.overflowing_add(res));
+(res, self < other)
+}
+
+fn overflowing_mul(self, other: $name) -> ($name, bool) {
+let mut res = $name::from(0u64);
+let mut overflow = false;
+// TODO: be more efficient about this
+for i in 0..(2 * $n_words) {
+let v = overflowing!(self.overflowing_mul_u32((other >> (32 * i)).low_u32()), overflow);
+let res2 = overflowing!(v.overflowing_shl(32 * i as u32), overflow);
+res = overflowing!(res.overflowing_add(res2), overflow);
+}
+(res, overflow)
+}
+
+fn overflowing_div(self, other: $name) -> ($name, bool) {
+(self / other, false)
+}
+
+fn overflowing_rem(self, other: $name) -> ($name, bool) {
+(self % other, false)
+}
+
+fn overflowing_neg(self) -> ($name, bool) {
+(!self, true)
+}
+
+fn overflowing_shl(self, shift32: u32) -> ($name, bool) {
+let $name(ref original) = self;
+let mut ret = [0u64; $n_words];
+let shift = shift32 as usize;
+let word_shift = shift / 64;
+let bit_shift = shift % 64;
+for i in 0..$n_words {
+// Shift
+if i + word_shift < $n_words {
+ret[i + word_shift] += original[i] << bit_shift;
+}
+// Carry
+if bit_shift > 0 && i + word_shift + 1 < $n_words {
+ret[i + word_shift + 1] += original[i] >> (64 - bit_shift);
+}
+}
+// Detecting overflow
+let last = $n_words - word_shift - if bit_shift > 0 { 1 } else { 0 };
+let overflow = if bit_shift > 0 {
+(original[last] >> (64 - bit_shift)) > 0
+} else if word_shift > 0 {
+original[last] > 0
+} else {
+false
+};
+
+for i in last+1..$n_words-1 {
+if original[i] > 0 {
+return ($name(ret), true);
+}
+}
+($name(ret), overflow)
+}
 }
 
 impl $name {
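`overflowing_shl` above splits the shift into a whole-word move plus a bit shift with carries into the next word. A standalone sketch of the same decomposition over `[u64; 4]` (the function name `shl_256` is illustrative, not crate API, and overflow detection is omitted):

```rust
fn shl_256(original: [u64; 4], shift: u32) -> [u64; 4] {
	let mut ret = [0u64; 4];
	let word_shift = (shift / 64) as usize; // whole-word part of the shift
	let bit_shift = shift % 64;             // remaining bit shift within a word
	for i in 0..4 {
		if i + word_shift < 4 {
			ret[i + word_shift] |= original[i] << bit_shift;
		}
		// Bits pushed out of a word carry into the word above it.
		if bit_shift > 0 && i + word_shift + 1 < 4 {
			ret[i + word_shift + 1] |= original[i] >> (64 - bit_shift);
		}
	}
	ret
}

fn main() {
	// Shifting by exactly one word moves every limb up one slot.
	assert_eq!(shl_256([1, 0, 0, 0], 64), [0, 1, 0, 0]);
	assert_eq!(shl_256([0, 0, 0, 1], 64), [0, 0, 0, 0]); // top word shifted out
}
```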
@@ -390,105 +508,6 @@ macro_rules! construct_uint {
 }
 }
 
-impl OverflowingOps for $name {
-fn overflowing_add(self, other: $name) -> ($name, bool) {
-let $name(ref me) = self;
-let $name(ref you) = other;
-let mut ret = [0u64; $n_words];
-let mut carry = [0u64; $n_words];
-let mut b_carry = false;
-let mut overflow = false;
-
-for i in 0..$n_words {
-ret[i] = me[i].wrapping_add(you[i]);
-
-if ret[i] < me[i] {
-if i < $n_words - 1 {
-carry[i + 1] = 1;
-b_carry = true;
-} else {
-overflow = true;
-}
-}
-}
-if b_carry {
-let ret = overflowing!($name(ret).overflowing_add($name(carry)), overflow);
-(ret, overflow)
-} else {
-($name(ret), overflow)
-}
-}
-
-fn overflowing_sub(self, other: $name) -> ($name, bool) {
-let res = overflowing!((!other).overflowing_add(From::from(1u64)));
-let res = overflowing!(self.overflowing_add(res));
-(res, self < other)
-}
-
-fn overflowing_mul(self, other: $name) -> ($name, bool) {
-let mut res = $name::from(0u64);
-let mut overflow = false;
-// TODO: be more efficient about this
-for i in 0..(2 * $n_words) {
-let v = overflowing!(self.overflowing_mul_u32((other >> (32 * i)).low_u32()), overflow);
-let res2 = overflowing!(v.overflowing_shl(32 * i as u32), overflow);
-res = overflowing!(res.overflowing_add(res2), overflow);
-}
-(res, overflow)
-}
-
-fn overflowing_div(self, other: $name) -> ($name, bool) {
-(self / other, false)
-}
-
-fn overflowing_rem(self, other: $name) -> ($name, bool) {
-(self % other, false)
-}
-
-fn overflowing_neg(self) -> ($name, bool) {
-(!self, true)
-}
-
-fn overflowing_shl(self, shift32: u32) -> ($name, bool) {
-let $name(ref original) = self;
-let mut ret = [0u64; $n_words];
-let shift = shift32 as usize;
-let word_shift = shift / 64;
-let bit_shift = shift % 64;
-for i in 0..$n_words {
-// Shift
-if i + word_shift < $n_words {
-ret[i + word_shift] += original[i] << bit_shift;
-}
-// Carry
-if bit_shift > 0 && i + word_shift + 1 < $n_words {
-ret[i + word_shift + 1] += original[i] >> (64 - bit_shift);
-}
-}
-// Detecting overflow
-let last = $n_words - word_shift - if bit_shift > 0 { 1 } else { 0 };
-let overflow = if bit_shift > 0 {
-(original[last] >> (64 - bit_shift)) > 0
-} else if word_shift > 0 {
-original[last] > 0
-} else {
-false
-};
-
-for i in last+1..$n_words-1 {
-if original[i] > 0 {
-return ($name(ret), true);
-}
-}
-($name(ret), overflow)
-}
-
-fn overflowing_shr(self, _shift32: u32) -> ($name, bool) {
-// TODO [todr] not used for now
-unimplemented!();
-}
-}
-
 impl Add<$name> for $name {
 type Output = $name;
 
@@ -907,15 +926,17 @@ impl From<U256> for u32 {
 }
 }
 
+/// TODO [Gav Wood] Please document me
 pub const ZERO_U256: U256 = U256([0x00u64; 4]);
+/// TODO [Gav Wood] Please document me
 pub const ONE_U256: U256 = U256([0x01u64, 0x00u64, 0x00u64, 0x00u64]);
+/// TODO [Gav Wood] Please document me
 pub const BAD_U256: U256 = U256([0xffffffffffffffffu64; 4]);
 
 #[cfg(test)]
 mod tests {
 use uint::{Uint, U128, U256, U512};
 use std::str::FromStr;
-use std::num::wrapping::OverflowingOps;
 
 #[test]
 pub fn assign_ops() {
@@ -1297,28 +1318,6 @@ mod tests {
 );
 }
 
-#[ignore]
-#[test]
-pub fn uint256_shr_overflow() {
-assert_eq!(
-U256::from_str("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap()
-.overflowing_shr(4),
-(U256::from_str("0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap(), true)
-);
-}
-
-#[ignore]
-#[test]
-pub fn uint256_shr_overflow2() {
-assert_eq!(
-U256::from_str("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0").unwrap()
-.overflowing_shr(4),
-(U256::from_str("0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap(), false)
-);
-}
-
-
 #[test]
 pub fn uint256_mul() {
 assert_eq!(
@@ -2,7 +2,9 @@
 
 use std::ptr;
 
+/// TODO [debris] Please document me
 pub trait InsertSlice<T> {
+/// TODO [debris] Please document me
 fn insert_slice(&mut self, index: usize, elements: &[T]);
 }
 
@@ -47,6 +49,7 @@ impl<T> InsertSlice<T> for Vec<T> {
 /// }
 /// ```
 pub trait SharedPrefix <T> {
+/// TODO [debris] Please document me
 fn shared_prefix_len(&self, elem: &[T]) -> usize;
 }
 
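A hedged sketch of what `shared_prefix_len` computes: the number of leading elements two slices have in common (the trait above is implemented for `Vec<T>`; the free function here is only for illustration):

```rust
fn shared_prefix_len<T: PartialEq>(a: &[T], b: &[T]) -> usize {
	a.iter().zip(b.iter()).take_while(|&(x, y)| x == y).count()
}

fn main() {
	assert_eq!(shared_prefix_len(&[1, 2, 3], &[1, 2, 4]), 2);
	assert_eq!(shared_prefix_len(&[1, 2], &[3]), 0);
}
```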