commit 8c715e025a
Merge branch 'lightcli' into light-filters
@@ -448,8 +448,8 @@ windows:
 - signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
 - target\release\parity.exe tools hash target\release\parity.exe > parity.sha3
 - set /P SHA3=<parity.sha3
-- curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
-- curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
+- curl -sL --url "https://github.com/paritytech/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
+- curl -sL --url "https://github.com/paritytech/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
 - msbuild windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release
 - signtool sign /f %keyfile% /p %certpass% windows\ptray\x64\release\ptray.exe
 - cd nsis
Cargo.lock (generated): 445 lines changed — diff suppressed because it is too large.
@@ -8,16 +8,16 @@ build = "build.rs"
 
 [dependencies]
 log = "0.3"
-env_logger = "0.3"
+env_logger = "0.4"
 rustc-serialize = "0.3"
-docopt = "0.6"
+docopt = "0.7"
 time = "0.1"
 num_cpus = "1.2"
 number_prefix = "0.2"
 rpassword = "0.2.1"
-semver = "0.5"
+semver = "0.6"
 ansi_term = "0.9"
-regex = "0.1"
+regex = "0.2"
 isatty = "0.1"
 toml = "0.2"
 serde = "0.9"
README.md: 12 lines changed.
@@ -13,14 +13,14 @@ parity.js [![Join the chat at https://gitter.im/ethcore/parity.js](https://badge
 
 Be sure to check out [our wiki][wiki-url] for more information.
 
-[coveralls-image]: https://coveralls.io/repos/github/ethcore/parity/badge.svg?branch=master
-[coveralls-url]: https://coveralls.io/github/ethcore/parity?branch=master
+[coveralls-image]: https://coveralls.io/repos/github/paritytech/parity/badge.svg?branch=master
+[coveralls-url]: https://coveralls.io/github/paritytech/parity?branch=master
 [gitter-image]: https://badges.gitter.im/Join%20Chat.svg
 [gitter-url]: https://gitter.im/ethcore/parity?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
 [license-image]: https://img.shields.io/badge/license-GPL%20v3-green.svg
 [license-url]: https://www.gnu.org/licenses/gpl-3.0.en.html
-[doc-url]: https://ethcore.github.io/parity/ethcore/index.html
-[wiki-url]: https://github.com/ethcore/parity/wiki
+[doc-url]: https://paritytech.github.io/parity/ethcore/index.html
+[wiki-url]: https://github.com/paritytech/parity/wiki
 
 **Parity requires Rust version 1.15.0 to build**
 
@@ -83,7 +83,7 @@ Once you have rustup, install parity or download and build from source
 ## Quick install
 
 ```bash
-cargo install --git https://github.com/ethcore/parity.git parity
+cargo install --git https://github.com/paritytech/parity.git parity
 ```
 
 ----
@@ -92,7 +92,7 @@ cargo install --git https://github.com/ethcore/parity.git parity
 
 ```bash
 # download Parity code
-$ git clone https://github.com/ethcore/parity
+$ git clone https://github.com/paritytech/parity
 $ cd parity
 
 # build in release mode
@@ -9,7 +9,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
 
 [dependencies]
 base32 = "0.3"
-env_logger = "0.3"
+env_logger = "0.4"
 futures = "0.1"
 linked-hash-map = "0.3"
 log = "0.3"
@@ -6,7 +6,7 @@ Code generator to simplify creating a built-in Parity Dapp
 1. Clone this repository.
 
 ```bash
-$ git clone https://github.com/ethcore/parity.git
+$ git clone https://github.com/paritytech/parity.git
 ```
 
 1. Create a new directory for your Dapp. (`./myapp`)
@@ -29,10 +29,10 @@ Code generator to simplify creating a built-in Parity Dapp
 
 The `inject.js` script will create global `web3` instance with proper provider that should be used by your dapp.
 
-1. Create `./parity/dapps/myapp/Cargo.toml` with you apps details. See example here: [parity-status Cargo.toml](https://github.com/ethcore/parity-ui/blob/master/status/Cargo.toml).
+1. Create `./parity/dapps/myapp/Cargo.toml` with you apps details. See example here: [parity-status Cargo.toml](https://github.com/paritytech/parity-ui/blob/master/status/Cargo.toml).
 
 ```bash
-$ git clone https://github.com/ethcore/parity-ui.git
+$ git clone https://github.com/paritytech/parity-ui.git
 $ cd ./parity-ui/
 $ cp ./home/Cargo.toml ../parity/dapps/myapp/Cargo.toml
 $ cp ./home/build.rs ../parity/dapps/myapp/build.rs
@@ -67,7 +67,7 @@ pub fn init_server<F, B>(process: F, io: MetaIoHandler<Metadata>, remote: Remote
 let mut dapps_path = env::temp_dir();
 dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
 
-// TODO [ToDr] When https://github.com/ethcore/jsonrpc/issues/26 is resolved
+// TODO [ToDr] When https://github.com/paritytech/jsonrpc/issues/26 is resolved
 // this additional EventLoop wouldn't be needed, we should be able to re-use remote.
 let event_loop = EventLoop::spawn();
 let server = process(ServerBuilder::new(
@@ -12,7 +12,7 @@ rustc_version = "0.1"
 [dependencies]
 parity-ui-dev = { path = "../../js", optional = true }
 # This is managed by the js/scripts/release.sh script on CI - keep it in a single line
-parity-ui-precompiled = { git = "https://github.com/ethcore/js-precompiled.git", optional = true, branch = "master" }
+parity-ui-precompiled = { git = "https://github.com/paritytech/js-precompiled.git", optional = true, branch = "master" }
 
 [features]
 no-precompiled-js = ["parity-ui-dev"]
@@ -18,7 +18,7 @@ cargo -V && \
 gcc -v &&\
 g++ -v
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity&&\
 git checkout beta && \
 git pull && \
@@ -48,7 +48,7 @@ RUN apt-get update && \
 # show backtraces
 RUST_BACKTRACE=1 && \
 # build parity
-cd /build&&git clone https://github.com/ethcore/parity && \
+cd /build&&git clone https://github.com/paritytech/parity && \
 cd parity && \
 git pull&& \
 git checkout $BUILD_TAG && \
@@ -28,7 +28,7 @@ ENV RUST_BACKTRACE 1
 cargo -V
 
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity && \
 git checkout beta && \
 git pull && \
@@ -29,7 +29,7 @@ ENV RUST_BACKTRACE 1
 cargo -V
 
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity && \
 git checkout beta && \
 git pull && \
@@ -45,7 +45,7 @@ gcc -v &&\
 g++ -v
 
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity && \
 git checkout beta && \
 git pull && \
@@ -26,7 +26,7 @@ gcc -v &&\
 g++ -v
 
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity && \
 git checkout stable && \
 git pull && \
@@ -26,7 +26,7 @@ gcc -v &&\
 g++ -v
 
 # build parity
-RUN git clone https://github.com/ethcore/parity && \
+RUN git clone https://github.com/paritytech/parity && \
 cd parity && \
 git checkout beta && \
 git pull && \
@@ -9,4 +9,4 @@ authors = ["Parity Technologies <admin@parity.io>"]
 log = "0.3"
 sha3 = { path = "../util/sha3" }
 primal = "0.2.3"
-parking_lot = "0.3"
+parking_lot = "0.4"
@@ -12,14 +12,14 @@ build = "build.rs"
 
 [dependencies]
 log = "0.3"
-env_logger = "0.3"
+env_logger = "0.4"
 rustc-serialize = "0.3"
 rust-crypto = "0.2.34"
 num_cpus = "1.2"
 crossbeam = "0.2.9"
 lazy_static = "0.2"
 bloomchain = "0.1"
-semver = "0.5"
+semver = "0.6"
 bit-set = "0.4"
 time = "0.1"
 rand = "0.3"
@@ -27,6 +27,7 @@ byteorder = "1.0"
 transient-hashmap = "0.4"
 linked-hash-map = "0.3.0"
 lru-cache = "0.1.0"
+itertools = "0.5"
 ethabi = "1.0.0"
 evmjit = { path = "../evmjit", optional = true }
 clippy = { version = "0.0.103", optional = true}
@@ -47,6 +48,7 @@ ethcore-logger = { path = "../logger" }
 stats = { path = "../util/stats" }
 hyper = { git = "https://github.com/paritytech/hyper", default-features = false }
 num = "0.1"
+bn = { git = "https://github.com/paritytech/bn" }
 
 [features]
 jit = ["evmjit"]
@@ -109,7 +109,7 @@ impl Encodable for CostTable {
 fn append_cost(s: &mut RlpStream, cost: &U256, kind: request::Kind) {
 s.begin_list(2);
 
-// hack around https://github.com/ethcore/parity/issues/4356
+// hack around https://github.com/paritytech/parity/issues/4356
 Encodable::rlp_append(&kind, s);
 s.append(cost);
 }
@@ -278,7 +278,7 @@ impl Encodable for Request {
 fn rlp_append(&self, s: &mut RlpStream) {
 s.begin_list(2);
 
-// hack around https://github.com/ethcore/parity/issues/4356
+// hack around https://github.com/paritytech/parity/issues/4356
 Encodable::rlp_append(&self.kind(), s);
 
 match *self {
@@ -470,7 +470,7 @@ impl Encodable for Response {
 fn rlp_append(&self, s: &mut RlpStream) {
 s.begin_list(2);
 
-// hack around https://github.com/ethcore/parity/issues/4356
+// hack around https://github.com/paritytech/parity/issues/4356
 Encodable::rlp_append(&self.kind(), s);
 
 match *self {
@@ -53,7 +53,9 @@
 "enode://5fbfb426fbb46f8b8c1bd3dd140f5b511da558cd37d60844b525909ab82e13a25ee722293c829e52cb65c2305b1637fa9a2ea4d6634a224d5f400bfe244ac0de@162.243.55.45:30303",
 "enode://42d8f29d1db5f4b2947cd5c3d76c6d0d3697e6b9b3430c3d41e46b4bb77655433aeedc25d4b4ea9d8214b6a43008ba67199374a9b53633301bca0cd20c6928ab@104.155.176.151:30303",
 "enode://814920f1ec9510aa9ea1c8f79d8b6e6a462045f09caa2ae4055b0f34f7416fca6facd3dd45f1cf1673c0209e0503f02776b8ff94020e98b6679a0dc561b4eba0@104.154.136.117:30303",
-"enode://72e445f4e89c0f476d404bc40478b0df83a5b500d2d2e850e08eb1af0cd464ab86db6160d0fde64bd77d5f0d33507ae19035671b3c74fec126d6e28787669740@104.198.71.200:30303"
+"enode://72e445f4e89c0f476d404bc40478b0df83a5b500d2d2e850e08eb1af0cd464ab86db6160d0fde64bd77d5f0d33507ae19035671b3c74fec126d6e28787669740@104.198.71.200:30303",
+"enode://39abab9d2a41f53298c0c9dc6bbca57b0840c3ba9dccf42aa27316addc1b7e56ade32a0a9f7f52d6c5db4fe74d8824bcedfeaecf1a4e533cacb71cf8100a9442@144.76.238.49:30303",
+"enode://f50e675a34f471af2438b921914b5f06499c7438f3146f6b8936f1faeb50b8a91d0d0c24fb05a66f05865cd58c24da3e664d0def806172ddd0d4c5bdbf37747e@144.76.238.49:30306"
 ],
 "accounts": {
 "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
@@ -190,6 +190,8 @@
 "0000000000000000000000000000000000000003": { "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
 "0000000000000000000000000000000000000004": { "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
 "0000000000000000000000000000000000000005": { "builtin": { "name": "modexp", "activate_at": "0x7fffffffffffff", "pricing": { "modexp": { "divisor": 20 } } } },
+"0000000000000000000000000000000000000006": { "builtin": { "name": "bn128_add", "activate_at": "0x7fffffffffffff", "pricing": { "linear": { "base": 999999, "word": 0 } } } },
+"0000000000000000000000000000000000000007": { "builtin": { "name": "bn128_mul", "activate_at": "0x7fffffffffffff", "pricing": { "linear": { "base": 999999, "word": 0 } } } },
 "3282791d6fd713f1e94f4bfd565eaa78b3a0599d": {
 "balance": "1337000000000000000000"
 },
@@ -23,7 +23,9 @@
 
 "0x00a0a24b9f0e5ec7aa4c7389b8302fd0123194de"
 ]
-}
+},
+"validateScoreTransition": 1000000,
+"eip155Transition": 1000000
 }
 }
 },
@@ -31,7 +33,7 @@
 "maximumExtraDataSize": "0x20",
 "minGasLimit": "0x1388",
 "networkID" : "0x2A",
-"validateReceipts" : false
+"validateReceiptsTransition" : 1000000
 },
 "genesis": {
 "seal": {
@@ -25,8 +25,8 @@
 "maximumExtraDataSize": "0x20",
 "minGasLimit": "0x1388",
 "networkID" : "0x3",
-"forkBlock": 333922,
-"forkCanonHash": "0x8737eb141d4f05db57af63fc8d3b4d4d8f9cddb0c4e1ab855de8c288fdc1924f",
+"forkBlock": 641350,
+"forkCanonHash": "0x8033403e9fe5811a7b6d6b469905915de1c59207ce2172cbcf5d6ff14fa6a2eb",
 "eip98Transition": "0x7fffffffffffff"
 },
 "genesis": {
@@ -44,11 +44,8 @@
 "gasLimit": "0x1000000"
 },
 "nodes": [
-"enode://a22f0977ce02653bf95e38730106356342df48b5222e2c2a1a6f9ef34769bf593bae9ca0a888cf60839edd52efc1b6e393c63a57d76f4c4fe14e641f1f9e637e@128.199.55.137:30303",
-"enode://012239fccf3ff1d92b036983a430cb6705c6528c96c0354413f8854802138e5135c084ab36e7c54efb621c46728df8c3a6f4c1db9bb48a1330efe3f82f2dd7a6@52.169.94.142:30303",
-"enode://1462682e4b7ba2258346d55e25e5b9d264b0db40cee12bdfba4e72b1d7050350ea954c006e9106dd96a128e6e0bd6dffb17eed51f9f99bf7f9cdadfeaf8da4ff@51.15.61.253:30303",
-"enode://98fbb020c799ae39a828bd75dc2bd5d4721539faf317076b275f91182a5c8900b592e8abfdddceae674a7c3bb40ea00a6ca9ccb7805ab58c4b7b29c61c8f7239@51.15.62.44:30303",
-"enode://d801dd4e3d15a8bf785931add164bd9c313e3f6b5749d9302b311f2b48064cba5c86c32b1302c27cd983fc89ae07d4d306dd1197610835b8782e95dfb1b3f9ea@51.15.43.255:30303"
+"enode://20c9ad97c081d63397d7b685a412227a40e23c8bdc6688c6f37e97cfbc22d2b4d1db1510d8f61e6a8866ad7f0e17c02b14182d37ea7c3c8b9c2683aeb6b733a1@52.169.14.227:30303",
+"enode://6ce05930c72abc632c58e2e4324f7c7ea478cec0ed4fa2528982cf34483094e9cbc9216e7aa349691242576d552a2a56aaeae426c5303ded677ce455ba1acd9d@13.84.180.240:30303"
 ],
 "accounts": {
 "0000000000000000000000000000000000000001": { "balance": "1", "nonce": "0", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
@@ -492,6 +492,16 @@ impl LockedBlock {
 _ => Ok(SealedBlock { block: s.block, uncle_bytes: s.uncle_bytes }),
 }
 }
 
+/// Remove state root from transaction receipts to make them EIP-98 compatible.
+pub fn strip_receipts(self) -> LockedBlock {
+let mut block = self;
+for receipt in &mut block.block.receipts {
+receipt.state_root = None;
+}
+block.block.header.set_receipts_root(ordered_trie_root(block.block.receipts.iter().map(|r| r.rlp_bytes().to_vec())));
+block
+}
 }
 
 impl Drain for LockedBlock {
@@ -27,10 +27,19 @@ use util::{U256, H256, Uint, Hashable, BytesRef};
 use ethkey::{Signature, recover as ec_recover};
 use ethjson;
 
+#[derive(Debug)]
+pub struct Error(pub &'static str);
+
+impl From<&'static str> for Error {
+fn from(val: &'static str) -> Self {
+Error(val)
+}
+}
+
 /// Native implementation of a built-in contract.
 pub trait Impl: Send + Sync {
 /// execute this built-in on the given input, writing to the given output.
-fn execute(&self, input: &[u8], output: &mut BytesRef);
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error>;
 }
 
 /// A gas pricing scheme for built-in contracts.
@@ -102,7 +111,9 @@ impl Builtin {
 pub fn cost(&self, input: &[u8]) -> U256 { self.pricer.cost(input) }
 
 /// Simple forwarder for execute.
-pub fn execute(&self, input: &[u8], output: &mut BytesRef) { self.native.execute(input, output) }
+pub fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
+self.native.execute(input, output)
+}
 
 /// Whether the builtin is activated at the given block number.
 pub fn is_active(&self, at: u64) -> bool { at >= self.activate_at }
@@ -145,6 +156,8 @@ fn ethereum_builtin(name: &str) -> Box<Impl> {
 "sha256" => Box::new(Sha256) as Box<Impl>,
 "ripemd160" => Box::new(Ripemd160) as Box<Impl>,
 "modexp" => Box::new(ModexpImpl) as Box<Impl>,
+"bn128_add" => Box::new(Bn128AddImpl) as Box<Impl>,
+"bn128_mul" => Box::new(Bn128MulImpl) as Box<Impl>,
 _ => panic!("invalid builtin name: {}", name),
 }
 }
@@ -172,14 +185,21 @@ struct Ripemd160;
 #[derive(Debug)]
 struct ModexpImpl;
 
+#[derive(Debug)]
+struct Bn128AddImpl;
+
+#[derive(Debug)]
+struct Bn128MulImpl;
+
 impl Impl for Identity {
-fn execute(&self, input: &[u8], output: &mut BytesRef) {
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
 output.write(0, input);
+Ok(())
 }
 }
 
 impl Impl for EcRecover {
-fn execute(&self, i: &[u8], output: &mut BytesRef) {
+fn execute(&self, i: &[u8], output: &mut BytesRef) -> Result<(), Error> {
 let len = min(i.len(), 128);
 
 let mut input = [0; 128];
@@ -192,7 +212,7 @@ impl Impl for EcRecover {
 
 let bit = match v[31] {
 27 | 28 if &v.0[..31] == &[0; 31] => v[31] - 27,
-_ => return,
+_ => { return Ok(()); },
 };
 
 let s = Signature::from_rsv(&r, &s, bit);
@@ -203,11 +223,13 @@ impl Impl for EcRecover {
 output.write(12, &r[12..r.len()]);
 }
 }
 
+Ok(())
 }
 }
 
 impl Impl for Sha256 {
-fn execute(&self, input: &[u8], output: &mut BytesRef) {
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
 let mut sha = Sha256Digest::new();
 sha.input(input);
 
@@ -215,11 +237,13 @@ impl Impl for Sha256 {
 sha.result(&mut out);
 
 output.write(0, &out);
 
+Ok(())
 }
 }
 
 impl Impl for Ripemd160 {
-fn execute(&self, input: &[u8], output: &mut BytesRef) {
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
 let mut sha = Ripemd160Digest::new();
 sha.input(input);
 
@@ -227,11 +251,13 @@ impl Impl for Ripemd160 {
 sha.result(&mut out[12..32]);
 
 output.write(0, &out);
 
+Ok(())
 }
 }
 
 impl Impl for ModexpImpl {
-fn execute(&self, input: &[u8], output: &mut BytesRef) {
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
 let mut reader = input.chain(io::repeat(0));
 let mut buf = [0; 32];
 
@@ -294,6 +320,76 @@ impl Impl for ModexpImpl {
 let res_start = mod_len - bytes.len();
 output.write(res_start, &bytes);
 }
 
+Ok(())
+}
+}
+
+fn read_fr(reader: &mut io::Chain<&[u8], io::Repeat>) -> Result<::bn::Fr, Error> {
+let mut buf = [0u8; 32];
+
+reader.read_exact(&mut buf[..]).expect("reading from zero-extended memory cannot fail; qed");
+::bn::Fr::from_slice(&buf[0..32]).map_err(|_| Error::from("Invalid field element"))
+}
+
+fn read_point(reader: &mut io::Chain<&[u8], io::Repeat>) -> Result<::bn::G1, Error> {
+use bn::{Fq, AffineG1, G1, Group};
+
+let mut buf = [0u8; 32];
+
+reader.read_exact(&mut buf[..]).expect("reading from zero-extended memory cannot fail; qed");
+let px = Fq::from_slice(&buf[0..32]).map_err(|_| Error::from("Invalid point x coordinate"))?;
+
+reader.read_exact(&mut buf[..]).expect("reading from zero-extended memory cannot fail; qed");
+let py = Fq::from_slice(&buf[0..32]).map_err(|_| Error::from("Invalid point x coordinate"))?;
+
+Ok(
+if px == Fq::zero() && py == Fq::zero() {
+G1::zero()
+} else {
+AffineG1::new(px, py).map_err(|_| Error::from("Invalid curve point"))?.into()
+}
+)
+}
+
+impl Impl for Bn128AddImpl {
+// Can fail if any of the 2 points does not belong the bn128 curve
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
+use bn::AffineG1;
+
+let mut padded_input = input.chain(io::repeat(0));
+let p1 = read_point(&mut padded_input)?;
+let p2 = read_point(&mut padded_input)?;
+
+let mut write_buf = [0u8; 64];
+if let Some(sum) = AffineG1::from_jacobian(p1 + p2) {
+// point not at infinity
+sum.x().to_big_endian(&mut write_buf[0..32]).expect("Cannot fail since 0..32 is 32-byte length");
+sum.y().to_big_endian(&mut write_buf[32..64]).expect("Cannot fail since 32..64 is 32-byte length");;
+}
+output.write(0, &write_buf);
+
+Ok(())
+}
+}
+
+impl Impl for Bn128MulImpl {
+// Can fail if first paramter (bn128 curve point) does not actually belong to the curve
+fn execute(&self, input: &[u8], output: &mut BytesRef) -> Result<(), Error> {
+use bn::AffineG1;
+
+let mut padded_input = input.chain(io::repeat(0));
+let p = read_point(&mut padded_input)?;
+let fr = read_fr(&mut padded_input)?;
+
+let mut write_buf = [0u8; 64];
+if let Some(sum) = AffineG1::from_jacobian(p * fr) {
+// point not at infinity
+sum.x().to_big_endian(&mut write_buf[0..32]).expect("Cannot fail since 0..32 is 32-byte length");
+sum.y().to_big_endian(&mut write_buf[32..64]).expect("Cannot fail since 32..64 is 32-byte length");;
+}
+output.write(0, &write_buf);
+Ok(())
 }
 }
 
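The `read_fr`/`read_point` helpers above lean on `input.chain(io::repeat(0))`: chaining the caller's bytes with an endless stream of zeros means every 32-byte `read_exact` succeeds and short inputs are implicitly zero-padded, which is what justifies the `expect("reading from zero-extended memory cannot fail; qed")`. A self-contained sketch of that pattern using only the standard library (buffer contents are illustrative):

```rust
use std::io::{self, Read};

fn main() {
    // Three bytes of "real" input, read as if zero-extended to any length.
    let input: &[u8] = &[0xde, 0xad, 0xbe];
    let mut padded = input.chain(io::repeat(0));

    let mut word = [0u8; 32];
    // io::repeat(0) never reaches EOF, so read_exact always fills the buffer.
    padded.read_exact(&mut word).expect("zero-extended stream never ends");

    assert_eq!(&word[..3], &[0xde, 0xad, 0xbe][..]);
    assert!(word[3..].iter().all(|&b| b == 0));
}
```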
@@ -310,15 +406,15 @@ mod tests {
 let i = [0u8, 1, 2, 3];
 
 let mut o2 = [255u8; 2];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o2[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o2[..])).expect("Builtin should not fail");
 assert_eq!(i[0..2], o2);
 
 let mut o4 = [255u8; 4];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o4[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o4[..])).expect("Builtin should not fail");
 assert_eq!(i, o4);
 
 let mut o8 = [255u8; 8];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..])).expect("Builtin should not fail");
 assert_eq!(i, o8[..4]);
 assert_eq!([255u8; 4], o8[4..]);
 }
@@ -331,19 +427,19 @@ mod tests {
 let i = [0u8; 0];
 
 let mut o = [255u8; 32];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855").unwrap())[..]);
 
 let mut o8 = [255u8; 8];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..])).expect("Builtin should not fail");
 assert_eq!(&o8[..], &(FromHex::from_hex("e3b0c44298fc1c14").unwrap())[..]);
 
 let mut o34 = [255u8; 34];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..])).expect("Builtin should not fail");
 assert_eq!(&o34[..], &(FromHex::from_hex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855ffff").unwrap())[..]);
 
 let mut ov = vec![];
-f.execute(&i[..], &mut BytesRef::Flexible(&mut ov));
+f.execute(&i[..], &mut BytesRef::Flexible(&mut ov)).expect("Builtin should not fail");
 assert_eq!(&ov[..], &(FromHex::from_hex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855").unwrap())[..]);
 }
 
@@ -355,15 +451,15 @@ mod tests {
 let i = [0u8; 0];
 
 let mut o = [255u8; 32];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("0000000000000000000000009c1185a5c5e9fc54612808977ee8f548b2258d31").unwrap())[..]);
 
 let mut o8 = [255u8; 8];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..])).expect("Builtin should not fail");
 assert_eq!(&o8[..], &(FromHex::from_hex("0000000000000000").unwrap())[..]);
 
 let mut o34 = [255u8; 34];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..])).expect("Builtin should not fail");
 assert_eq!(&o34[..], &(FromHex::from_hex("0000000000000000000000009c1185a5c5e9fc54612808977ee8f548b2258d31ffff").unwrap())[..]);
 }
 
@@ -383,40 +479,40 @@ mod tests {
 let i = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001b650acf9d3f5f0a2c799776a1254355d5f4061762a237396a99a0e0e3fc2bcd6729514a0dacb2e623ac4abd157cb18163ff942280db4d5caad66ddf941ba12e03").unwrap();
 
 let mut o = [255u8; 32];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("000000000000000000000000c08b5542d177ac6686946920409741463a15dddb").unwrap())[..]);
 
 let mut o8 = [255u8; 8];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o8[..])).expect("Builtin should not fail");
 assert_eq!(&o8[..], &(FromHex::from_hex("0000000000000000").unwrap())[..]);
 
 let mut o34 = [255u8; 34];
-f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..]));
+f.execute(&i[..], &mut BytesRef::Fixed(&mut o34[..])).expect("Builtin should not fail");
 assert_eq!(&o34[..], &(FromHex::from_hex("000000000000000000000000c08b5542d177ac6686946920409741463a15dddbffff").unwrap())[..]);
 
 let i_bad = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001a650acf9d3f5f0a2c799776a1254355d5f4061762a237396a99a0e0e3fc2bcd6729514a0dacb2e623ac4abd157cb18163ff942280db4d5caad66ddf941ba12e03").unwrap();
 let mut o = [255u8; 32];
-f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap())[..]);
 
 let i_bad = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b0000000000000000000000000000000000000000000000000000000000000000").unwrap();
 let mut o = [255u8; 32];
-f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap())[..]);
 
 let i_bad = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001b").unwrap();
 let mut o = [255u8; 32];
-f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap())[..]);
 
 let i_bad = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001bffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000000000000000000000000000000000000000000000000000000000001b").unwrap();
 let mut o = [255u8; 32];
-f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap())[..]);
 
 let i_bad = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001bffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap();
 let mut o = [255u8; 32];
-f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..]));
+f.execute(&i_bad[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(&o[..], &(FromHex::from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap())[..]);
 
 // TODO: Should this (corrupted version of the above) fail rather than returning some address?
@@ -450,7 +546,7 @@ mod tests {
 let expected = FromHex::from_hex("0000000000000000000000000000000000000000000000000000000000000001").unwrap();
 let expected_cost = 1638;
 
-f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..]));
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
 assert_eq!(output, expected);
 assert_eq!(f.cost(&input[..]), expected_cost.into());
 }
@@ -469,7 +565,7 @@ mod tests {
 let expected = FromHex::from_hex("0000000000000000000000000000000000000000000000000000000000000000").unwrap();
 let expected_cost = 1638;
 
-f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..]));
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
 assert_eq!(output, expected);
 assert_eq!(f.cost(&input[..]), expected_cost.into());
 }
@@ -489,7 +585,7 @@ mod tests {
 let expected = FromHex::from_hex("3b01b01ac41f2d6e917c6d6a221ce793802469026d9ab7578fa2e79e4da6aaab").unwrap();
 let expected_cost = 102;
 
-f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..]));
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
 assert_eq!(output, expected);
 assert_eq!(f.cost(&input[..]), expected_cost.into());
 }
@@ -507,12 +603,118 @@ mod tests {
 let mut output = vec![];
 let expected_cost = 0;
 
-f.execute(&input[..], &mut BytesRef::Flexible(&mut output));
+f.execute(&input[..], &mut BytesRef::Flexible(&mut output)).expect("Builtin should not fail");
 assert_eq!(output.len(), 0); // shouldn't have written any output.
 assert_eq!(f.cost(&input[..]), expected_cost.into());
 }
 }
 
+#[test]
+fn bn128_add() {
+use rustc_serialize::hex::FromHex;
+
+let f = Builtin {
+pricer: Box::new(Linear { base: 0, word: 0 }),
+native: ethereum_builtin("bn128_add"),
+activate_at: 0,
+};
+
+// zero-points additions
+{
+let input = FromHex::from_hex("\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+let mut output = vec![0u8; 64];
+let expected = FromHex::from_hex("\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
+assert_eq!(output, expected);
+}
+
+// no input, should not fail
+{
+let mut empty = [0u8; 0];
+let input = BytesRef::Fixed(&mut empty);
+
+let mut output = vec![0u8; 64];
+let expected = FromHex::from_hex("\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
+assert_eq!(output, expected);
+}
+
+// should fail - point not on curve
+{
+let input = FromHex::from_hex("\
+1111111111111111111111111111111111111111111111111111111111111111\
+1111111111111111111111111111111111111111111111111111111111111111\
+1111111111111111111111111111111111111111111111111111111111111111\
+1111111111111111111111111111111111111111111111111111111111111111"
+).unwrap();
+
+let mut output = vec![0u8; 64];
+
+let res = f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..]));
+assert!(res.is_err(), "There should be built-in error here");
+}
+}
+
+#[test]
+fn bn128_mul() {
+use rustc_serialize::hex::FromHex;
+
+let f = Builtin {
+pricer: Box::new(Linear { base: 0, word: 0 }),
+native: ethereum_builtin("bn128_mul"),
+activate_at: 0,
+};
+
+// zero-point multiplication
+{
+let input = FromHex::from_hex("\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000\
+0200000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+let mut output = vec![0u8; 64];
+let expected = FromHex::from_hex("\
+0000000000000000000000000000000000000000000000000000000000000000\
+0000000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..])).expect("Builtin should not fail");
+assert_eq!(output, expected);
+}
+
+// should fail - point not on curve
+{
+let input = FromHex::from_hex("\
+1111111111111111111111111111111111111111111111111111111111111111\
+1111111111111111111111111111111111111111111111111111111111111111\
+0f00000000000000000000000000000000000000000000000000000000000000"
+).unwrap();
+
+let mut output = vec![0u8; 64];
+
+let res = f.execute(&input[..], &mut BytesRef::Fixed(&mut output[..]));
+assert!(res.is_err(), "There should be built-in error here");
+}
+}
+
 #[test]
 #[should_panic]
 fn from_unknown_linear() {
@@ -549,7 +751,7 @@ mod tests {
 
 let i = [0u8, 1, 2, 3];
 let mut o = [255u8; 4];
-b.execute(&i[..], &mut BytesRef::Fixed(&mut o[..]));
+b.execute(&i[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(i, o);
 }
 
@@ -571,7 +773,7 @@ mod tests {
 
 let i = [0u8, 1, 2, 3];
 let mut o = [255u8; 4];
-b.execute(&i[..], &mut BytesRef::Fixed(&mut o[..]));
+b.execute(&i[..], &mut BytesRef::Fixed(&mut o[..])).expect("Builtin should not fail");
 assert_eq!(i, o);
 }
 }
@@ -388,12 +388,16 @@ impl Client {
 let db = self.state_db.lock().boxed_clone_canon(header.parent_hash());
 
 let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
-let locked_block = enact_result.map_err(|e| {
+let mut locked_block = enact_result.map_err(|e| {
 warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 })?;
 
+if header.number() < self.engine().params().validate_receipts_transition && header.receipts_root() != locked_block.block().header().receipts_root() {
+locked_block = locked_block.strip_receipts();
+}
+
 // Final Verification
-if let Err(e) = self.verifier.verify_block_final(header, locked_block.block().header(), self.engine().params().validate_receipts) {
+if let Err(e) = self.verifier.verify_block_final(header, locked_block.block().header()) {
 warn!(target: "client", "Stage 4 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 return Err(());
 }
@@ -27,12 +27,13 @@ use block::*;
 use spec::CommonParams;
 use engines::{Engine, Seal, EngineError};
 use header::Header;
-use error::{Error, BlockError};
+use error::{Error, TransactionError, BlockError};
 use evm::Schedule;
 use ethjson;
 use io::{IoContext, IoHandler, TimerToken, IoService};
 use env_info::EnvInfo;
 use builtin::Builtin;
+use transaction::UnverifiedTransaction;
 use client::{Client, EngineClient};
 use state::CleanupMode;
 use super::signer::EngineSigner;
@@ -53,6 +54,10 @@ pub struct AuthorityRoundParams {
 pub start_step: Option<u64>,
 /// Valid validators.
 pub validators: ethjson::spec::ValidatorSet,
+/// Chain score validation transition block.
+pub validate_score_transition: u64,
+/// Number of first block where EIP-155 rules are validated.
+pub eip155_transition: u64,
 }
 
 impl From<ethjson::spec::AuthorityRoundParams> for AuthorityRoundParams {
@@ -64,6 +69,8 @@ impl From<ethjson::spec::AuthorityRoundParams> for AuthorityRoundParams {
 block_reward: p.block_reward.map_or_else(U256::zero, Into::into),
 registrar: p.registrar.map_or_else(Address::new, Into::into),
 start_step: p.start_step.map(Into::into),
+validate_score_transition: p.validate_score_transition.map_or(0, Into::into),
+eip155_transition: p.eip155_transition.map_or(0, Into::into),
 }
 }
 }
@@ -85,6 +92,8 @@ pub struct AuthorityRound {
 validators: Box<ValidatorSet>,
 /// Is this Engine just for testing (prevents step calibration).
 calibrate_step: bool,
+validate_score_transition: u64,
+eip155_transition: u64,
 }
 
 fn header_step(header: &Header) -> Result<usize, ::rlp::DecoderError> {
@@ -125,6 +134,8 @@ impl AuthorityRound {
 signer: Default::default(),
 validators: new_validator_set(our_params.validators),
 calibrate_step: our_params.start_step.is_none(),
+validate_score_transition: our_params.validate_score_transition,
+eip155_transition: our_params.eip155_transition,
 });
 // Do not initialize timeouts for tests.
 if should_timeout {
@@ -295,13 +306,17 @@ impl Engine for AuthorityRound {
 Err(From::from(BlockError::InvalidSealArity(
 Mismatch { expected: self.seal_fields(), found: header.seal().len() }
 )))
+} else if header.number() >= self.validate_score_transition && *header.difficulty() >= U256::from(U128::max_value()) {
+Err(From::from(BlockError::DifficultyOutOfBounds(
+OutOfBounds { min: None, max: Some(U256::from(U128::max_value())), found: *header.difficulty() }
+)))
 } else {
 Ok(())
 }
 }
 
 fn verify_block_unordered(&self, _header: &Header, _block: Option<&[u8]>) -> Result<(), Error> {
 Ok(())
 }
 
 /// Do the validator and gas limit validation.
@ -327,7 +342,8 @@ impl Engine for AuthorityRound {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Check if parent is from a previous step.
|
// Check if parent is from a previous step.
|
||||||
if step == header_step(parent)? {
|
let parent_step = header_step(parent)?;
|
||||||
|
if step == parent_step {
|
||||||
trace!(target: "engine", "Multiple blocks proposed for step {}.", step);
|
trace!(target: "engine", "Multiple blocks proposed for step {}.", step);
|
||||||
self.validators.report_malicious(header.author());
|
self.validators.report_malicious(header.author());
|
||||||
Err(EngineError::DoubleVote(header.author().clone()))?;
|
Err(EngineError::DoubleVote(header.author().clone()))?;
|
||||||
@ -342,6 +358,18 @@ impl Engine for AuthorityRound {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, header: &Header) -> result::Result<(), Error> {
|
||||||
|
t.check_low_s()?;
|
||||||
|
|
||||||
|
if let Some(n) = t.network_id() {
|
||||||
|
if header.number() >= self.eip155_transition && n != self.params().chain_id {
|
||||||
|
return Err(TransactionError::InvalidNetworkId.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
fn register_client(&self, client: Weak<Client>) {
|
fn register_client(&self, client: Weak<Client>) {
|
||||||
*self.client.write() = Some(client.clone());
|
*self.client.write() = Some(client.clone());
|
||||||
self.validators.register_contract(client);
|
self.validators.register_contract(client);
|
||||||
|
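An aside, not part of the commit: the EIP-155 check that `verify_transaction_basic` now performs in the AuthorityRound engine reduces to a simple predicate. The sketch below uses made-up transition and chain-id numbers purely for illustration.

// Illustrative sketch only: a signed network id is rejected once the chain
// passes `eip155_transition` and the id differs from the configured chain id;
// transactions without a network id always pass this particular check.
fn network_id_ok(tx_network_id: Option<u64>, chain_id: u64, block: u64, eip155_transition: u64) -> bool {
    match tx_network_id {
        None => true,
        Some(n) => block < eip155_transition || n == chain_id,
    }
}

fn main() {
    // Hypothetical numbers: chain id 1, transition at block 100.
    assert!(network_id_ok(None, 1, 200, 100));     // legacy signature, accepted
    assert!(network_id_ok(Some(1), 1, 200, 100));  // matching id, accepted
    assert!(!network_id_ok(Some(3), 1, 200, 100)); // wrong id after transition
    assert!(network_id_ok(Some(3), 1, 50, 100));   // before transition, not enforced
}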
@ -39,7 +39,7 @@ use account_provider::AccountProvider;
use block::ExecutedBlock;
use builtin::Builtin;
use env_info::EnvInfo;
use error::Error;
use error::{Error, TransactionError};
use spec::CommonParams;
use evm::Schedule;
use header::Header;
@ -157,6 +157,13 @@ pub trait Engine : Sync + Send {
// TODO: consider including State in the params.
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, _header: &Header) -> Result<(), Error> {
t.check_low_s()?;

if let Some(n) = t.network_id() {
if n != self.params().chain_id {
return Err(TransactionError::InvalidNetworkId.into());
}
}

Ok(())
}

@ -43,6 +43,8 @@ pub struct EthashParams {
pub difficulty_bound_divisor: U256,
/// Difficulty increment divisor.
pub difficulty_increment_divisor: u64,
/// Metropolis difficulty increment divisor.
pub metropolis_difficulty_increment_divisor: u64,
/// Block duration.
pub duration_limit: u64,
/// Block reward.
@ -63,6 +65,8 @@ pub struct EthashParams {
pub difficulty_hardfork_bound_divisor: U256,
/// Block on which there is no additional difficulty from the exponential bomb.
pub bomb_defuse_transition: u64,
/// Number of first block where EIP-100 rules begin.
pub eip100b_transition: u64,
/// Number of first block where EIP-150 rules begin.
pub eip150_transition: u64,
/// Number of first block where EIP-155 rules begin.
@ -96,6 +100,7 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
minimum_difficulty: p.minimum_difficulty.into(),
difficulty_bound_divisor: p.difficulty_bound_divisor.into(),
difficulty_increment_divisor: p.difficulty_increment_divisor.map_or(10, Into::into),
metropolis_difficulty_increment_divisor: p.metropolis_difficulty_increment_divisor.map_or(9, Into::into),
duration_limit: p.duration_limit.into(),
block_reward: p.block_reward.into(),
registrar: p.registrar.map_or_else(Address::new, Into::into),
@ -106,6 +111,7 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
difficulty_hardfork_transition: p.difficulty_hardfork_transition.map_or(u64::max_value(), Into::into),
difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into),
bomb_defuse_transition: p.bomb_defuse_transition.map_or(u64::max_value(), Into::into),
eip100b_transition: p.eip100b_transition.map_or(u64::max_value(), Into::into),
eip150_transition: p.eip150_transition.map_or(0, Into::into),
eip155_transition: p.eip155_transition.map_or(0, Into::into),
eip160_transition: p.eip160_transition.map_or(0, Into::into),
@ -406,6 +412,8 @@ impl Ethash {
panic!("Can't calculate genesis block difficulty");
}

let parent_has_uncles = parent.uncles_hash() != &sha3::SHA3_EMPTY_LIST_RLP;

let min_difficulty = self.ethash_params.minimum_difficulty;
let difficulty_hardfork = header.number() >= self.ethash_params.difficulty_hardfork_transition;
let difficulty_bound_divisor = match difficulty_hardfork {
@ -417,19 +425,27 @@ impl Ethash {

let mut target = if header.number() < frontier_limit {
if header.timestamp() >= parent.timestamp() + duration_limit {
parent.difficulty().clone() - (parent.difficulty().clone() / difficulty_bound_divisor)
*parent.difficulty() - (*parent.difficulty() / difficulty_bound_divisor)
} else {
parent.difficulty().clone() + (parent.difficulty().clone() / difficulty_bound_divisor)
*parent.difficulty() + (*parent.difficulty() / difficulty_bound_divisor)
}
}
else {
trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty(), header.timestamp(), parent.timestamp());
//block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99)
let diff_inc = (header.timestamp() - parent.timestamp()) / self.ethash_params.difficulty_increment_divisor;
let (increment_divisor, threshold) = if header.number() < self.ethash_params.eip100b_transition {
if diff_inc <= 1 {
(self.ethash_params.difficulty_increment_divisor, 1)
parent.difficulty().clone() + parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(1 - diff_inc)
} else if parent_has_uncles {
(self.ethash_params.metropolis_difficulty_increment_divisor, 2)
} else {
parent.difficulty().clone() - parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(min(diff_inc - 1, 99))
(self.ethash_params.metropolis_difficulty_increment_divisor, 1)
};

let diff_inc = (header.timestamp() - parent.timestamp()) / increment_divisor;
if diff_inc <= threshold {
*parent.difficulty() + *parent.difficulty() / difficulty_bound_divisor * (threshold - diff_inc).into()
} else {
*parent.difficulty() - *parent.difficulty() / difficulty_bound_divisor * min(diff_inc - threshold, 99).into()
}
};
target = max(min_difficulty, target);
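An aside, not part of the commit: the post-EIP-100 branch above picks the increment divisor and comparison threshold from whether the parent block had uncles. The sketch below is a simplified u64 model, assuming a bound divisor of 2048 and the default metropolis increment divisor of 9 taken from the parameter conversion above; the real code operates on U256.

// Illustrative sketch only: the uncle-aware difficulty adjustment, in plain u64.
fn eip100b_target(parent_diff: u64, parent_has_uncles: bool, timestamp_gap: u64) -> u64 {
    let bound_divisor = 2048;      // assumed mainnet-style bound divisor
    let increment_divisor = 9;     // default metropolis increment divisor above
    let threshold = if parent_has_uncles { 2 } else { 1 };
    let diff_inc = timestamp_gap / increment_divisor;
    if diff_inc <= threshold {
        parent_diff + parent_diff / bound_divisor * (threshold - diff_inc)
    } else {
        parent_diff - parent_diff / bound_divisor * (diff_inc - threshold).min(99)
    }
}

fn main() {
    // A 9-second block whose parent had uncles still raises difficulty:
    // diff_inc = 1 <= threshold 2, so target = parent + parent/2048 * 1.
    assert_eq!(eip100b_target(2048, true, 9), 2048 + 1);
    // The same gap without uncles leaves difficulty unchanged (threshold 1).
    assert_eq!(eip100b_target(2048, false, 9), 2048);
}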
@ -20,6 +20,7 @@ use std::{ops, cmp, fmt};
use util::{U128, U256, U512, Uint, trie};
use action_params::ActionParams;
use evm::Ext;
use builtin;

/// Evm errors.
#[derive(Debug, Clone, PartialEq)]
@ -59,6 +60,8 @@ pub enum Error {
/// What was the stack limit
limit: usize
},
/// Built-in contract failed on given input
BuiltIn(&'static str),
/// Returned on evm internal error. Should never be ignored during development.
/// Likely to cause consensus issues.
Internal(String),
@ -70,6 +73,12 @@ impl From<Box<trie::TrieError>> for Error {
}
}

impl From<builtin::Error> for Error {
fn from(err: builtin::Error) -> Self {
Error::BuiltIn(err.0)
}
}

impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
@ -79,6 +88,7 @@ impl fmt::Display for Error {
BadInstruction { .. } => "Bad instruction",
StackUnderflow { .. } => "Stack underflow",
OutOfStack { .. } => "Out of stack",
BuiltIn { .. } => "Built-in failed",
Internal(ref msg) => msg,
};
message.fmt(f)
@ -276,25 +276,31 @@ impl<'a, B: 'a + StateBackend> Executive<'a, B> {

let cost = builtin.cost(data);
if cost <= params.gas {
builtin.execute(data, &mut output);
if let Err(e) = builtin.execute(data, &mut output) {
self.state.discard_checkpoint();
self.state.revert_to_checkpoint();
let evm_err: evm::evm::Error = e.into();
tracer.trace_failed_call(trace_info, vec![], evm_err.clone().into());
Err(evm_err)
} else {
self.state.discard_checkpoint();

// trace only top level calls to builtins to avoid DDoS attacks
if self.depth == 0 {
let mut trace_output = tracer.prepare_trace_output();
if let Some(mut out) = trace_output.as_mut() {
*out = output.to_owned();
}

tracer.trace_call(
trace_info,
cost,
trace_output,
vec![]
);
}

tracer.trace_call(
Ok(params.gas - cost)
trace_info,
cost,
trace_output,
vec![]
);
}

Ok(params.gas - cost)
} else {
// just drain the whole gas
self.state.revert_to_checkpoint();
@ -497,6 +503,7 @@ impl<'a, B: 'a + StateBackend> Executive<'a, B> {
| Err(evm::Error::BadJumpDestination {..})
| Err(evm::Error::BadInstruction {.. })
| Err(evm::Error::StackUnderflow {..})
| Err(evm::Error::BuiltIn {..})
| Err(evm::Error::OutOfStack {..}) => {
self.state.revert_to_checkpoint();
},
@ -56,7 +56,7 @@
//! export LIBRARY_PATH=/usr/local/lib
//!
//! # download and build parity
//! git clone https://github.com/ethcore/parity
//! git clone https://github.com/paritytech/parity
//! cd parity
//! multirust override beta
//! cargo build --release
@ -73,7 +73,7 @@
//! export LIBRARY_PATH=/usr/local/lib
//!
//! # download and build parity
//! git clone https://github.com/ethcore/parity
//! git clone https://github.com/paritytech/parity
//! cd parity
//! multirust override beta
//! cargo build --release
@ -108,6 +108,8 @@ extern crate hardware_wallet;
extern crate stats;
extern crate ethcore_logger;
extern crate num;
extern crate bn;
extern crate itertools;

#[macro_use]
extern crate log;
@ -17,7 +17,7 @@
//! Local Transactions List.

use linked_hash_map::LinkedHashMap;
use transaction::SignedTransaction;
use transaction::{SignedTransaction, PendingTransaction};
use error::TransactionError;
use util::{U256, H256};

@ -40,6 +40,8 @@ pub enum Status {
Rejected(SignedTransaction, TransactionError),
/// Transaction is invalid.
Invalid(SignedTransaction),
/// Transaction was canceled.
Canceled(PendingTransaction),
}

impl Status {
@ -99,6 +101,12 @@ impl LocalTransactionsList {
self.clear_old();
}

pub fn mark_canceled(&mut self, tx: PendingTransaction) {
warn!(target: "own_tx", "Transaction canceled (hash {:?})", tx.hash());
self.transactions.insert(tx.hash(), Status::Canceled(tx));
self.clear_old();
}

pub fn mark_dropped(&mut self, tx: SignedTransaction) {
warn!(target: "own_tx", "Transaction dropped (hash {:?})", tx.hash());
self.transactions.insert(tx.hash(), Status::Dropped(tx));
@ -29,7 +29,7 @@ use transaction::{Action, UnverifiedTransaction, PendingTransaction, SignedTrans
use receipt::{Receipt, RichReceipt};
use spec::Spec;
use engines::{Engine, Seal};
use miner::{MinerService, MinerStatus, TransactionQueue, TransactionQueueDetailsProvider, PrioritizationStrategy,
use miner::{MinerService, MinerStatus, TransactionQueue, RemovalReason, TransactionQueueDetailsProvider, PrioritizationStrategy,
AccountDetails, TransactionOrigin};
use miner::banning_queue::{BanningTransactionQueue, Threshold};
use miner::work_notify::{WorkPoster, NotifyWork};
@ -430,7 +430,7 @@ impl Miner {
{
let mut queue = self.transaction_queue.write();
for hash in invalid_transactions {
queue.remove_invalid(&hash, &fetch_nonce);
queue.remove(&hash, &fetch_nonce, RemovalReason::Invalid);
}
for hash in transactions_to_penalize {
queue.penalize(&hash);
@ -1021,7 +1021,7 @@ impl MinerService for Miner {
let tx = queue.find(hash);
if tx.is_some() {
let fetch_nonce = |a: &Address| chain.latest_nonce(a);
queue.remove_invalid(hash, &fetch_nonce);
queue.remove(hash, &fetch_nonce, RemovalReason::Canceled);
}
tx
}
@ -54,7 +54,7 @@ mod stratum;
pub use self::external::{ExternalMiner, ExternalMinerService};

pub use self::miner::{Miner, MinerOptions, Banning, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit};
pub use self::transaction_queue::{TransactionQueue, TransactionDetailsProvider as TransactionQueueDetailsProvider,
pub use self::transaction_queue::{TransactionQueue, RemovalReason, TransactionDetailsProvider as TransactionQueueDetailsProvider,
PrioritizationStrategy, AccountDetails, TransactionOrigin};
pub use self::local_transactions::{Status as LocalTransactionStatus};
pub use client::TransactionImportResult;
@ -31,7 +31,7 @@
//!
//! use util::{Uint, U256, Address};
//! use ethkey::{Random, Generator};
//! use ethcore::miner::{TransactionQueue, TransactionQueueDetailsProvider, AccountDetails, TransactionOrigin};
//! use ethcore::miner::{TransactionQueue, RemovalReason, TransactionQueueDetailsProvider, AccountDetails, TransactionOrigin};
//! use ethcore::transaction::*;
//! use rustc_serialize::hex::FromHex;
//!
@ -80,7 +80,7 @@
//!
//! // And when transaction is removed (but nonce haven't changed)
//! // it will move subsequent transactions to future
//! txq.remove_invalid(&st1.hash(), &|_| 10.into());
//! txq.remove(&st1.hash(), &|_| 10.into(), RemovalReason::Invalid);
//! assert_eq!(txq.status().pending, 0);
//! assert_eq!(txq.status().future, 1);
//! assert_eq!(txq.top_transactions().len(), 0);
@ -510,6 +510,15 @@ pub enum PrioritizationStrategy {
GasFactorAndGasPrice,
}

/// Reason to remove single transaction from the queue.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum RemovalReason {
/// Transaction is invalid
Invalid,
/// Transaction was canceled
Canceled,
}

/// Point in time when transaction was inserted.
pub type QueuingInstant = BlockNumber;
const DEFAULT_QUEUING_PERIOD: BlockNumber = 128;
@ -897,7 +906,7 @@ impl TransactionQueue {
.expect("We fetch details for all senders from both current and future")
.nonce;
for hash in invalid {
self.remove_invalid(&hash, &fetch_nonce);
self.remove(&hash, &fetch_nonce, RemovalReason::Invalid);
}
}

@ -945,7 +954,7 @@ impl TransactionQueue {
/// so transactions left in queue are processed according to client nonce.
///
/// If gap is introduced marks subsequent transactions as future
pub fn remove_invalid<F>(&mut self, transaction_hash: &H256, fetch_nonce: &F)
pub fn remove<F>(&mut self, transaction_hash: &H256, fetch_nonce: &F, reason: RemovalReason)
where F: Fn(&Address) -> U256 {

assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
@ -964,7 +973,14 @@ impl TransactionQueue {

// Mark in locals
if self.local_transactions.contains(transaction_hash) {
self.local_transactions.mark_invalid(transaction.transaction.into());
match reason {
RemovalReason::Invalid => self.local_transactions.mark_invalid(
transaction.transaction.into()
),
RemovalReason::Canceled => self.local_transactions.mark_canceled(
PendingTransaction::new(transaction.transaction, transaction.condition)
),
}
}

// Remove from future
@ -2277,7 +2293,7 @@ pub mod test {
assert_eq!(txq.status().pending, 3);

// when
txq.remove_invalid(&tx.hash(), &|_| default_nonce());
txq.remove(&tx.hash(), &|_| default_nonce(), RemovalReason::Invalid);

// then
let stats = txq.status();
@ -2420,7 +2436,7 @@ pub mod test {
assert_eq!(txq.status().pending, 2);

// when
txq.remove_invalid(&tx1.hash(), &|_| default_nonce());
txq.remove(&tx1.hash(), &|_| default_nonce(), RemovalReason::Invalid);
assert_eq!(txq.status().pending, 0);
assert_eq!(txq.status().future, 1);
txq.add(tx1.clone(), TransactionOrigin::External, 0, None, &default_tx_provider()).unwrap();
@ -2518,7 +2534,7 @@ pub mod test {
assert_eq!(txq.status().future, 2);

// when
txq.remove_invalid(&tx1.hash(), &|_| default_nonce() + 1.into());
txq.remove(&tx1.hash(), &|_| default_nonce() + 1.into(), RemovalReason::Invalid);

// then
let stats = txq.status();
@ -23,6 +23,7 @@ use snapshot::Error;
use util::{U256, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
use util::trie::{TrieDB, Trie};
use rlp::{RlpStream, UntrustedRlp};
use itertools::Itertools;

use std::collections::HashSet;

@ -60,55 +61,53 @@ impl CodeState {
}
}

// walk the account's storage trie, returning an RLP item containing the
// walk the account's storage trie, returning a vector of RLP items containing the
// account properties and the storage.
// account properties and the storage. Each item contains at most `max_storage_items`
pub fn to_fat_rlp(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashSet<H256>) -> Result<Bytes, Error> {
// storage records split according to snapshot format definition.
pub fn to_fat_rlps(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashSet<H256>, max_storage_items: usize) -> Result<Vec<Bytes>, Error> {
if acc == &ACC_EMPTY {
return Ok(::rlp::NULL_RLP.to_vec());
return Ok(vec![::rlp::NULL_RLP.to_vec()]);
}

let db = TrieDB::new(acct_db, &acc.storage_root)?;

let mut pairs = Vec::new();
let chunks = db.iter()?.chunks(max_storage_items);
let pair_chunks = chunks.into_iter().map(|chunk| chunk.collect());
pair_chunks.pad_using(1, |_| Vec::new(), ).map(|pairs| {
let mut stream = RlpStream::new_list(pairs.len());

for item in db.iter()? {
for r in pairs {
let (k, v) = item?;
let (k, v) = r?;
pairs.push((k, v));
stream.begin_list(2).append(&k).append(&&*v);
}

let mut stream = RlpStream::new_list(pairs.len());
let pairs_rlp = stream.out();

for (k, v) in pairs {
let mut account_stream = RlpStream::new_list(5);
stream.begin_list(2).append(&k).append(&&*v);
account_stream.append(&acc.nonce)
}
.append(&acc.balance);

let pairs_rlp = stream.out();
// [has_code, code_hash].
if acc.code_hash == SHA3_EMPTY {
let mut account_stream = RlpStream::new_list(5);
account_stream.append(&CodeState::Empty.raw()).append_empty_data();
account_stream.append(&acc.nonce)
} else if used_code.contains(&acc.code_hash) {
.append(&acc.balance);
account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash);
} else {
// [has_code, code_hash].
match acct_db.get(&acc.code_hash) {
if acc.code_hash == SHA3_EMPTY {
Some(c) => {
account_stream.append(&CodeState::Empty.raw()).append_empty_data();
used_code.insert(acc.code_hash.clone());
} else if used_code.contains(&acc.code_hash) {
account_stream.append(&CodeState::Inline.raw()).append(&&*c);
account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash);
}
} else {
None => {
match acct_db.get(&acc.code_hash) {
warn!("code lookup failed during snapshot");
Some(c) => {
account_stream.append(&false).append_empty_data();
used_code.insert(acc.code_hash.clone());
}
account_stream.append(&CodeState::Inline.raw()).append(&&*c);
}
None => {
warn!("code lookup failed during snapshot");
account_stream.append(&false).append_empty_data();
}
}
}

account_stream.append_raw(&pairs_rlp, 1);
Ok(account_stream.out())
Ok(account_stream.out())
}).collect()
}

// decode a fat rlp, and rebuild the storage trie as we go.
@ -117,6 +116,7 @@ pub fn to_fat_rlp(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashS
pub fn from_fat_rlp(
acct_db: &mut AccountDBMut,
rlp: UntrustedRlp,
mut storage_root: H256,
) -> Result<(BasicAccount, Option<Bytes>), Error> {
use util::{TrieDBMut, TrieMut};

@ -148,10 +148,12 @@ pub fn from_fat_rlp(
}
};

let mut storage_root = H256::zero();

{
let mut storage_trie = TrieDBMut::new(acct_db, &mut storage_root);
let mut storage_trie = if storage_root.is_zero() {
TrieDBMut::new(acct_db, &mut storage_root)
} else {
TrieDBMut::from_existing(acct_db, &mut storage_root)?
};
let pairs = rlp.at(4)?;
for pair_rlp in pairs.iter() {
let k: Bytes = pair_rlp.val_at(0)?;
@ -184,7 +186,7 @@ mod tests {

use std::collections::HashSet;

use super::{ACC_EMPTY, to_fat_rlp, from_fat_rlp};
use super::{ACC_EMPTY, to_fat_rlps, from_fat_rlp};

#[test]
fn encoding_basic() {
@ -201,9 +203,9 @@ mod tests {
let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);

let fat_rlp = to_fat_rlp(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlps = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), usize::max_value()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp);
let fat_rlp = UntrustedRlp::new(&fat_rlps[0]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, H256::zero()).unwrap().0, account);
}

#[test]
@ -226,9 +228,40 @@ mod tests {
let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);

let fat_rlp = to_fat_rlp(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlp = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), usize::max_value()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp);
let fat_rlp = UntrustedRlp::new(&fat_rlp[0]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, H256::zero()).unwrap().0, account);
}

#[test]
fn encoding_storage_split() {
let mut db = get_temp_state_db();
let addr = Address::random();

let account = {
let acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr);
let mut root = SHA3_NULL_RLP;
fill_storage(acct_db, &mut root, &mut H256::zero());
BasicAccount {
nonce: 25.into(),
balance: 987654321.into(),
storage_root: root,
code_hash: SHA3_EMPTY,
}
};

let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);

let fat_rlps = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), 100).unwrap();
let mut root = SHA3_NULL_RLP;
let mut restored_account = None;
for rlp in fat_rlps {
let fat_rlp = UntrustedRlp::new(&rlp);
restored_account = Some(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, root).unwrap().0);
root = restored_account.as_ref().unwrap().storage_root.clone();
}
assert_eq!(restored_account, Some(account));
}

#[test]
@ -264,18 +297,18 @@ mod tests {

let mut used_code = HashSet::new();

let fat_rlp1 = to_fat_rlp(&account1, &AccountDB::new(db.as_hashdb(), &addr1), &mut used_code).unwrap();
let fat_rlp1 = to_fat_rlps(&account1, &AccountDB::new(db.as_hashdb(), &addr1), &mut used_code, usize::max_value()).unwrap();
let fat_rlp2 = to_fat_rlp(&account2, &AccountDB::new(db.as_hashdb(), &addr2), &mut used_code).unwrap();
let fat_rlp2 = to_fat_rlps(&account2, &AccountDB::new(db.as_hashdb(), &addr2), &mut used_code, usize::max_value()).unwrap();
assert_eq!(used_code.len(), 1);

let fat_rlp1 = UntrustedRlp::new(&fat_rlp1);
let fat_rlp1 = UntrustedRlp::new(&fat_rlp1[0]);
let fat_rlp2 = UntrustedRlp::new(&fat_rlp2);
let fat_rlp2 = UntrustedRlp::new(&fat_rlp2[0]);

let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2).unwrap();
let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2, H256::zero()).unwrap();
assert!(maybe_code.is_none());
assert_eq!(acc, account2);

let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1).unwrap();
let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1, H256::zero()).unwrap();
assert_eq!(maybe_code, Some(b"this is definitely code".to_vec()));
assert_eq!(acc, account1);
}
@ -285,7 +318,7 @@ mod tests {
let mut db = get_temp_state_db();
let mut used_code = HashSet::new();

assert_eq!(to_fat_rlp(&ACC_EMPTY, &AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code).unwrap(), ::rlp::NULL_RLP.to_vec());
assert_eq!(to_fat_rlps(&ACC_EMPTY, &AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code, usize::max_value()).unwrap(), vec![::rlp::NULL_RLP.to_vec()]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP)).unwrap(), (ACC_EMPTY, None));
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP), H256::zero()).unwrap(), (ACC_EMPTY, None));
}
}
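An aside, not part of the commit: `to_fat_rlps` above splits an account's storage into records of at most `max_storage_items` pairs, always emitting at least one record so empty accounts still round-trip. A plain-slice sketch of the splitting rule:

// Illustrative sketch only: chunk storage pairs, padding to one record.
fn split_storage<T: Clone>(pairs: &[T], max_storage_items: usize) -> Vec<Vec<T>> {
    if pairs.is_empty() {
        return vec![Vec::new()]; // always emit at least one (possibly empty) record
    }
    pairs.chunks(max_storage_items).map(|c| c.to_vec()).collect()
}

fn main() {
    // 5 storage entries with a limit of 2 become three account records;
    // the restorer replays them in order against the same storage root.
    assert_eq!(split_storage(&[1, 2, 3, 4, 5], 2).len(), 3);
    assert_eq!(split_storage::<u32>(&[], 2), vec![Vec::<u32>::new()]);
}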
@ -53,6 +53,8 @@ pub enum Error {
Decoder(DecoderError),
/// Io error.
Io(::std::io::Error),
/// Snapshot version is not supported.
VersionNotSupported(u64),
}

impl fmt::Display for Error {
@ -73,6 +75,7 @@ impl fmt::Display for Error {
Error::Io(ref err) => err.fmt(f),
Error::Decoder(ref err) => err.fmt(f),
Error::Trie(ref err) => err.fmt(f),
Error::VersionNotSupported(ref ver) => write!(f, "Snapshot version {} is not supprted.", ver),
}
}
}
@ -31,6 +31,8 @@ use rlp::{self, Encodable, RlpStream, UntrustedRlp};

use super::ManifestData;

const SNAPSHOT_VERSION: u64 = 2;

/// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised.
@ -118,8 +120,9 @@ impl SnapshotWriter for PackedWriter {
fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours.
let mut stream = RlpStream::new_list(5);
let mut stream = RlpStream::new_list(6);
stream
.append(&SNAPSHOT_VERSION)
.append_list(&self.state_hashes)
.append_list(&self.block_hashes)
.append(&manifest.state_root)
@ -221,7 +224,7 @@ impl PackedReader {
/// Create a new `PackedReader` for the file at the given path.
/// This will fail if any io errors are encountered or the file
/// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> {
pub fn new(path: &Path) -> Result<Option<Self>, ::snapshot::error::Error> {
let mut file = File::open(path)?;
let file_len = file.metadata()?.len();
if file_len < 8 {
@ -255,15 +258,26 @@ impl PackedReader {

let rlp = UntrustedRlp::new(&manifest_buf);

let state: Vec<ChunkInfo> = rlp.list_at(0)?;
let (start, version) = if rlp.item_count()? == 5 {
let blocks: Vec<ChunkInfo> = rlp.list_at(1)?;
(0, 1)
} else {
(1, rlp.val_at(0)?)
};

if version > SNAPSHOT_VERSION {
return Err(::snapshot::error::Error::VersionNotSupported(version));
}

let state: Vec<ChunkInfo> = rlp.list_at(0 + start)?;
let blocks: Vec<ChunkInfo> = rlp.list_at(1 + start)?;

let manifest = ManifestData {
version: version,
state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: rlp.val_at(2)?,
state_root: rlp.val_at(2 + start)?,
block_number: rlp.val_at(3)?,
block_number: rlp.val_at(3 + start)?,
block_hash: rlp.val_at(4)?,
block_hash: rlp.val_at(4 + start)?,
};

Ok(Some(PackedReader {
@ -346,7 +360,7 @@ mod tests {
use util::sha3::Hashable;

use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader, SNAPSHOT_VERSION};

const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
@ -372,6 +386,7 @@ mod tests {
}

let manifest = ManifestData {
version: SNAPSHOT_VERSION,
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
@ -410,6 +425,7 @@ mod tests {
}

let manifest = ManifestData {
version: SNAPSHOT_VERSION,
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
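An aside, not part of the commit: the reader above stays compatible with version-1 snapshots by checking the manifest's item count — five items means a legacy manifest with no version field, otherwise field 0 is the version and every later field shifts by one. A minimal model of that offset logic:

// Illustrative sketch only: choose the field offset and version for a manifest.
fn manifest_layout(item_count: usize, first_item: u64) -> (usize, u64) {
    if item_count == 5 {
        (0, 1) // legacy layout: no version field, implicitly version 1
    } else {
        (1, first_item) // versioned layout: field 0 holds the version
    }
}

fn main() {
    assert_eq!(manifest_layout(5, 0xdead), (0, 1)); // legacy manifest
    assert_eq!(manifest_layout(6, 2), (1, 2));      // version-2 manifest
}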
@ -17,7 +17,7 @@
//! Snapshot creation, restoration, and network service.
//!
//! Documentation of the format can be found at
//! https://github.com/ethcore/parity/wiki/%22PV64%22-Snapshot-Format
//! https://github.com/paritytech/parity/wiki/%22PV64%22-Snapshot-Format

use std::collections::{HashMap, HashSet, VecDeque};
use std::sync::Arc;
@ -56,6 +56,7 @@ pub use self::traits::SnapshotService;
pub use self::watcher::Watcher;
pub use types::snapshot_manifest::ManifestData;
pub use types::restoration_status::RestorationStatus;
pub use types::basic_account::BasicAccount;

pub mod io;
pub mod service;
@ -82,6 +83,9 @@ mod traits {
// Try to have chunks be around 4MB (before compression)
const PREFERRED_CHUNK_SIZE: usize = 4 * 1024 * 1024;

// Try to have chunks be around 4MB (before compression)
const MAX_STORAGE_ENTRIES_PER_ACCOUNT_RECORD: usize = 80_000;

// How many blocks to include in a snapshot, starting from the head of the chain.
const SNAPSHOT_BLOCKS: u64 = 30000;

@ -147,6 +151,7 @@ pub fn take_snapshot<W: SnapshotWriter + Send>(
info!("produced {} state chunks and {} block chunks.", state_hashes.len(), block_hashes.len());

let manifest_data = ManifestData {
version: 2,
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: *state_root,
@ -300,14 +305,14 @@ impl<'a> StateChunker<'a> {
//
// If the buffer is greater than the desired chunk size,
// this will write out the data to disk.
fn push(&mut self, account_hash: Bytes, data: Bytes) -> Result<(), Error> {
fn push(&mut self, account_hash: Bytes, data: Bytes, force_chunk: bool) -> Result<(), Error> {
let pair = {
let mut stream = RlpStream::new_list(2);
stream.append(&account_hash).append_raw(&data, 1);
stream.out()
};

if self.cur_size + pair.len() >= PREFERRED_CHUNK_SIZE {
if force_chunk || self.cur_size + pair.len() >= PREFERRED_CHUNK_SIZE {
self.write_chunk()?;
}

@ -372,8 +377,10 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +

let account_db = AccountDB::from_hash(db, account_key_hash);

let fat_rlp = account::to_fat_rlp(&account, &account_db, &mut used_code)?;
let fat_rlps = account::to_fat_rlps(&account, &account_db, &mut used_code, MAX_STORAGE_ENTRIES_PER_ACCOUNT_RECORD)?;
chunker.push(account_key, fat_rlp)?;
for (i, fat_rlp) in fat_rlps.into_iter().enumerate() {
chunker.push(account_key.clone(), fat_rlp, i > 0)?;
}
}

if chunker.cur_size != 0 {
@ -390,6 +397,7 @@ pub struct StateRebuilder {
known_code: HashMap<H256, H256>, // code hashes mapped to first account with this code.
missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
bloom: Bloom,
known_storage_roots: HashMap<H256, H256>, // maps account hashes to last known storage root. Only filled for last account per chunk.
}

impl StateRebuilder {
@ -401,6 +409,7 @@ impl StateRebuilder {
known_code: HashMap::new(),
missing_code: HashMap::new(),
bloom: StateDB::load_bloom(&*db),
known_storage_roots: HashMap::new(),
}
}

@ -418,6 +427,7 @@ impl StateRebuilder {
rlp,
&mut pairs,
&self.known_code,
&mut self.known_storage_roots,
flag
)?;

@ -464,14 +474,18 @@ impl StateRebuilder {
Ok(())
}

/// Check for accounts missing code. Once all chunks have been fed, there should
/// Finalize the restoration. Check for accounts missing code and make a dummy
/// be none.
/// journal entry.
pub fn check_missing(self) -> Result<(), Error> {
/// Once all chunks have been fed, there should be nothing missing.
pub fn finalize(mut self, era: u64, id: H256) -> Result<(), ::error::Error> {
let missing = self.missing_code.keys().cloned().collect::<Vec<_>>();
match missing.is_empty() {
if !missing.is_empty() { return Err(Error::MissingCode(missing).into()) }
true => Ok(()),

false => Err(Error::MissingCode(missing)),
let mut batch = self.db.backing().transaction();
}
self.db.journal_under(&mut batch, era, &id)?;
self.db.backing().write_buffered(batch);

Ok(())
}

/// Get the state root of the rebuilder.
@ -492,10 +506,11 @@ fn rebuild_accounts(
account_fat_rlps: UntrustedRlp,
out_chunk: &mut [(H256, Bytes)],
known_code: &HashMap<H256, H256>,
known_storage_roots: &mut HashMap<H256, H256>,
abort_flag: &AtomicBool,
) -> Result<RebuiltStatus, ::error::Error> {
let mut status = RebuiltStatus::default();
for (account_rlp, out) in account_fat_rlps.into_iter().zip(out_chunk) {
for (account_rlp, out) in account_fat_rlps.into_iter().zip(out_chunk.iter_mut()) {
if !abort_flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) }

let hash: H256 = account_rlp.val_at(0)?;
@ -506,7 +521,8 @@ fn rebuild_accounts(
// fill out the storage trie and code while decoding.
let (acc, maybe_code) = {
let mut acct_db = AccountDBMut::from_hash(db, hash);
account::from_fat_rlp(&mut acct_db, fat_rlp)?
let storage_root = known_storage_roots.get(&hash).cloned().unwrap_or(H256::zero());
account::from_fat_rlp(&mut acct_db, fat_rlp, storage_root)?
};

let code_hash = acc.code_hash.clone();
@ -538,6 +554,12 @@ fn rebuild_accounts(

*out = (hash, thin_rlp);
}
if let Some(&(ref hash, ref rlp)) = out_chunk.iter().last() {
known_storage_roots.insert(*hash, ::rlp::decode::<BasicAccount>(rlp).storage_root);
}
if let Some(&(ref hash, ref rlp)) = out_chunk.iter().next() {
known_storage_roots.insert(*hash, ::rlp::decode::<BasicAccount>(rlp).storage_root);
}
Ok(status)
}

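An aside, not part of the commit: `known_storage_roots` carries the storage root of the first and last account of each restored chunk, so that an account whose storage was split across several records resumes from the partially built trie instead of an empty one. A tiny sketch of the lookup, with u64 stand-ins for H256:

// Illustrative sketch only: look up where a split account should resume.
use std::collections::HashMap;

fn resume_root(known: &HashMap<u64, u64>, account: u64) -> u64 {
    // zero stands in for H256::zero(), i.e. "start a fresh storage trie".
    known.get(&account).cloned().unwrap_or(0)
}

fn main() {
    let mut known = HashMap::new();
    known.insert(42, 7); // the previous chunk ended mid-account 42 with root 7
    assert_eq!(resume_root(&known, 42), 7); // continue from the partial trie
    assert_eq!(resume_root(&known, 99), 0); // unseen account starts empty
}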
@ -166,7 +166,7 @@ impl Restoration {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// check for missing code.
|
// check for missing code.
|
||||||
self.state.check_missing()?;
|
self.state.finalize(self.manifest.block_number, self.manifest.block_hash)?;
|
||||||
|
|
||||||
// connect out-of-order chunks and verify chain integrity.
|
// connect out-of-order chunks and verify chain integrity.
|
||||||
self.blocks.finalize(self.canonical_hashes)?;
|
self.blocks.finalize(self.canonical_hashes)?;
|
||||||
@ -656,6 +656,7 @@ mod tests {
|
|||||||
assert_eq!(service.status(), RestorationStatus::Inactive);
|
assert_eq!(service.status(), RestorationStatus::Inactive);
|
||||||
|
|
||||||
let manifest = ManifestData {
|
let manifest = ManifestData {
|
||||||
|
version: 2,
|
||||||
state_hashes: vec![],
|
state_hashes: vec![],
|
||||||
block_hashes: vec![],
|
block_hashes: vec![],
|
||||||
state_root: Default::default(),
|
state_root: Default::default(),
|
||||||
|
@ -63,6 +63,7 @@ fn chunk_and_restore(amount: u64) {
|
|||||||
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
|
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
|
||||||
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
|
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
|
||||||
let manifest = ::snapshot::ManifestData {
|
let manifest = ::snapshot::ManifestData {
|
||||||
|
version: 2,
|
||||||
state_hashes: Vec::new(),
|
state_hashes: Vec::new(),
|
||||||
block_hashes: block_hashes,
|
block_hashes: block_hashes,
|
||||||
state_root: ::util::sha3::SHA3_NULL_RLP,
|
state_root: ::util::sha3::SHA3_NULL_RLP,
|
||||||
@ -125,6 +126,7 @@ fn checks_flag() {
|
|||||||
let chain = BlockChain::new(Default::default(), &genesis, db.clone());
|
let chain = BlockChain::new(Default::default(), &genesis, db.clone());
|
||||||
|
|
||||||
let manifest = ::snapshot::ManifestData {
|
let manifest = ::snapshot::ManifestData {
|
||||||
|
version: 2,
|
||||||
state_hashes: Vec::new(),
|
state_hashes: Vec::new(),
|
||||||
block_hashes: Vec::new(),
|
block_hashes: Vec::new(),
|
||||||
state_root: ::util::sha3::SHA3_NULL_RLP,
|
state_root: ::util::sha3::SHA3_NULL_RLP,
|
||||||
|
@ -27,6 +27,7 @@ use super::ManifestData;
|
|||||||
#[test]
|
#[test]
|
||||||
fn manifest_rlp() {
|
fn manifest_rlp() {
|
||||||
let manifest = ManifestData {
|
let manifest = ManifestData {
|
||||||
|
version: 2,
|
||||||
block_hashes: Vec::new(),
|
block_hashes: Vec::new(),
|
||||||
state_hashes: Vec::new(),
|
state_hashes: Vec::new(),
|
||||||
block_number: 1234567,
|
block_number: 1234567,
|
||||||
|
@ -122,6 +122,7 @@ fn guards_delete_folders() {
|
|||||||
path.push("restoration");
|
path.push("restoration");
|
||||||
|
|
||||||
let manifest = ManifestData {
|
let manifest = ManifestData {
|
||||||
|
version: 2,
|
||||||
state_hashes: vec![],
|
state_hashes: vec![],
|
||||||
block_hashes: vec![],
|
block_hashes: vec![],
|
||||||
block_number: 0,
|
block_number: 0,
|
||||||
|
@@ -58,10 +58,11 @@ fn snap_and_restore() {
let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap();

writer.into_inner().finish(::snapshot::ManifestData {
+version: 2,
state_hashes: state_hashes,
block_hashes: Vec::new(),
state_root: state_root,
-block_number: 0,
+block_number: 1000,
block_hash: H256::default(),
}).unwrap();

@@ -69,7 +70,7 @@ fn snap_and_restore() {
db_path.push("db");
let db = {
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
-let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive);
+let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
let reader = PackedReader::new(&snap_file).unwrap().unwrap();

let flag = AtomicBool::new(true);
@@ -82,12 +83,13 @@ fn snap_and_restore() {
}

assert_eq!(rebuilder.state_root(), state_root);
-rebuilder.check_missing().unwrap();
+rebuilder.finalize(1000, H256::default()).unwrap();

new_db
};

-let new_db = journaldb::new(db, Algorithm::Archive, ::db::COL_STATE);
+let new_db = journaldb::new(db, Algorithm::OverlayRecent, ::db::COL_STATE);
+assert_eq!(new_db.earliest_era(), Some(1000));

compare_dbs(&old_db, new_db.as_hashdb());
}
@@ -120,10 +122,10 @@ fn get_code_from_prev_chunk() {
let mut db = MemoryDB::new();
AccountDBMut::from_hash(&mut db, hash).insert(&code[..]);

-let fat_rlp = account::to_fat_rlp(&acc, &AccountDB::from_hash(&db, hash), &mut used_code).unwrap();
+let fat_rlp = account::to_fat_rlps(&acc, &AccountDB::from_hash(&db, hash), &mut used_code, usize::max_value()).unwrap();

let mut stream = RlpStream::new_list(1);
-stream.begin_list(2).append(&hash).append_raw(&fat_rlp, 1);
+stream.begin_list(2).append(&hash).append_raw(&fat_rlp[0], 1);
stream.out()
};

@@ -134,13 +136,18 @@ fn get_code_from_prev_chunk() {
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());

-let mut rebuilder = StateRebuilder::new(new_db, Algorithm::Archive);
-let flag = AtomicBool::new(true);
+{
+let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
+let flag = AtomicBool::new(true);

rebuilder.feed(&chunk1, &flag).unwrap();
rebuilder.feed(&chunk2, &flag).unwrap();

-rebuilder.check_missing().unwrap();
+rebuilder.finalize(1000, H256::random()).unwrap();
+}
+
+let state_db = journaldb::new(new_db, Algorithm::OverlayRecent, ::db::COL_STATE);
+assert_eq!(state_db.earliest_era(), Some(1000));
}

#[test]
@@ -164,6 +171,7 @@ fn checks_flag() {
let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap();

writer.into_inner().finish(::snapshot::ManifestData {
+version: 2,
state_hashes: state_hashes,
block_hashes: Vec::new(),
state_root: state_root,
@@ -175,7 +183,7 @@ fn checks_flag() {
db_path.push("db");
{
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
-let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive);
+let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
let reader = PackedReader::new(&snap_file).unwrap().unwrap();

let flag = AtomicBool::new(false);
@@ -56,7 +56,7 @@ pub struct CommonParams {
/// Number of first block where EIP-98 rules begin.
pub eip98_transition: BlockNumber,
/// Validate block receipts root.
-pub validate_receipts: bool,
+pub validate_receipts_transition: u64,
}

impl From<ethjson::spec::Params> for CommonParams {
@@ -70,7 +70,7 @@ impl From<ethjson::spec::Params> for CommonParams {
min_gas_limit: p.min_gas_limit.into(),
fork_block: if let (Some(n), Some(h)) = (p.fork_block, p.fork_hash) { Some((n.into(), h.into())) } else { None },
eip98_transition: p.eip98_transition.map_or(0, Into::into),
-validate_receipts: p.validate_receipts.unwrap_or(true),
+validate_receipts_transition: p.validate_receipts_transition.map_or(0, Into::into),
}
}
}
@@ -350,7 +350,7 @@ impl Spec {
/// Account "0".sha3() and "1".sha3() are a authorities.
pub fn new_test_tendermint() -> Self { load_bundled!("tendermint") }

-/// TestList.sol used in both specs: https://github.com/ethcore/contracts/pull/30/files
+/// TestList.sol used in both specs: https://github.com/paritytech/contracts/pull/30/files
/// Accounts with secrets "0".sha3() and "1".sha3() are initially the validators.
/// Create a new Spec with BasicAuthority which uses a contract at address 5 to determine the current validators using `getValidators`.
/// Second validator can be removed with "0xbfc708a000000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1" and added back in using "0x4d238c8e00000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1".
@@ -374,7 +374,7 @@ mod tests {
use state::State;
use super::*;

-// https://github.com/ethcore/parity/issues/1840
+// https://github.com/paritytech/parity/issues/1840
#[test]
fn test_load_empty() {
assert!(Spec::load(&[] as &[u8]).is_err());
@@ -601,7 +601,7 @@ impl<B: Backend> State<B> {

let e = self.execute(env_info, engine, t, tracing)?;
// trace!("Applied transaction. Diff:\n{}\n", state_diff::diff_pod(&old, &self.to_pod()));
-let state_root = if env_info.number < engine.params().eip98_transition {
+let state_root = if env_info.number < engine.params().eip98_transition || env_info.number < engine.params().validate_receipts_transition {
self.commit()?;
Some(self.root().clone())
} else {
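The `validate_receipts` boolean becomes a `validate_receipts_transition` block number, and the `State::apply` hunk above keys the per-receipt state-root commitment off it. A minimal standalone sketch of that gating rule, with hypothetical free-standing names instead of the real `CommonParams`/`EnvInfo` plumbing:

```rust
// Illustrative sketch only; not the Parity implementation, just the rule the
// changed line expresses.
fn commits_state_root(block_number: u64, eip98_transition: u64, validate_receipts_transition: u64) -> bool {
    // Before either transition point the post-transaction state root is still
    // committed and recorded in the receipt.
    block_number < eip98_transition || block_number < validate_receipts_transition
}

fn main() {
    assert!(commits_state_root(5, 10, 0));
    assert!(!commits_state_root(15, 10, 0));
}
```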
@@ -438,6 +438,7 @@ pub fn get_default_ethash_params() -> EthashParams{
minimum_difficulty: U256::from(131072),
difficulty_bound_divisor: U256::from(2048),
difficulty_increment_divisor: 10,
+metropolis_difficulty_increment_divisor: 9,
duration_limit: 13,
block_reward: U256::from(0),
registrar: "0000000000000000000000000000000000000001".into(),
@@ -448,6 +449,7 @@ pub fn get_default_ethash_params() -> EthashParams{
difficulty_hardfork_transition: u64::max_value(),
difficulty_hardfork_bound_divisor: U256::from(0),
bomb_defuse_transition: u64::max_value(),
+eip100b_transition: u64::max_value(),
eip150_transition: u64::max_value(),
eip155_transition: u64::max_value(),
eip160_transition: u64::max_value(),
@@ -49,7 +49,7 @@ pub struct BlockChainInfo {
impl BlockChainInfo {
/// Determine the security model for the current state.
pub fn security_level(&self) -> SecurityLevel {
-// TODO: Detect SecurityLevel::FullState : https://github.com/ethcore/parity/issues/3834
+// TODO: Detect SecurityLevel::FullState : https://github.com/paritytech/parity/issues/3834
if self.ancient_block_number.is_none() || self.first_block_number.is_none() {
SecurityLevel::FullProofOfWork
} else {
@@ -24,6 +24,8 @@ use util::Bytes;
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct ManifestData {
+/// Snapshot format version.
+pub version: u64,
/// List of state chunk hashes.
pub state_hashes: Vec<H256>,
/// List of block chunk hashes.
@@ -39,7 +41,8 @@ pub struct ManifestData {
impl ManifestData {
/// Encode the manifest data to rlp.
pub fn into_rlp(self) -> Bytes {
-let mut stream = RlpStream::new_list(5);
+let mut stream = RlpStream::new_list(6);
+stream.append(&self.version);
stream.append_list(&self.state_hashes);
stream.append_list(&self.block_hashes);
stream.append(&self.state_root);
@@ -52,14 +55,20 @@ impl ManifestData {
/// Try to restore manifest data from raw bytes, interpreted as RLP.
pub fn from_rlp(raw: &[u8]) -> Result<Self, DecoderError> {
let decoder = UntrustedRlp::new(raw);
+let (start, version) = if decoder.item_count()? == 5 {
+(0, 1)
+} else {
+(1, decoder.val_at(0)?)
+};
+
-let state_hashes: Vec<H256> = decoder.list_at(0)?;
+let state_hashes: Vec<H256> = decoder.list_at(start + 0)?;
-let block_hashes: Vec<H256> = decoder.list_at(1)?;
+let block_hashes: Vec<H256> = decoder.list_at(start + 1)?;
-let state_root: H256 = decoder.val_at(2)?;
+let state_root: H256 = decoder.val_at(start + 2)?;
-let block_number: u64 = decoder.val_at(3)?;
+let block_number: u64 = decoder.val_at(start + 3)?;
-let block_hash: H256 = decoder.val_at(4)?;
+let block_hash: H256 = decoder.val_at(start + 4)?;

Ok(ManifestData {
+version: version,
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: state_root,
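The new `from_rlp` stays compatible with version-1 snapshots by sniffing the RLP item count: five items means the legacy unversioned layout, anything else means field 0 carries the version and the payload shifts right by one. A toy sketch of just that offset rule (plain Rust, no RLP dependency; the real decoding uses `UntrustedRlp` as shown above):

```rust
/// (field_offset, implied_version) as derived by the decoder above.
fn manifest_layout(item_count: usize) -> (usize, u64) {
    if item_count == 5 {
        (0, 1) // legacy manifest: no version field, treat as version 1
    } else {
        (1, 0) // versioned manifest: version must still be read from field 0
    }
}

fn main() {
    assert_eq!(manifest_layout(5), (0, 1));
    // for a 6-item manifest the payload starts one field later
    assert_eq!(manifest_layout(6).0, 1);
}
```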
@@ -35,6 +35,8 @@ pub enum Error {
StackUnderflow,
/// When execution would exceed defined Stack Limit
OutOfStack,
+/// When builtin contract failed on input data
+BuiltIn,
/// Returned on evm internal error. Should never be ignored during development.
/// Likely to cause consensus issues.
Internal,
@@ -48,6 +50,7 @@ impl<'a> From<&'a EvmError> for Error {
EvmError::BadInstruction { .. } => Error::BadInstruction,
EvmError::StackUnderflow { .. } => Error::StackUnderflow,
EvmError::OutOfStack { .. } => Error::OutOfStack,
+EvmError::BuiltIn { .. } => Error::BuiltIn,
EvmError::Internal(_) => Error::Internal,
}
}
@@ -68,6 +71,7 @@ impl fmt::Display for Error {
BadInstruction => "Bad instruction",
StackUnderflow => "Stack underflow",
OutOfStack => "Out of stack",
+BuiltIn => "Built-in failed",
Internal => "Internal error",
};
message.fmt(f)
@@ -84,6 +88,7 @@ impl Encodable for Error {
StackUnderflow => 3,
OutOfStack => 4,
Internal => 5,
+BuiltIn => 6,
};

s.append_internal(&value);
@@ -101,6 +106,7 @@ impl Decodable for Error {
3 => Ok(StackUnderflow),
4 => Ok(OutOfStack),
5 => Ok(Internal),
+6 => Ok(BuiltIn),
_ => Err(DecoderError::Custom("Invalid error type")),
}
}
@@ -31,7 +31,7 @@ impl Verifier for CanonVerifier {
verification::verify_block_family(header, bytes, engine, bc)
}

-fn verify_block_final(&self, expected: &Header, got: &Header, receipts: bool) -> Result<(), Error> {
-verification::verify_block_final(expected, got, receipts)
+fn verify_block_final(&self, expected: &Header, got: &Header) -> Result<(), Error> {
+verification::verify_block_final(expected, got)
}
}
@@ -31,7 +31,7 @@ impl Verifier for NoopVerifier {
Ok(())
}

-fn verify_block_final(&self, _expected: &Header, _got: &Header, _receipts: bool) -> Result<(), Error> {
+fn verify_block_final(&self, _expected: &Header, _got: &Header) -> Result<(), Error> {
Ok(())
}
}
@@ -178,7 +178,7 @@ pub fn verify_block_family(header: &Header, bytes: &[u8], engine: &Engine, bc: &
}

/// Phase 4 verification. Check block information against transaction enactment results,
-pub fn verify_block_final(expected: &Header, got: &Header, check_receipts: bool) -> Result<(), Error> {
+pub fn verify_block_final(expected: &Header, got: &Header) -> Result<(), Error> {
if expected.gas_used() != got.gas_used() {
return Err(From::from(BlockError::InvalidGasUsed(Mismatch { expected: expected.gas_used().clone(), found: got.gas_used().clone() })))
}
@@ -188,7 +188,7 @@ pub fn verify_block_final(expected: &Header, got: &Header, check_receipts: bool)
if expected.state_root() != got.state_root() {
return Err(From::from(BlockError::InvalidStateRoot(Mismatch { expected: expected.state_root().clone(), found: got.state_root().clone() })))
}
-if check_receipts && expected.receipts_root() != got.receipts_root() {
+if expected.receipts_root() != got.receipts_root() {
return Err(From::from(BlockError::InvalidReceiptsRoot(Mismatch { expected: expected.receipts_root().clone(), found: got.receipts_root().clone() })))
}
Ok(())
@@ -26,5 +26,5 @@ pub trait Verifier: Send + Sync {
/// Verify a block relative to its parent and uncles.
fn verify_block_family(&self, header: &Header, bytes: &[u8], engine: &Engine, bc: &BlockProvider) -> Result<(), Error>;
/// Do a final verification check for an enacted header vs its expected counterpart.
-fn verify_block_final(&self, expected: &Header, got: &Header, receipts: bool) -> Result<(), Error>;
+fn verify_block_final(&self, expected: &Header, got: &Header) -> Result<(), Error>;
}
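With the `check_receipts` flag gone, the final check always compares the receipts root; the transition logic earlier in this change set governs when a mismatch can arise. A rough, self-contained sketch of the resulting check order, using a hypothetical `HeaderLite` struct in place of the real `Header`:

```rust
// Not the Parity code: a toy mirror of the simplified two-argument check.
struct HeaderLite { gas_used: u64, state_root: [u8; 32], receipts_root: [u8; 32] }

fn verify_block_final(expected: &HeaderLite, got: &HeaderLite) -> Result<(), String> {
    if expected.gas_used != got.gas_used {
        return Err("InvalidGasUsed".into());
    }
    if expected.state_root != got.state_root {
        return Err("InvalidStateRoot".into());
    }
    // previously gated behind `check_receipts`; now unconditional
    if expected.receipts_root != got.receipts_root {
        return Err("InvalidReceiptsRoot".into());
    }
    Ok(())
}

fn main() {
    let a = HeaderLite { gas_used: 21_000, state_root: [0; 32], receipts_root: [0; 32] };
    let b = HeaderLite { gas_used: 21_000, state_root: [0; 32], receipts_root: [1; 32] };
    assert!(verify_block_final(&a, &b).is_err());
}
```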
@@ -6,7 +6,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
[dependencies]
rust-crypto = "0.2.36"
tiny-keccak = "1.0"
-eth-secp256k1 = { git = "https://github.com/ethcore/rust-secp256k1" }
+eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
ethkey = { path = "../ethkey" }
ethcore-bigint = { path = "../util/bigint" }

@@ -7,9 +7,9 @@ authors = ["Parity Technologies <admin@parity.io>"]
rand = "0.3.14"
lazy_static = "0.2"
tiny-keccak = "1.0"
-eth-secp256k1 = { git = "https://github.com/ethcore/rust-secp256k1" }
+eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
rustc-serialize = "0.3"
-docopt = { version = "0.6", optional = true }
+docopt = { version = "0.7", optional = true }
ethcore-bigint = { path = "../util/bigint" }
rust-crypto = "0.2"
byteorder = "1.0"
@@ -2,12 +2,12 @@

[![Build Status][travis-image]][travis-url]

-[travis-image]: https://travis-ci.org/ethcore/ethkey.svg?branch=master
-[travis-url]: https://travis-ci.org/ethcore/ethkey
+[travis-image]: https://travis-ci.org/paritytech/ethkey.svg?branch=master
+[travis-url]: https://travis-ci.org/paritytech/ethkey

Ethereum keys generator.

-[Documentation](http://ethcore.github.io/ethkey/ethkey/index.html)
+[Documentation](http://paritytech.github.io/ethkey/ethkey/index.html)

### Usage

@@ -163,6 +163,6 @@ true
# Ethcore toolchain
*this project is a part of the ethcore toolchain*

-- [**ethkey**](https://github.com/ethcore/ethkey) - Ethereum keys generator and signer.
-- [**ethstore**](https://github.com/ethcore/ethstore) - Ethereum key management.
-- [**ethabi**](https://github.com/ethcore/ethabi) - Ethereum function calls encoding.
+- [**ethkey**](https://github.com/paritytech/ethkey) - Ethereum keys generator and signer.
+- [**ethstore**](https://github.com/paritytech/ethstore) - Ethereum key management.
+- [**ethabi**](https://github.com/paritytech/ethabi) - Ethereum function calls encoding.
@@ -29,7 +29,7 @@ impl Brain {
impl Generator for Brain {
fn generate(self) -> Result<KeyPair, Error> {
let seed = self.0;
-let mut secret = seed.bytes().collect::<Vec<u8>>().keccak256();
+let mut secret = seed.into_bytes().keccak256();

let mut i = 0;
loop {
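The `Brain` change swaps `seed.bytes().collect::<Vec<u8>>()` for `seed.into_bytes()`; for a `String` both forms yield the same bytes, the new one simply consumes the seed without an intermediate iterator pass. A quick standalone check:

```rust
fn main() {
    let seed = String::from("correct horse battery staple");
    // `into_bytes()` consumes the String, so clone it first for the comparison.
    assert_eq!(seed.clone().into_bytes(), seed.bytes().collect::<Vec<u8>>());
}
```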
@@ -5,8 +5,8 @@ authors = ["Parity Technologies <admin@parity.io>"]

[dependencies]
log = "0.3"
-libc = "0.2.11"
-rand = "0.3.14"
+libc = "0.2"
+rand = "0.3"
ethkey = { path = "../ethkey" }
serde = "0.9"
serde_json = "0.9"
@@ -14,15 +14,15 @@ serde_derive = "0.9"
rustc-serialize = "0.3"
rust-crypto = "0.2.36"
tiny-keccak = "1.0"
-docopt = { version = "0.6", optional = true }
+docopt = { version = "0.7", optional = true }
time = "0.1.34"
-lazy_static = "0.2"
itertools = "0.5"
-parking_lot = "0.3"
+parking_lot = "0.4"
ethcrypto = { path = "../ethcrypto" }
ethcore-util = { path = "../util" }
smallvec = "0.3.1"
ethcore-devtools = { path = "../devtools" }
+parity-wordlist = "1.0"

[features]
cli = ["docopt"]
@@ -2,12 +2,12 @@

[![Build Status][travis-image]][travis-url]

-[travis-image]: https://travis-ci.org/ethcore/ethstore.svg?branch=master
-[travis-url]: https://travis-ci.org/ethcore/ethstore
+[travis-image]: https://travis-ci.org/paritytech/ethstore.svg?branch=master
+[travis-url]: https://travis-ci.org/paritytech/ethstore

Ethereum key management.

-[Documentation](http://ethcore.github.io/ethstore/ethstore/index.html)
+[Documentation](http://paritytech.github.io/ethstore/ethstore/index.html)

### Usage

@@ -321,6 +321,6 @@ OK
# Ethcore toolchain
*this project is a part of the ethcore toolchain*

-- [**ethkey**](https://github.com/ethcore/ethkey) - Ethereum keys generator and signer.
-- [**ethstore**](https://github.com/ethcore/ethstore) - Ethereum key management.
-- [**ethabi**](https://github.com/ethcore/ethabi) - Ethereum function calls encoding.
+- [**ethkey**](https://github.com/paritytech/ethkey) - Ethereum keys generator and signer.
+- [**ethstore**](https://github.com/paritytech/ethstore) - Ethereum key management.
+- [**ethabi**](https://github.com/paritytech/ethabi) - Ethereum function calls encoding.
File diff suppressed because it is too large
@@ -16,32 +16,29 @@

//! Ethereum key-management.


#![warn(missing_docs)]

-extern crate libc;
+extern crate crypto as rcrypto;
extern crate itertools;
-extern crate smallvec;
+extern crate libc;
+extern crate parking_lot;
extern crate rand;
-extern crate time;
+extern crate rustc_serialize;
extern crate serde;
extern crate serde_json;
-extern crate rustc_serialize;
-extern crate crypto as rcrypto;
+extern crate smallvec;
+extern crate time;
extern crate tiny_keccak;
-extern crate parking_lot;
-extern crate ethcore_devtools as devtools;

-// reexport it nicely
-extern crate ethkey as _ethkey;
-extern crate ethcrypto as crypto;
+extern crate ethcore_devtools as devtools;
extern crate ethcore_util as util;
+extern crate ethcrypto as crypto;
+extern crate ethkey as _ethkey;
+extern crate parity_wordlist;

#[macro_use]
extern crate log;
#[macro_use]
-extern crate lazy_static;
-#[macro_use]
extern crate serde_derive;

pub mod dir;
@@ -67,4 +64,5 @@ pub use self::secret_store::{
SecretVaultRef, StoreAccountRef, SimpleSecretStore, SecretStore,
Derivation, IndexDerivation,
};
-pub use self::random::{random_phrase, random_string};
+pub use self::random::random_string;
+pub use self::parity_wordlist::random_phrase;
@@ -15,7 +15,6 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use rand::{Rng, OsRng};
-use itertools::Itertools;

pub trait Random {
fn random() -> Self where Self: Sized;
@@ -39,41 +38,9 @@ impl Random for [u8; 32] {
}
}

-/// Generate a string which is a random phrase of a number of lowercase words.
-///
-/// `words` is the number of words, chosen from a dictionary of 7,530. An value of
-/// 12 gives 155 bits of entropy (almost saturating address space); 20 gives 258 bits
-/// which is enough to saturate 32-byte key space
-pub fn random_phrase(words: usize) -> String {
-lazy_static! {
-static ref WORDS: Vec<String> = String::from_utf8_lossy(include_bytes!("../res/wordlist.txt"))
-.lines()
-.map(|s| s.to_owned())
-.collect();
-}
-let mut rng = OsRng::new().expect("Not able to operate without random source.");
-(0..words).map(|_| rng.choose(&WORDS).unwrap()).join(" ")
-}
-
/// Generate a random string of given length.
pub fn random_string(length: usize) -> String {
let mut rng = OsRng::new().expect("Not able to operate without random source.");
rng.gen_ascii_chars().take(length).collect()
}
-
-#[cfg(test)]
-mod tests {
-use super::random_phrase;
-
-#[test]
-fn should_produce_right_number_of_words() {
-let p = random_phrase(10);
-assert_eq!(p.split(" ").count(), 10);
-}
-
-#[test]
-fn should_not_include_carriage_return() {
-let p = random_phrase(10);
-assert!(!p.contains('\r'), "Carriage return should be trimmed.");
-}
-}
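The deleted doc comment pins the wordlist at 7,530 entries and quotes 155 bits of entropy for 12 words and 258 bits for 20; those figures follow from `words * log2(7530)` (the phrase generator itself now comes from the `parity-wordlist` crate, as the lib.rs re-export above shows). A quick check of the arithmetic:

```rust
fn main() {
    let bits_per_word = (7530_f64).log2();                    // ≈ 12.88 bits per word
    println!("12 words ≈ {:.0} bits", 12.0 * bits_per_word);  // ≈ 155
    println!("20 words ≈ {:.0} bits", 20.0 * bits_per_word);  // ≈ 258
}
```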
@@ -14,7 +14,7 @@ path = "./src/main.rs"

[dependencies]
rustc-serialize = "0.3"
-docopt = { version = "0.6" }
+docopt = { version = "0.7" }
ethcore = { path = "../ethcore" }
ethcore-util = { path = "../util" }

@@ -8,9 +8,9 @@ authors = ["Parity Technologies <admin@parity.io>"]

[dependencies]
log = "0.3"
-parking_lot = "0.3"
-hidapi = { git = "https://github.com/ethcore/hidapi-rs" }
-libusb = { git = "https://github.com/ethcore/libusb-rs" }
+parking_lot = "0.4"
+hidapi = { git = "https://github.com/paritytech/hidapi-rs" }
+libusb = { git = "https://github.com/paritytech/libusb-rs" }
ethkey = { path = "../ethkey" }
ethcore-bigint = { path = "../util/bigint" }

@@ -10,7 +10,7 @@ build = "build.rs"
ethcore-ipc-codegen = { path = "../ipc/codegen" }

[dependencies]
-semver = "0.5"
+semver = "0.6"
ethcore-ipc = { path = "../ipc/rpc" }
ethcore-util = { path = "../util" }

@@ -9,9 +9,9 @@ build = "build.rs"

[dependencies]
ethcore-ipc = { path = "../rpc" }
-nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git", branch = "parity-1.7" }
+nanomsg = { git = "https://github.com/paritytech/nanomsg.rs.git", branch = "parity-1.7" }
ethcore-ipc-nano = { path = "../nano" }
-semver = "0.5"
+semver = "0.6"
log = "0.3"
time = "0.1"

@@ -8,6 +8,6 @@ license = "GPL-3.0"

[dependencies]
ethcore-ipc = { path = "../rpc" }
-nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git", branch = "parity-1.7" }
+nanomsg = { git = "https://github.com/paritytech/nanomsg.rs.git", branch = "parity-1.7" }
log = "0.3"
lazy_static = "0.2"
@@ -8,6 +8,6 @@ license = "GPL-3.0"

[dependencies]
ethcore-devtools = { path = "../../devtools" }
-nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git", branch = "parity-1.7" }
+nanomsg = { git = "https://github.com/paritytech/nanomsg.rs.git", branch = "parity-1.7" }
ethcore-util = { path = "../../util" }
-semver = "0.5"
+semver = "0.6"
@@ -10,8 +10,8 @@ path = "run.rs"
[dependencies]
ethcore-ipc = { path = "../rpc" }
ethcore-devtools = { path = "../../devtools" }
-semver = "0.5"
-nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git", branch = "parity-1.7" }
+semver = "0.6"
+nanomsg = { git = "https://github.com/paritytech/nanomsg.rs.git", branch = "parity-1.7" }
ethcore-ipc-nano = { path = "../nano" }
ethcore-util = { path = "../../util" }
log = "0.3"
@@ -2,7 +2,7 @@

A thin, lightweight promise wrapper for the api.etherscan.io/apis service, exposing a common endpoint for use in JavaScript applications.

-[https://github.com/ethcore/parity/tree/master/js/src/3rdparty/etherscan](https://github.com/ethcore/parity/tree/master/js/src/3rdparty/etherscan)
+[https://github.com/paritytech/parity/tree/master/js/src/3rdparty/etherscan](https://github.com/paritytech/parity/tree/master/js/src/3rdparty/etherscan)

## usage

@@ -11,7 +11,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
-"url": "git+https://github.com/ethcore/parity.git"
+"url": "git+https://github.com/paritytech/parity.git"
},
"keywords": [
"Ethereum",
@@ -2,4 +2,4 @@

JSON and JS interface defintions for RPC calls.

-[https://github.com/ethcore/parity/tree/master/js/src/jsonrpc](https://github.com/ethcore/parity/tree/master/js/src/jsonrpc)
+[https://github.com/paritytech/parity/tree/master/js/src/jsonrpc](https://github.com/paritytech/parity/tree/master/js/src/jsonrpc)
@@ -11,7 +11,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
-"url": "git+https://github.com/ethcore/parity.git"
+"url": "git+https://github.com/paritytech/parity.git"
},
"keywords": [
"Ethereum",
@@ -2,7 +2,7 @@

Parity.js is a thin, fast, Promise-based wrapper around the Ethereum APIs.

-[https://github.com/ethcore/parity/tree/master/js/src/api](https://github.com/ethcore/parity/tree/master/js/src/api)
+[https://github.com/paritytech/parity/tree/master/js/src/api](https://github.com/paritytech/parity/tree/master/js/src/api)

## installation

@@ -80,4 +80,4 @@ contract.instance

## apis

-APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/ethcore/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.
+APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/paritytech/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.
@@ -11,7 +11,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
-"url": "git+https://github.com/ethcore/parity.git"
+"url": "git+https://github.com/paritytech/parity.git"
},
"keywords": [
"Ethereum",
@@ -2,7 +2,7 @@

A thin ES6 promise wrapper around the shapeshift.io APIs as documented at https://shapeshift.io/api

-[https://github.com/ethcore/parity/tree/master/js/src/3rdparty/shapeshift](https://github.com/ethcore/parity/tree/master/js/src/3rdparty/shapeshift)
+[https://github.com/paritytech/parity/tree/master/js/src/3rdparty/shapeshift](https://github.com/paritytech/parity/tree/master/js/src/3rdparty/shapeshift)

## usage

@@ -11,7 +11,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
-"url": "git+https://github.com/ethcore/parity.git"
+"url": "git+https://github.com/paritytech/parity.git"
},
"keywords": [
"Ethereum",
@@ -1,6 +1,6 @@
{
"name": "parity.js",
-"version": "1.7.29",
+"version": "1.7.43",
"main": "release/index.js",
"jsnext:main": "src/index.js",
"author": "Parity Team <admin@parity.io>",
@@ -14,7 +14,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
-"url": "git+https://github.com/ethcore/parity.git"
+"url": "git+https://github.com/paritytech/parity.git"
},
"keywords": [
"Ethereum",
@@ -26,6 +26,11 @@
"Promise"
],
"scripts": {
+"install": "napa",
+"analize": "npm run analize:lib && npm run analize:dll && npm run analize:app",
+"analize:app": "WPANALIZE=1 webpack --config webpack/app --json > .build/analize.app.json && cat .build/analize.app.json | webpack-bundle-size-analyzer",
+"analize:lib": "WPANALIZE=1 webpack --config webpack/libraries --json > .build/analize.lib.json && cat .build/analize.lib.json | webpack-bundle-size-analyzer",
+"analize:dll": "WPANALIZE=1 webpack --config webpack/vendor --json > .build/analize.dll.json && cat .build/analize.dll.json | webpack-bundle-size-analyzer",
"build": "npm run build:lib && npm run build:dll && npm run build:app && npm run build:embed",
"build:app": "webpack --config webpack/app",
"build:lib": "webpack --config webpack/libraries",
@@ -49,6 +54,7 @@
"lint:cached": "npm run lint:css && npm run lint:js:cached",
"lint:css": "stylelint ./src/**/*.css",
"lint:fix": "npm run lint:js:fix",
+"lint:i18n": "babel-node ./scripts/lint-i18n.js",
"lint:js": "eslint --ignore-path .gitignore ./src/",
"lint:js:cached": "eslint --cache --ignore-path .gitignore ./src/",
"lint:js:fix": "eslint --fix --ignore-path .gitignore ./src/",
@@ -58,6 +64,9 @@
"test:npm": "(cd .npmjs && npm i) && node test/npmParity && node test/npmJsonRpc && (rm -rf .npmjs/node_modules)",
"prepush": "npm run lint:cached"
},
+"napa": {
+"qrcode-generator": "kazuhikoarase/qrcode-generator"
+},
"devDependencies": {
"babel-cli": "6.23.0",
"babel-core": "6.23.1",
@@ -141,6 +150,7 @@
"to-source": "2.0.3",
"url-loader": "0.5.7",
"webpack": "2.2.1",
+"webpack-bundle-size-analyzer": "2.5.0",
"webpack-dev-middleware": "1.10.1",
"webpack-error-notification": "0.1.6",
"webpack-hot-middleware": "2.17.1",
@@ -148,6 +158,7 @@
"yargs": "6.6.0"
},
"dependencies": {
+"@parity/wordlist": "1.0.1",
"base32.js": "0.1.0",
"bignumber.js": "3.0.1",
"blockies": "0.0.2",
@@ -165,6 +176,7 @@
"geopattern": "1.2.3",
"isomorphic-fetch": "2.2.1",
"js-sha3": "0.5.5",
+"keythereum": "0.4.3",
"lodash": "4.17.2",
"loglevel": "1.4.1",
"marked": "0.3.6",
@@ -174,22 +186,23 @@
"mobx-react": "4.0.3",
"mobx-react-devtools": "4.2.10",
"moment": "2.17.0",
+"napa": "2.3.0",
"phoneformat.js": "1.0.3",
"promise-worker": "1.1.1",
"push.js": "0.0.11",
-"qrcode-npm": "0.0.3",
"qs": "6.3.0",
-"react": "15.4.1",
+"react": "15.4.2",
"react-ace": "4.1.0",
-"react-addons-css-transition-group": "15.4.1",
+"react-addons-css-transition-group": "15.4.2",
"react-copy-to-clipboard": "4.2.3",
-"react-dom": "15.4.1",
+"react-dom": "15.4.2",
"react-dropzone": "3.7.3",
"react-element-to-jsx-string": "6.0.0",
"react-event-listener": "0.4.1",
"react-intl": "2.1.5",
"react-markdown": "2.4.4",
"react-portal": "3.0.0",
+"react-qr-reader": "1.0.3",
"react-redux": "4.4.6",
"react-router": "3.0.0",
"react-router-redux": "4.0.7",
@@ -201,12 +214,13 @@
"redux-thunk": "2.1.0",
"rlp": "2.0.0",
"scryptsy": "2.0.0",
+"secp256k1": "3.2.5",
"solc": "ngotchac/solc-js",
"store": "1.3.20",
"sw-toolbox": "^3.6.0",
"u2f-api": "0.0.9",
"u2f-api-polyfill": "0.4.3",
-"uglify-js": "2.8.2",
+"uglify-js": "2.8.16",
"useragent.js": "0.5.6",
"utf8": "2.1.2",
"valid-url": "1.0.9",
53 js/scripts/lint-i18n.js Normal file
@@ -0,0 +1,53 @@
+// Copyright 2015-2017 Parity Technologies (UK) Ltd.
+// This file is part of Parity.
+
+// Parity is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// Parity is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with Parity. If not, see <http://www.gnu.org/licenses/>.
+
+import flatten from 'flat';
+
+import * as defaults from '../src/i18n/_default';
+import { LANGUAGES, MESSAGES } from '../src/i18n/store';
+
+const SKIP_LANG = ['en'];
+const defaultKeys = Object.keys(flatten(Object.assign({}, defaults, LANGUAGES)));
+
+Object
+  .keys(MESSAGES)
+  .filter((lang) => !SKIP_LANG.includes(lang))
+  .forEach((lang) => {
+    const messageKeys = Object.keys(MESSAGES[lang]);
+    let extra = 0;
+    let found = 0;
+    let missing = 0;
+
+    console.log(`*** Checking translations for ${lang}`);
+
+    defaultKeys.forEach((key) => {
+      if (messageKeys.includes(key)) {
+        found++;
+      } else {
+        missing++;
+        console.log(`  Missing ${key}`);
+      }
+    });
+
+    messageKeys.forEach((key) => {
+      if (!defaultKeys.includes(key)) {
+        extra++;
+        console.log(`  Extra ${key}`);
+      }
+    });
+
+    console.log(`Found ${found} keys, missing ${missing} keys, ${extra} extraneous keys\n`);
+  });
@@ -5,8 +5,8 @@ set -e
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
PACKAGES=( "parity" "etherscan" "shapeshift" "jsonrpc" )
BRANCH=$CI_BUILD_REF_NAME
-GIT_JS_PRECOMPILED="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git"
-GIT_PARITY="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git"
+GIT_JS_PRECOMPILED="https://${GITHUB_JS_PRECOMPILED}:@github.com/paritytech/js-precompiled.git"
+GIT_PARITY="https://${GITHUB_JS_PRECOMPILED}:@github.com/paritytech/parity.git"

# setup the git user defaults for the current repo
function setup_git_user {
@@ -1,2 +1,2 @@
-// test script 8
+// test script 9
// trigger rebuild on master 15 Mar 2017, 11:19
@@ -1,6 +1,6 @@
# ethabi-js

-A very early, very POC-type port of [https://github.com/ethcore/ethabi](https://github.com/ethcore/ethabi) to JavaScript
+A very early, very POC-type port of [https://github.com/paritytech/ethabi](https://github.com/paritytech/ethabi) to JavaScript

[![Build Status](https://travis-ci.org/jacogr/ethabi-js.svg?branch=master)](https://travis-ci.org/jacogr/ethabi-js)
[![Coverage Status](https://coveralls.io/repos/github/jacogr/ethabi-js/badge.svg?branch=master)](https://coveralls.io/github/jacogr/ethabi-js?branch=master)
@@ -131,16 +131,16 @@ contract

## apis

-APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/ethcore/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.
+APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/paritytech/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.

-- [ethapi.db](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#db)
-- [ethapi.eth](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#eth)
-- [ethapi.parity](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#parity)
-- [ethapi.net](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#net)
-- [ethapi.personal](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#personal)
-- [ethapi.shh](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#shh)
-- [ethapi.signer](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#signer)
-- [ethapi.trace](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#trace)
-- [ethapi.web3](https://github.com/ethcore/ethereum-rpc-json/blob/master/interfaces.md#web3)
+- [ethapi.db](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#db)
+- [ethapi.eth](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#eth)
+- [ethapi.parity](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#parity)
+- [ethapi.net](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#net)
+- [ethapi.personal](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#personal)
+- [ethapi.shh](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#shh)
+- [ethapi.signer](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#signer)
+- [ethapi.trace](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#trace)
+- [ethapi.web3](https://github.com/paritytech/ethereum-rpc-json/blob/master/interfaces.md#web3)

As a verification step, all exposed interfaces are tested for existing and pointing to the correct endpoints by using the generated interfaces from the above repo.
@@ -23,9 +23,10 @@ import { Db, Eth, Parity, Net, Personal, Shh, Signer, Trace, Web3 } from './rpc'
import Subscriptions from './subscriptions';
import util from './util';
import { isFunction } from './util/types';
+import { LocalAccountsMiddleware } from './local';

export default class Api extends EventEmitter {
-constructor (transport) {
+constructor (transport, allowSubscriptions = true) {
super();

if (!transport || !isFunction(transport.execute)) {
@@ -44,7 +45,24 @@ export default class Api extends EventEmitter {
this._trace = new Trace(transport);
this._web3 = new Web3(transport);

-this._subscriptions = new Subscriptions(this);
+if (allowSubscriptions) {
+  this._subscriptions = new Subscriptions(this);
+}
+
+// Doing a request here in test env would cause an error
+if (process.env.NODE_ENV !== 'test') {
+  const middleware = this.parity
+    .nodeKind()
+    .then((nodeKind) => {
+      if (nodeKind.availability === 'public') {
+        return new LocalAccountsMiddleware(transport);
+      }
+
+      return null;
+    });
+
+  transport.addMiddleware(middleware);
+}
}

get db () {
@@ -96,10 +114,18 @@ export default class Api extends EventEmitter {
}

subscribe (subscriptionName, callback) {
+if (!this._subscriptions) {
+  return Promise.resolve(1);
+}
+
return this._subscriptions.subscribe(subscriptionName, callback);
}

unsubscribe (subscriptionId) {
+if (!this._subscriptions) {
+  return Promise.resolve(true);
+}
+
return this._subscriptions.unsubscribe(subscriptionId);
}
@@ -139,46 +139,46 @@ export function inOptionsCondition (condition) {
  return condition;
}

-export function inOptions (options) {
-  if (options) {
-    Object.keys(options).forEach((key) => {
+export function inOptions (_options = {}) {
+  const options = { ..._options };
+
+  Object.keys(options).forEach((key) => {
    switch (key) {
      case 'to':
        // Don't encode the `to` option if it's empty
        // (eg. contract deployments)
        if (options[key]) {
-          options[key] = inAddress(options[key]);
+          options.to = inAddress(options[key]);
        }
        break;

      case 'from':
        options[key] = inAddress(options[key]);
        break;

      case 'condition':
        options[key] = inOptionsCondition(options[key]);
        break;

      case 'gas':
      case 'gasPrice':
        options[key] = inNumber16((new BigNumber(options[key])).round());
        break;

      case 'minBlock':
        options[key] = options[key] ? inNumber16(options[key]) : null;
        break;

      case 'value':
      case 'nonce':
        options[key] = inNumber16(options[key]);
        break;

      case 'data':
        options[key] = inData(options[key]);
        break;
    }
-    });
-  }
+  });

  return options;
}
@@ -140,6 +140,10 @@ export function outHwAccountInfo (infos) {
}, {});
}

+export function outNodeKind (info) {
+  return info;
+}
+
export function outNumber (number) {
return new BigNumber(number || 0);
}
@@ -16,7 +16,7 @@

import BigNumber from 'bignumber.js';

-import { outBlock, outAccountInfo, outAddress, outChainStatus, outDate, outHistogram, outHwAccountInfo, outNumber, outPeer, outPeers, outReceipt, outRecentDapps, outSyncing, outTransaction, outTrace, outVaultMeta } from './output';
+import { outBlock, outAccountInfo, outAddress, outChainStatus, outDate, outHistogram, outHwAccountInfo, outNodeKind, outNumber, outPeer, outPeers, outReceipt, outRecentDapps, outSyncing, outTransaction, outTrace, outVaultMeta } from './output';
import { isAddress, isBigNumber, isInstanceOf } from '../../../test/types';

describe('api/format/output', () => {
@@ -173,6 +173,14 @@ describe('api/format/output', () => {
});
});

+describe('outNodeKind', () => {
+  it('formats the input as received', () => {
+    const kind = { availability: 'personal', capability: 'full' };
+
+    expect(outNodeKind(kind)).to.deep.equal(kind);
+  });
+});
+
describe('outNumber', () => {
it('returns a BigNumber equalling the value', () => {
const bn = outNumber('0x123456');
js/src/api/local/accounts/account.js (new file, 95 lines)
@ -0,0 +1,95 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

import { keythereum } from '../ethkey';

export default class Account {
  constructor (persist, data) {
    const {
      keyObject,
      meta = {},
      name = ''
    } = data;

    this._persist = persist;
    this._keyObject = keyObject;
    this._name = name;
    this._meta = meta;
  }

  isValidPassword (password) {
    try {
      keythereum.recover(Buffer.from(password), this._keyObject);
      return true;
    } catch (e) {
      return false;
    }
  }

  get address () {
    return `0x${this._keyObject.address.toLowerCase()}`;
  }

  get name () {
    return this._name;
  }

  set name (name) {
    this._name = name;

    this._persist();
  }

  get meta () {
    return JSON.stringify(this._meta);
  }

  set meta (meta) {
    this._meta = JSON.parse(meta);

    this._persist();
  }

  get uuid () {
    return this._keyObject.id;
  }

  decryptPrivateKey (password) {
    return keythereum.recover(Buffer.from(password), this._keyObject);
  }

  static fromPrivateKey (persist, key, password) {
    const iv = keythereum.crypto.randomBytes(16);
    const salt = keythereum.crypto.randomBytes(32);

    // Keythereum will fail if `password` is an empty string
    password = Buffer.from(password);

    const keyObject = keythereum.dump(password, key, salt, iv);

    const account = new Account(persist, { keyObject });

    return account;
  }

  toJSON () {
    return {
      keyObject: this._keyObject,
      name: this._name,
      meta: this._meta
    };
  }
}
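A minimal usage sketch for the Account class above (not part of the diff; it assumes a browser-like context where keythereum is actually loaded, uses a no-op persist callback, and a throwaway private key):

import Account from './account';

// Hypothetical 32-byte private key and a no-op persistence hook.
const privateKey = Buffer.alloc(32, 1);
const persist = () => {};

const account = Account.fromPrivateKey(persist, privateKey, 'hunter2');

console.log(account.address);                    // lowercased '0x…' address derived by keythereum
console.log(account.isValidPassword('hunter2')); // true
console.log(account.isValidPassword('wrong'));   // false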
js/src/api/local/accounts/accounts.js (new file, 120 lines)
@ -0,0 +1,120 @@
// (GPL-3.0 license header as in account.js above)

import Account from './account';
import localStore from 'store';
import { debounce } from 'lodash';

const NULL_ADDRESS = '0x0000000000000000000000000000000000000000';
const LS_STORE_KEY = '_parity::localAccounts';

export default class Accounts {
  constructor (data = localStore.get(LS_STORE_KEY) || {}) {
    const {
      last = NULL_ADDRESS,
      store = []
    } = data;

    this.persist = debounce(() => {
      localStore.set(LS_STORE_KEY, this);
    }, 100);

    this._last = last;
    this._store = store.map((data) => new Account(this.persist, data));
  }

  create (secret, password) {
    const privateKey = Buffer.from(secret.slice(2), 'hex');
    const account = Account.fromPrivateKey(this.persist, privateKey, password);

    this._store.push(account);
    this.lastAddress = account.address;

    this.persist();

    return account.address;
  }

  set lastAddress (value) {
    this._last = value.toLowerCase();
  }

  get lastAddress () {
    return this._last;
  }

  get (address) {
    address = address.toLowerCase();

    this.lastAddress = address;

    const account = this._store.find((account) => account.address === address);

    if (!account) {
      throw new Error(`Account not found: ${address}`);
    }

    return account;
  }

  remove (address, password) {
    address = address.toLowerCase();

    const index = this._store.findIndex((account) => account.address === address);

    if (index === -1) {
      return false;
    }

    const account = this._store[index];

    if (!account.isValidPassword(password)) {
      console.log('invalid password');
      return false;
    }

    if (address === this.lastAddress) {
      this.lastAddress = NULL_ADDRESS;
    }

    this._store.splice(index, 1);

    this.persist();

    return true;
  }

  mapArray (mapper) {
    return this._store.map(mapper);
  }

  mapObject (mapper) {
    const result = {};

    this._store.forEach((account) => {
      result[account.address] = mapper(account);
    });

    return result;
  }

  toJSON () {
    return {
      last: this._last,
      store: this._store
    };
  }
}
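A sketch of how the store above might be exercised (not part of the diff; browser-only, since persistence goes through localStorage via the store package, and the secret and password values are placeholders):

import Accounts from './accounts';

const accounts = new Accounts();

// create() takes a '0x'-prefixed secret and a password, stores the
// encrypted key object and returns the new address.
const address = accounts.create('0x' + '11'.repeat(32), 'hunter2');

accounts.get(address).name = 'local test account'; // get() also marks the account as last used
console.log(accounts.lastAddress === address);      // true

accounts.remove(address, 'hunter2');                // only succeeds with the correct password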
@ -14,6 +14,8 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
-import { createAction } from 'redux-actions';
+import Accounts from './accounts';
 
-export const initAppAction = createAction('init app');
+const accounts = new Accounts();
+
+export default accounts;
js/src/api/local/ethkey/index.js (new file, 44 lines)
@ -0,0 +1,44 @@
// (GPL-3.0 license header as in account.js above)

// Allow a web worker in the browser, with a fallback for Node.js
const hasWebWorkers = typeof Worker !== 'undefined';
const KeyWorker = hasWebWorkers ? require('worker-loader!./worker')
                                : require('./worker').KeyWorker;

// Local accounts should never be used outside of the browser
export let keythereum = null;

if (hasWebWorkers) {
  require('keythereum/dist/keythereum');

  keythereum = window.keythereum;
}

export function phraseToAddress (phrase) {
  return phraseToWallet(phrase).then((wallet) => wallet.address);
}

export function phraseToWallet (phrase) {
  return new Promise((resolve, reject) => {
    const worker = new KeyWorker();

    worker.postMessage(phrase);
    worker.onmessage = ({ data }) => {
      resolve(data);
    };
  });
}
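A small sketch of the promise-based API this module exposes (not part of the diff; the phrase is only an example). The empty phrase maps to the well-known development account, which the spec below also checks.

import { phraseToAddress, phraseToWallet } from './';

// Derivation runs in a web worker (or inline in Node.js), so both calls return promises.
phraseToAddress('')
  .then((address) => {
    console.log(address); // 0x00a329c0648769a73afac7f9381e08fb43dbea72
  });

phraseToWallet('example recovery phrase')
  .then(({ address, public: publicKey, secret }) => {
    console.log(address, publicKey.length, secret.length);
  });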
js/src/api/local/ethkey/index.spec.js (new file, 57 lines)
@ -0,0 +1,57 @@
// (GPL-3.0 license header as in account.js above)

import { randomPhrase } from '@parity/wordlist';
import { phraseToAddress, phraseToWallet } from './';

describe('api/local/ethkey', () => {
  describe.skip('phraseToAddress', function () {
    this.timeout(10000);

    it('generates a valid address', () => {
      const phrase = randomPhrase(12);

      return phraseToAddress(phrase).then((address) => {
        expect(address.length).to.be.equal(42);
        expect(address.slice(0, 4)).to.be.equal('0x00');
      });
    });

    it('generates valid address for empty phrase', () => {
      return phraseToAddress('').then((address) => {
        expect(address).to.be.equal('0x00a329c0648769a73afac7f9381e08fb43dbea72');
      });
    });
  });

  describe.skip('phraseToWallet', function () {
    this.timeout(10000);

    it('generates a valid wallet object', () => {
      const phrase = randomPhrase(12);

      return phraseToWallet(phrase).then((wallet) => {
        expect(wallet.address.length).to.be.equal(42);
        expect(wallet.secret.length).to.be.equal(66);
        expect(wallet.public.length).to.be.equal(130);

        expect(wallet.address.slice(0, 4)).to.be.equal('0x00');
        expect(wallet.secret.slice(0, 2)).to.be.equal('0x');
        expect(wallet.public.slice(0, 2)).to.be.equal('0x');
      });
    });
  });
});
js/src/api/local/ethkey/worker.js (new file, 89 lines)
@ -0,0 +1,89 @@
// (GPL-3.0 license header as in account.js above)

import { keccak_256 as keccak256 } from 'js-sha3';
import secp256k1 from 'secp256k1/js';

// Stay compatible between environments
if (typeof self !== 'object') {
  const scope = typeof global === 'undefined' ? window : global;

  scope.self = scope;
}

function bytesToHex (bytes) {
  return '0x' + Array.from(bytes).map(n => ('0' + n.toString(16)).slice(-2)).join('');
}

// Logic ported from /ethkey/src/brain.rs
function phraseToWallet (phrase) {
  let secret = keccak256.array(phrase);

  for (let i = 0; i < 16384; i++) {
    secret = keccak256.array(secret);
  }

  while (true) {
    secret = keccak256.array(secret);

    const secretBuf = Buffer.from(secret);

    if (secp256k1.privateKeyVerify(secretBuf)) {
      // No compression, slice out last 64 bytes
      const publicBuf = secp256k1.publicKeyCreate(secretBuf, false).slice(-64);
      const address = keccak256.array(publicBuf).slice(12);

      if (address[0] !== 0) {
        continue;
      }

      const wallet = {
        secret: bytesToHex(secretBuf),
        public: bytesToHex(publicBuf),
        address: bytesToHex(address)
      };

      return wallet;
    }
  }
}

self.onmessage = function ({ data }) {
  const wallet = phraseToWallet(data);

  postMessage(wallet);
  close();
};

// Emulate a web worker in Node.js
class KeyWorker {
  postMessage (data) {
    // Force async
    setTimeout(() => {
      const wallet = phraseToWallet(data);

      this.onmessage({ data: wallet });
    }, 0);
  }

  onmessage (event) {
    // no-op to be overriden
  }
}

if (exports != null) {
  exports.KeyWorker = KeyWorker;
}
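A sketch of driving the Node.js fallback directly (not part of the diff; it assumes the module has been transpiled so that require() against it works, and the phrase is a placeholder):

const { KeyWorker } = require('./worker');

const worker = new KeyWorker();

// postMessage() runs phraseToWallet asynchronously and echoes the result
// back through onmessage, mirroring the browser worker protocol.
worker.onmessage = ({ data: wallet }) => {
  console.log(wallet.address, wallet.secret.length, wallet.public.length);
};

worker.postMessage('example recovery phrase');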
js/src/api/local/index.js (new file, 17 lines)
@ -0,0 +1,17 @@
// (GPL-3.0 license header as in account.js above)

export LocalAccountsMiddleware from './middleware';
js/src/api/local/middleware.js (new file, 174 lines)
@ -0,0 +1,174 @@
// (GPL-3.0 license header as in account.js above)

import EthereumTx from 'ethereumjs-tx';
import accounts from './accounts';
import transactions from './transactions';
import { Middleware } from '../transport';
import { inNumber16 } from '../format/input';
import { phraseToWallet, phraseToAddress } from './ethkey';
import { randomPhrase } from '@parity/wordlist';

export default class LocalAccountsMiddleware extends Middleware {
  // Maps transaction requests to transaction hashes.
  // This allows the locally-signed transactions to emulate the signer.
  transactionHashes = {};
  transactions = {};

  // Current transaction id. This doesn't need to be stored, as it's
  // only relevant for the current session.
  transactionId = 1;

  constructor (transport) {
    super(transport);

    const register = this.register.bind(this);

    register('eth_accounts', () => {
      return accounts.mapArray((account) => account.address);
    });

    register('eth_coinbase', () => {
      return accounts.lastAddress;
    });

    register('parity_accountsInfo', () => {
      return accounts.mapObject(({ name }) => {
        return { name };
      });
    });

    register('parity_allAccountsInfo', () => {
      return accounts.mapObject(({ name, meta, uuid }) => {
        return { name, meta, uuid };
      });
    });

    register('parity_checkRequest', ([id]) => {
      return transactions.hash(id) || Promise.resolve(null);
    });

    register('parity_defaultAccount', () => {
      return accounts.lastAddress;
    });

    register('parity_generateSecretPhrase', () => {
      return randomPhrase(12);
    });

    register('parity_getNewDappsAddresses', () => {
      return [];
    });

    register('parity_hardwareAccountsInfo', () => {
      return {};
    });

    register('parity_newAccountFromPhrase', ([phrase, password]) => {
      return phraseToWallet(phrase)
        .then((wallet) => {
          return accounts.create(wallet.secret, password);
        });
    });

    register('parity_setAccountMeta', ([address, meta]) => {
      accounts.get(address).meta = meta;

      return true;
    });

    register('parity_setAccountName', ([address, name]) => {
      accounts.get(address).name = name;

      return true;
    });

    register('parity_postTransaction', ([tx]) => {
      if (!tx.from) {
        tx.from = accounts.lastAddress;
      }

      tx.nonce = null;
      tx.condition = null;

      return transactions.add(tx);
    });

    register('parity_phraseToAddress', ([phrase]) => {
      return phraseToAddress(phrase);
    });

    register('parity_useLocalAccounts', () => {
      return true;
    });

    register('parity_listGethAccounts', () => {
      return [];
    });

    register('parity_listRecentDapps', () => {
      return {};
    });

    register('parity_killAccount', ([address, password]) => {
      return accounts.remove(address, password);
    });

    register('signer_confirmRequest', ([id, modify, password]) => {
      const {
        gasPrice,
        gas: gasLimit,
        from,
        to,
        value,
        data
      } = Object.assign(transactions.get(id), modify);

      return this
        .rpcRequest('parity_nextNonce', [from])
        .then((nonce) => {
          const tx = new EthereumTx({
            nonce,
            to,
            data,
            gasLimit: inNumber16(gasLimit),
            gasPrice: inNumber16(gasPrice),
            value: inNumber16(value)
          });
          const account = accounts.get(from);

          tx.sign(account.decryptPrivateKey(password));

          const serializedTx = `0x${tx.serialize().toString('hex')}`;

          return this.rpcRequest('eth_sendRawTransaction', [serializedTx]);
        })
        .then((hash) => {
          transactions.confirm(id, hash);

          return {};
        });
    });

    register('signer_rejectRequest', ([id]) => {
      return transactions.reject(id);
    });

    register('signer_requestsToConfirm', () => {
      return transactions.requestsToConfirm();
    });
  }
}
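A hedged sketch of the end-to-end flow these handlers implement, seen from a dapp (not part of the diff; the api.parity.* and api.signer.* method names are assumed from parity.js conventions, and how the middleware is wired into the transport is not shown here):

// Illustrative flow, assuming an Api instance whose transport runs LocalAccountsMiddleware.
async function sendLocally (api) {
  // 1. Queue the transaction; the middleware hands back a request id.
  const requestId = await api.parity.postTransaction({
    to: '0x0000000000000000000000000000000000000001',
    value: '0x0'
  });

  // 2. A signer UI (or this code) confirms it; the middleware signs the
  //    transaction locally with ethereumjs-tx and submits it through
  //    eth_sendRawTransaction.
  await api.signer.confirmRequest(requestId, {}, 'hunter2');

  // 3. checkRequest resolves to null while pending and to the transaction
  //    hash once the request has been confirmed.
  return api.parity.checkRequest(requestId);
}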
js/src/api/local/transactions.js (new file, 123 lines)
@ -0,0 +1,123 @@
// (GPL-3.0 license header as in account.js above)

import { toHex } from '../util/format';
import { TransportError } from '../transport';

const AWAITING = Symbol('awaiting');
const CONFIRMED = Symbol('confirmed');
const REJECTED = Symbol('rejected');

class Transactions {
  constructor () {
    this.reset();
  }

  // should only really be needed in the constructor and tests
  reset () {
    this._id = 1;
    this._states = {};
  }

  nextId () {
    return toHex(this._id++);
  }

  add (tx) {
    const id = this.nextId();

    this._states[id] = {
      status: AWAITING,
      transaction: tx
    };

    return id;
  }

  get (id) {
    const state = this._states[id];

    if (!state || state.status !== AWAITING) {
      return null;
    }

    return state.transaction;
  }

  hash (id) {
    const state = this._states[id];

    if (!state) {
      return null;
    }

    switch (state.status) {
      case REJECTED:
        throw TransportError.requestRejected();
      case CONFIRMED:
        return state.hash;
      default:
        return null;
    }
  }

  confirm (id, hash) {
    const state = this._states[id];

    if (!state || state.status !== AWAITING) {
      throw new Error('Trying to confirm an invalid transaction');
    }

    state.hash = hash;
    state.status = CONFIRMED;
  }

  reject (id) {
    const state = this._states[id];

    if (!state) {
      return false;
    }

    state.status = REJECTED;

    return true;
  }

  requestsToConfirm () {
    const result = [];

    Object.keys(this._states).forEach((id) => {
      const state = this._states[id];

      if (state.status === AWAITING) {
        result.push({
          id,
          origin: {
            signer: '0x0'
          },
          payload: {
            sendTransaction: state.transaction
          }
        });
      }
    });

    return result;
  }
}

export default new Transactions();
js/src/api/local/transactions.spec.js (new file, 68 lines)
@ -0,0 +1,68 @@
// (GPL-3.0 license header as in account.js above)

import transactions from './transactions';
import { TransportError } from '../transport/error';

const DUMMY_TX = 'dummy';

describe('api/local/transactions', () => {
  beforeEach(() => {
    transactions.reset();
  });

  it('can store transactions', () => {
    const id1 = transactions.add(DUMMY_TX);
    const id2 = transactions.add(DUMMY_TX);
    const requests = transactions.requestsToConfirm();

    expect(id1).to.be.equal('0x1');
    expect(id2).to.be.equal('0x2');
    expect(requests.length).to.be.equal(2);
    expect(requests[0].id).to.be.equal(id1);
    expect(requests[1].id).to.be.equal(id2);
    expect(requests[0].payload.sendTransaction).to.be.equal(DUMMY_TX);
    expect(requests[1].payload.sendTransaction).to.be.equal(DUMMY_TX);
  });

  it('can confirm transactions', () => {
    const id1 = transactions.add(DUMMY_TX);
    const id2 = transactions.add(DUMMY_TX);

    const hash1 = '0x1111111111111111111111111111111111111111';
    const hash2 = '0x2222222222222222222222222222222222222222';

    transactions.confirm(id1, hash1);
    transactions.confirm(id2, hash2);

    const requests = transactions.requestsToConfirm();

    expect(requests.length).to.be.equal(0);
    expect(transactions.hash(id1)).to.be.equal(hash1);
    expect(transactions.hash(id2)).to.be.equal(hash2);
  });

  it('can reject transactions', () => {
    const id = transactions.add(DUMMY_TX);

    transactions.reject(id);

    const requests = transactions.requestsToConfirm();

    expect(requests.length).to.be.equal(0);
    expect(() => transactions.hash(id)).to.throw(TransportError);
  });
});
Some files were not shown because too many files have changed in this diff.