Compare commits
132 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
badb04502f | ||
|
a44fad04fa | ||
|
7ae5d8ebdc | ||
|
b7b1484f2e | ||
|
da8c4df8cc | ||
|
06287c478a | ||
|
6ad6c3cbf9 | ||
|
25936ae0f6 | ||
|
7bfbe41c72 | ||
|
e38293b085 | ||
|
4bb517ec94 | ||
|
de906d4afd | ||
|
fafb534cd3 | ||
|
a5a06e49ba | ||
|
9f96fa0a73 | ||
|
bbae075c60 | ||
|
bacc0f0b9a | ||
|
582a4ea339 | ||
|
09edb94d53 | ||
|
02e33c4f91 | ||
|
8d24b4e804 | ||
|
895574b774 | ||
|
b4af8df535 | ||
|
5f064a9076 | ||
|
5b30f22011 | ||
|
9d9e2b43f2 | ||
|
5dc5be1e58 | ||
|
306c1764eb | ||
|
f6a390849b | ||
|
cd088a4345 | ||
|
bd9a8aa22b | ||
|
8fc504eb1a | ||
|
c1b3d5fe1a | ||
|
e53bf9a95e | ||
|
decc9eaa85 | ||
|
3f61f2d8d9 | ||
|
7f02a08741 | ||
|
cff1bc88fa | ||
|
59f0eb7e6b | ||
|
a729ca3223 | ||
|
afe14b42e2 | ||
|
62832c93b0 | ||
|
1887080990 | ||
|
edc38c2cee | ||
|
16e0ad1288 | ||
|
5a561997cf | ||
|
413442d7be | ||
|
4489ca0a38 | ||
|
3a2c173fe1 | ||
|
9c051ab756 | ||
|
63221c5152 | ||
|
8714a40d84 | ||
|
825c7990f2 | ||
|
d743df549b | ||
|
6022c47b53 | ||
|
02eb046774 | ||
|
f2dd032884 | ||
|
55c046cb88 | ||
|
7701f73cdf | ||
|
213e007a4b | ||
|
78d0a8696f | ||
|
e1333ea1e9 | ||
|
4ee217ba7c | ||
|
6fc5014b4d | ||
|
859a41308c | ||
|
d2120ded56 | ||
|
be5db14160 | ||
|
1786b6eedd | ||
|
2d693be735 | ||
|
35c607f6be | ||
|
20248c443b | ||
|
bf55db4c7e | ||
|
dbdb57a8c0 | ||
|
78a1d8b7b4 | ||
|
1c076af5ee | ||
|
bf4fa658f3 | ||
|
5da8da68cc | ||
|
083dcc369b | ||
|
7827cc048e | ||
|
9de1afeeb6 | ||
|
eed630a002 | ||
|
6be45367e9 | ||
|
f7dae48c17 | ||
|
44161874ff | ||
|
faf6f1f9ea | ||
|
425dcd45c2 | ||
|
dae5d75dd6 | ||
|
33d3bfae8b | ||
|
7e89bab4aa | ||
|
9584faee55 | ||
|
9bbf8b6c0f | ||
|
bf242552f3 | ||
|
752031a657 | ||
|
d250f348a3 | ||
|
26d1303034 | ||
|
89987745f6 | ||
|
c31ffab22e | ||
|
21a27fee9f | ||
|
10c121a299 | ||
|
67e75e1da1 | ||
|
412e2ce3a0 | ||
|
73f2ff38ec | ||
|
981ad0bc53 | ||
|
5a581c1c90 | ||
|
87699f8de0 | ||
|
42268fd1ef | ||
|
2cf0d7d775 | ||
|
de91a5532d | ||
|
e91eb337c9 | ||
|
8dfa46f4f0 | ||
|
e04a2f926a | ||
|
98b89c8e4f | ||
|
b30b54e446 | ||
|
4723ea69a1 | ||
|
64fd64fd6b | ||
|
28eb05f032 | ||
|
969a9815e4 | ||
|
4cc274e75f | ||
|
c5fa7aab43 | ||
|
e7cdad6146 | ||
|
92e770e916 | ||
|
6fcd775d48 | ||
|
944bf6a59e | ||
|
90a7ca9d10 | ||
|
9e2b2b361c | ||
|
08f0573fb5 | ||
|
8132d38b50 | ||
|
10e1787ad1 | ||
|
69085aa282 | ||
|
fba63de974 | ||
|
288d73789a | ||
|
002496603c |
@ -1,3 +1,27 @@
|
|||||||
|
# NOTE: if you make changes here, remember to also update:
|
||||||
|
# scripts/test-linux.sh
|
||||||
|
# scripts/build-linux.sh
|
||||||
|
# scripts/build-windows.sh
|
||||||
|
|
||||||
|
# Using 'cfg` is broken, see https://github.com/rust-lang/cargo/issues/6858
|
||||||
|
#[target.'cfg(target_arch = "x86_64")']
|
||||||
|
#rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"]
|
||||||
|
|
||||||
|
# …so instead we list all target triples (Tier 1 64-bit platforms)
|
||||||
|
[target.x86_64-unknown-linux-gnu]
|
||||||
|
# Enables the aes-ni instructions for RustCrypto dependency.
|
||||||
|
rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"]
|
||||||
|
|
||||||
|
[target.x86_64-pc-windows-gnu]
|
||||||
|
# Enables the aes-ni instructions for RustCrypto dependency.
|
||||||
|
rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"]
|
||||||
|
|
||||||
[target.x86_64-pc-windows-msvc]
|
[target.x86_64-pc-windows-msvc]
|
||||||
|
# Enables the aes-ni instructions for RustCrypto dependency.
|
||||||
# Link the C runtime statically ; https://github.com/paritytech/parity-ethereum/issues/6643
|
# Link the C runtime statically ; https://github.com/paritytech/parity-ethereum/issues/6643
|
||||||
rustflags = ["-Ctarget-feature=+crt-static"]
|
rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3", "-Ctarget-feature=+crt-static"]
|
||||||
|
|
||||||
|
[target.x86_64-apple-darwin]
|
||||||
|
# Enables the aes-ni instructions for RustCrypto dependency.
|
||||||
|
rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"]
|
||||||
|
|
||||||
|
37
.github/CONTRIBUTING.md
vendored
37
.github/CONTRIBUTING.md
vendored
@ -24,7 +24,42 @@ Also, try to include **steps to reproduce** the issue and expand on the **actual
|
|||||||
|
|
||||||
If you would like to contribute to Parity Ethereum, please **fork it**, fix bugs or implement features, and [propose a pull request](https://github.com/paritytech/parity-ethereum/compare).
|
If you would like to contribute to Parity Ethereum, please **fork it**, fix bugs or implement features, and [propose a pull request](https://github.com/paritytech/parity-ethereum/compare).
|
||||||
|
|
||||||
Please, refer to the [Coding Guide](https://wiki.parity.io/Coding-guide) in our wiki for more details about hacking on Parity.
|
### Labels & Milestones
|
||||||
|
|
||||||
|
We use [labels](https://github.com/paritytech/parity-ethereum/labels) to manage PRs and issues and communicate the state of a PR. Please familiarize yourself with them. Furthermore we are organizing issues in [milestones](https://github.com/paritytech/parity-ethereum/milestones). Best way to get started is to pick a ticket from the current milestone tagged [`easy`](https://github.com/paritytech/parity-ethereum/labels/Q2-easy%20%F0%9F%92%83) and get going, or [`mentor`](https://github.com/paritytech/parity-ethereum/labels/Q1-mentor%20%F0%9F%95%BA) and get in contact with the mentor offering their support on that larger task.
|
||||||
|
|
||||||
|
### Rules
|
||||||
|
|
||||||
|
There are a few basic ground-rules for contributors (including the maintainer(s) of the project):
|
||||||
|
|
||||||
|
* **No pushing directly to the master branch**.
|
||||||
|
* **All modifications** must be made in a **pull-request** to solicit feedback from other contributors.
|
||||||
|
* Pull-requests cannot be merged before CI runs green and two reviewers have given their approval.
|
||||||
|
* Contributors should adhere to the [Parity Ethereum Style Guide](https://wiki.parity.io/Parity-Ethereum-Style-Guide).
|
||||||
|
|
||||||
|
### Recommendations
|
||||||
|
|
||||||
|
* **Non-master branch names** *should* be prefixed with a short name moniker, followed by the associated Github Issue ID (if any), and a brief description of the task using the format `<GITHUB_USERNAME>-<ISSUE_ID>-<BRIEF_DESCRIPTION>` (e.g. `gavin-123-readme`). The name moniker helps people to inquire about their unfinished work, and the GitHub Issue ID helps your future self and other developers (particularly those who are onboarding) find out about and understand the original scope of the task, and where it fits into Parity Ethereum [Projects](https://github.com/paritytech/parity-ethereum/projects).
|
||||||
|
* **Remove stale branches periodically**
|
||||||
|
|
||||||
|
### Preparing Pull Requests
|
||||||
|
|
||||||
|
* If your PR does not alter any logic (e.g. comments, dependencies, docs), then it may be tagged [`insubstantial`](https://github.com/paritytech/parity-ethereum/pulls?q=is%3Aopen+is%3Apr+label%3A%22A2-insubstantial+%F0%9F%91%B6%22).
|
||||||
|
|
||||||
|
* Once a PR is ready for review please add the [`pleasereview`](https://github.com/paritytech/parity-ethereum/pulls?utf8=%E2%9C%93&q=is%3Aopen+is%3Apr+label%3A%22A0-pleasereview+%F0%9F%A4%93%22+) label.
|
||||||
|
|
||||||
|
### Reviewing Pull Requests:
|
||||||
|
|
||||||
|
* At least two reviewers are required to review PRs (even for PRs tagged [`insubstantial`](https://github.com/paritytech/parity-ethereum/pulls?q=is%3Aopen+is%3Apr+label%3A%22A2-insubstantial+%F0%9F%91%B6%22)).
|
||||||
|
|
||||||
|
When doing a review, make sure to look for any:
|
||||||
|
|
||||||
|
* Buggy behavior.
|
||||||
|
* Undue maintenance burden.
|
||||||
|
* Breaking with house coding style.
|
||||||
|
* Pessimization (i.e. reduction of speed as measured in the projects benchmarks).
|
||||||
|
* Breaking changes should be carefully reviewed and tagged as such so they end up in the [changelog](../CHANGELOG.md).
|
||||||
|
* Uselessness (i.e. it does not strictly add a feature or fix a known issue).
|
||||||
|
|
||||||
## License.
|
## License.
|
||||||
|
|
||||||
|
6
.github/ISSUE_TEMPLATE.md
vendored
6
.github/ISSUE_TEMPLATE.md
vendored
@ -1,10 +1,14 @@
|
|||||||
_Before filing a new issue, please **provide the following information**._
|
_Before filing a new issue, please **provide the following information**._
|
||||||
|
|
||||||
|
_If you think that your issue is an exploitable security vulnerability, please mail your bugreport to security@parity.io instead; your submission might be eligible for our Bug Bounty._
|
||||||
|
_You can find more info on the reporting process in [SECURITY.md](https://github.com/paritytech/parity-ethereum/blob/master/SECURITY.md)_
|
||||||
|
|
||||||
|
|
||||||
- **Parity Ethereum version**: 0.0.0
|
- **Parity Ethereum version**: 0.0.0
|
||||||
- **Operating system**: Windows / MacOS / Linux
|
- **Operating system**: Windows / MacOS / Linux
|
||||||
- **Installation**: homebrew / one-line installer / built from source
|
- **Installation**: homebrew / one-line installer / built from source
|
||||||
- **Fully synchronized**: no / yes
|
- **Fully synchronized**: no / yes
|
||||||
- **Network**: ethereum / ropsten / kovan / ...
|
- **Network**: ethereum / ropsten / goerli / ...
|
||||||
- **Restarted**: no / yes
|
- **Restarted**: no / yes
|
||||||
|
|
||||||
_Your issue description goes here below. Try to include **actual** vs. **expected behavior** and **steps to reproduce** the issue._
|
_Your issue description goes here below. Try to include **actual** vs. **expected behavior** and **steps to reproduce** the issue._
|
||||||
|
21
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
21
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
Thank you for your Pull Request!
|
||||||
|
|
||||||
|
Before submitting, please check that:
|
||||||
|
|
||||||
|
- [ ] You added a brief description of the PR, e.g.:
|
||||||
|
- What does it do?
|
||||||
|
- What important points reviewers should know?
|
||||||
|
- Is there something left for follow-up PRs?
|
||||||
|
- [ ] You labeled the PR with appropriate labels if you have permissions to do so.
|
||||||
|
- [ ] You mentioned a related issue if this PR related to it, e.g. `Fixes #228` or `Related #1337`.
|
||||||
|
- [ ] You asked any particular reviewers to review. If you aren't sure, start with GH suggestions.
|
||||||
|
- [ ] Your PR adheres [the style guide](https://wiki.parity.io/Coding-guide)
|
||||||
|
- In particular, mind the maximal line length.
|
||||||
|
- There is no commented code checked in unless necessary.
|
||||||
|
- Any panickers have a proof or are removed.
|
||||||
|
- [ ] You updated any rustdocs which may have changed
|
||||||
|
|
||||||
|
After you've read this notice feel free to remove it.
|
||||||
|
Thank you!
|
||||||
|
|
||||||
|
✄ -----------------------------------------------------------------------------
|
221
.gitlab-ci.yml
221
.gitlab-ci.yml
@ -4,17 +4,15 @@ stages:
|
|||||||
- publish
|
- publish
|
||||||
- optional
|
- optional
|
||||||
|
|
||||||
image: parity/rust-parity-ethereum-build:xenial
|
image: parity/parity-ci-linux:latest
|
||||||
variables:
|
variables:
|
||||||
GIT_STRATEGY: fetch
|
GIT_STRATEGY: fetch
|
||||||
GIT_SUBMODULE_STRATEGY: recursive
|
GIT_SUBMODULE_STRATEGY: recursive
|
||||||
CI_SERVER_NAME: "GitLab CI"
|
CI_SERVER_NAME: "GitLab CI"
|
||||||
|
CARGO_HOME: "/ci-cache/${CI_PROJECT_NAME}/cargo/${CI_JOB_NAME}"
|
||||||
|
SCCACHE_DIR: "/ci-cache/${CI_PROJECT_NAME}/sccache"
|
||||||
CARGO_TARGET: x86_64-unknown-linux-gnu
|
CARGO_TARGET: x86_64-unknown-linux-gnu
|
||||||
|
|
||||||
.no_git: &no_git # disable git strategy
|
|
||||||
variables:
|
|
||||||
GIT_STRATEGY: none
|
|
||||||
GIT_SUBMODULE_STRATEGY: none
|
|
||||||
|
|
||||||
.releaseable_branches: # list of git refs for building GitLab artifacts (think "pre-release binaries")
|
.releaseable_branches: # list of git refs for building GitLab artifacts (think "pre-release binaries")
|
||||||
only: &releaseable_branches
|
only: &releaseable_branches
|
||||||
@ -30,41 +28,72 @@ variables:
|
|||||||
expire_in: 1 mos
|
expire_in: 1 mos
|
||||||
paths:
|
paths:
|
||||||
- artifacts/
|
- artifacts/
|
||||||
|
- tools/
|
||||||
|
|
||||||
.docker-cache-status: &docker-cache-status
|
.docker-cache-status: &docker-cache-status
|
||||||
variables:
|
variables:
|
||||||
CARGO_HOME: "/ci-cache/parity-ethereum/cargo/${CI_JOB_NAME}"
|
CARGO_HOME: "/ci-cache/parity-ethereum/cargo/${CI_JOB_NAME}"
|
||||||
|
dependencies: []
|
||||||
before_script:
|
before_script:
|
||||||
- SCCACHE_ERROR_LOG=/builds/parity/parity-ethereum/sccache_error.log RUST_LOG=sccache::server=debug sccache --start-server
|
- rustup show
|
||||||
|
- cargo --version
|
||||||
|
- SCCACHE_ERROR_LOG=/builds/parity/parity-ethereum/sccache_debug.log
|
||||||
|
RUST_LOG=sccache=debug
|
||||||
|
sccache --start-server
|
||||||
- sccache -s
|
- sccache -s
|
||||||
after_script:
|
after_script:
|
||||||
- echo "All crate-types:"
|
# sccache debug info
|
||||||
- grep 'parse_arguments.*--crate-type' sccache_error.log | sed -re 's/.*"--crate-type", "([^"]+)".*/\1/' | sort | uniq -c
|
- if test -e sccache_debug.log;
|
||||||
- echo "Non-cacheable reasons:"
|
then
|
||||||
- grep CannotCache sccache_error.log | sed -re 's/.*CannotCache\((.+)\).*/\1/' | sort | uniq -c
|
echo "_____All crate-types:_____";
|
||||||
|
grep 'parse_arguments.*--crate-type' sccache_debug.log | sed -re 's/.*"--crate-type", "([^"]+)".*/\1/' | sort | uniq -c;
|
||||||
|
echo "_____Non-cacheable reasons:_____";
|
||||||
|
grep CannotCache sccache_debug.log | sed -re 's/.*CannotCache\((.+)\).*/\1/' | sort | uniq -c;
|
||||||
|
else
|
||||||
|
echo "_____No logs from sccache_____";
|
||||||
|
exit 0;
|
||||||
|
fi
|
||||||
tags:
|
tags:
|
||||||
- linux-docker
|
- linux-docker
|
||||||
|
|
||||||
|
.build-on-linux: &build-on-linux
|
||||||
|
stage: build
|
||||||
|
<<: *docker-cache-status
|
||||||
|
<<: *collect_artifacts
|
||||||
|
script:
|
||||||
|
- scripts/gitlab/build-linux.sh
|
||||||
|
- sccache -s
|
||||||
|
after_script:
|
||||||
|
- mkdir -p tools
|
||||||
|
- cp -r scripts/docker/hub/* ./tools
|
||||||
|
- cp scripts/gitlab/publish-snap.sh ./tools
|
||||||
|
- cp scripts/gitlab/publish-onchain.sh ./tools
|
||||||
|
- cp scripts/gitlab/safe-curl.sh ./tools
|
||||||
|
- echo v"$(sed -r -n '1,/^version/s/^version\s*=\s*"([^"]+)".*$/\1/p' Cargo.toml)" |
|
||||||
|
tee ./tools/VERSION
|
||||||
|
- echo "$(sed -r -n '1,/^track/s/^track\s*=\s*"([^"]+)".*$/\1/p' ./util/version/Cargo.toml)" |
|
||||||
|
tee ./tools/TRACK
|
||||||
|
|
||||||
|
|
||||||
cargo-check 0 3:
|
cargo-check 0 3:
|
||||||
stage: test
|
stage: test
|
||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
script:
|
script:
|
||||||
- time cargo check --target $CARGO_TARGET --locked --no-default-features
|
- time cargo check --target $CARGO_TARGET --locked --no-default-features --verbose --color=always
|
||||||
- sccache -s
|
- sccache -s
|
||||||
|
|
||||||
cargo-check 1 3:
|
cargo-check 1 3:
|
||||||
stage: test
|
stage: test
|
||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
script:
|
script:
|
||||||
- time cargo check --target $CARGO_TARGET --locked --manifest-path util/io/Cargo.toml --no-default-features
|
- time cargo check --target $CARGO_TARGET --locked --manifest-path util/io/Cargo.toml --no-default-features --verbose --color=always
|
||||||
- sccache -s
|
- sccache -s
|
||||||
|
|
||||||
cargo-check 2 3:
|
cargo-check 2 3:
|
||||||
stage: test
|
stage: test
|
||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
script:
|
script:
|
||||||
- time cargo check --target $CARGO_TARGET --locked --manifest-path util/io/Cargo.toml --features "mio"
|
- time cargo check --target $CARGO_TARGET --locked --manifest-path util/io/Cargo.toml --features "mio" --verbose --color=always
|
||||||
- sccache -s
|
- sccache -s
|
||||||
|
|
||||||
cargo-audit:
|
cargo-audit:
|
||||||
@ -72,7 +101,7 @@ cargo-audit:
|
|||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
script:
|
script:
|
||||||
- cargo audit
|
- cargo audit
|
||||||
- sccache -s
|
allow_failure: true # failed cargo audit shouldn't prevent a PR from being merged
|
||||||
|
|
||||||
validate-chainspecs:
|
validate-chainspecs:
|
||||||
stage: test
|
stage: test
|
||||||
@ -92,52 +121,61 @@ test-linux:
|
|||||||
stage: build
|
stage: build
|
||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
script:
|
script:
|
||||||
- ./scripts/gitlab/test-linux.sh
|
- ./scripts/gitlab/test-linux.sh stable
|
||||||
- sccache -s
|
- sccache -s
|
||||||
|
|
||||||
build-android:
|
test-linux-beta:
|
||||||
stage: build
|
|
||||||
image: parity/rust-parity-ethereum-android-build:stretch
|
|
||||||
variables:
|
|
||||||
CARGO_TARGET: armv7-linux-androideabi
|
|
||||||
<<: *docker-cache-status
|
|
||||||
<<: *collect_artifacts
|
|
||||||
script:
|
|
||||||
- scripts/gitlab/build-linux.sh
|
|
||||||
tags:
|
|
||||||
- linux-docker
|
|
||||||
|
|
||||||
build-linux: &build-linux
|
|
||||||
stage: build
|
stage: build
|
||||||
only: *releaseable_branches
|
only: *releaseable_branches
|
||||||
<<: *docker-cache-status
|
<<: *docker-cache-status
|
||||||
<<: *collect_artifacts
|
|
||||||
script:
|
script:
|
||||||
- scripts/gitlab/build-linux.sh
|
- ./scripts/gitlab/test-linux.sh beta
|
||||||
- sccache -s
|
- sccache -s
|
||||||
|
|
||||||
|
test-linux-nightly:
|
||||||
|
stage: build
|
||||||
|
only: *releaseable_branches
|
||||||
|
<<: *docker-cache-status
|
||||||
|
script:
|
||||||
|
- ./scripts/gitlab/test-linux.sh nightly
|
||||||
|
- sccache -s
|
||||||
|
allow_failure: true
|
||||||
|
|
||||||
|
build-android:
|
||||||
|
<<: *build-on-linux
|
||||||
|
image: parity/parity-ci-android:stretch
|
||||||
|
variables:
|
||||||
|
CARGO_TARGET: armv7-linux-androideabi
|
||||||
|
|
||||||
|
build-linux:
|
||||||
|
<<: *build-on-linux
|
||||||
|
only: *releaseable_branches
|
||||||
|
|
||||||
build-linux-i386:
|
build-linux-i386:
|
||||||
<<: *build-linux
|
<<: *build-on-linux
|
||||||
image: parity/rust-parity-ethereum-build:i386
|
only: *releaseable_branches
|
||||||
|
image: parity/parity-ci-i386:latest
|
||||||
variables:
|
variables:
|
||||||
CARGO_TARGET: i686-unknown-linux-gnu
|
CARGO_TARGET: i686-unknown-linux-gnu
|
||||||
|
|
||||||
build-linux-arm64:
|
build-linux-arm64:
|
||||||
<<: *build-linux
|
<<: *build-on-linux
|
||||||
image: parity/rust-parity-ethereum-build:arm64
|
only: *releaseable_branches
|
||||||
|
image: parity/parity-ci-arm64:latest
|
||||||
variables:
|
variables:
|
||||||
CARGO_TARGET: aarch64-unknown-linux-gnu
|
CARGO_TARGET: aarch64-unknown-linux-gnu
|
||||||
|
|
||||||
build-linux-armhf:
|
build-linux-armhf:
|
||||||
<<: *build-linux
|
<<: *build-on-linux
|
||||||
image: parity/rust-parity-ethereum-build:armhf
|
only: *releaseable_branches
|
||||||
|
image: parity/parity-ci-armhf:latest
|
||||||
variables:
|
variables:
|
||||||
CARGO_TARGET: armv7-unknown-linux-gnueabihf
|
CARGO_TARGET: armv7-unknown-linux-gnueabihf
|
||||||
|
|
||||||
build-darwin:
|
build-darwin:
|
||||||
stage: build
|
stage: build
|
||||||
only: *releaseable_branches
|
|
||||||
<<: *collect_artifacts
|
<<: *collect_artifacts
|
||||||
|
only: *releaseable_branches
|
||||||
variables:
|
variables:
|
||||||
CARGO_TARGET: x86_64-apple-darwin
|
CARGO_TARGET: x86_64-apple-darwin
|
||||||
CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"
|
CARGO_HOME: "${CI_PROJECT_DIR}/.cargo"
|
||||||
@ -164,20 +202,36 @@ build-windows:
|
|||||||
publish-docker:
|
publish-docker:
|
||||||
stage: publish
|
stage: publish
|
||||||
only: *releaseable_branches
|
only: *releaseable_branches
|
||||||
cache: {}
|
except:
|
||||||
|
- nightly
|
||||||
|
when: manual
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux
|
- build-linux
|
||||||
tags:
|
environment:
|
||||||
- shell
|
name: parity-build
|
||||||
|
cache: {}
|
||||||
|
image: docker:stable
|
||||||
|
services:
|
||||||
|
- docker:dind
|
||||||
|
variables:
|
||||||
|
GIT_STRATEGY: none
|
||||||
|
DOCKER_HOST: tcp://localhost:2375
|
||||||
|
DOCKER_DRIVER: overlay2
|
||||||
|
GIT_STRATEGY: none
|
||||||
|
# DOCKERFILE: tools/Dockerfile
|
||||||
|
# CONTAINER_IMAGE: parity/parity
|
||||||
script:
|
script:
|
||||||
- scripts/gitlab/publish-docker.sh parity
|
- ./tools/publish-docker.sh
|
||||||
|
tags:
|
||||||
|
- kubernetes-parity-build
|
||||||
|
|
||||||
publish-snap: &publish-snap
|
publish-snap-nightly: &publish-snap
|
||||||
stage: publish
|
stage: publish
|
||||||
only: *releaseable_branches
|
only:
|
||||||
<<: *collect_artifacts
|
- nightly
|
||||||
image: snapcore/snapcraft
|
image: snapcore/snapcraft
|
||||||
variables:
|
variables:
|
||||||
|
GIT_STRATEGY: none
|
||||||
BUILD_ARCH: amd64
|
BUILD_ARCH: amd64
|
||||||
cache: {}
|
cache: {}
|
||||||
dependencies:
|
dependencies:
|
||||||
@ -185,47 +239,80 @@ publish-snap: &publish-snap
|
|||||||
tags:
|
tags:
|
||||||
- linux-docker
|
- linux-docker
|
||||||
script:
|
script:
|
||||||
- scripts/gitlab/publish-snap.sh
|
- ./tools/publish-snap.sh
|
||||||
|
|
||||||
publish-snap-i386:
|
publish-snap-manually:
|
||||||
|
<<: *publish-snap
|
||||||
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
|
publish-snap-i386-nightly: &publish-snap-i386
|
||||||
<<: *publish-snap
|
<<: *publish-snap
|
||||||
variables:
|
variables:
|
||||||
BUILD_ARCH: i386
|
BUILD_ARCH: i386
|
||||||
|
CARGO_TARGET: i686-unknown-linux-gnu
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux-i386
|
- build-linux-i386
|
||||||
|
|
||||||
publish-snap-arm64:
|
publish-snap-i386-manually:
|
||||||
|
<<: *publish-snap-i386
|
||||||
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
|
publish-snap-arm64-nightly: &publish-snap-arm64
|
||||||
<<: *publish-snap
|
<<: *publish-snap
|
||||||
variables:
|
variables:
|
||||||
BUILD_ARCH: arm64
|
BUILD_ARCH: arm64
|
||||||
|
CARGO_TARGET: aarch64-unknown-linux-gnu
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux-arm64
|
- build-linux-arm64
|
||||||
|
|
||||||
publish-snap-armhf:
|
publish-snap-arm64-manually:
|
||||||
|
<<: *publish-snap-arm64
|
||||||
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
|
publish-snap-armhf-nightly: &publish-snap-armhf
|
||||||
<<: *publish-snap
|
<<: *publish-snap
|
||||||
variables:
|
variables:
|
||||||
BUILD_ARCH: armhf
|
BUILD_ARCH: armhf
|
||||||
|
CARGO_TARGET: armv7-unknown-linux-gnueabihf
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux-armhf
|
- build-linux-armhf
|
||||||
|
|
||||||
publish-onchain:
|
publish-snap-armhf-manually:
|
||||||
stage: publish
|
<<: *publish-snap-armhf
|
||||||
only: *releaseable_branches
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
|
publish-onchain-nightly: &publish-onchain
|
||||||
|
stage: publish
|
||||||
|
only:
|
||||||
|
- nightly
|
||||||
cache: {}
|
cache: {}
|
||||||
|
variables:
|
||||||
|
GIT_STRATEGY: none
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux
|
- build-linux
|
||||||
- build-darwin
|
- build-darwin
|
||||||
- build-windows
|
- build-windows
|
||||||
script:
|
script:
|
||||||
- scripts/gitlab/publish-onchain.sh
|
- ./tools/publish-onchain.sh
|
||||||
tags:
|
tags:
|
||||||
- linux-docker
|
- linux-docker
|
||||||
|
|
||||||
publish-awss3-release:
|
publish-onchain-manually:
|
||||||
|
<<: *publish-onchain
|
||||||
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
|
publish-release-awss3-nightly: &publish-release-awss3
|
||||||
image: parity/awscli:latest
|
image: parity/awscli:latest
|
||||||
stage: publish
|
stage: publish
|
||||||
only: *releaseable_branches
|
only:
|
||||||
<<: *no_git
|
- nightly
|
||||||
|
variables:
|
||||||
|
GIT_STRATEGY: none
|
||||||
cache: {}
|
cache: {}
|
||||||
dependencies:
|
dependencies:
|
||||||
- build-linux
|
- build-linux
|
||||||
@ -247,15 +334,39 @@ publish-awss3-release:
|
|||||||
tags:
|
tags:
|
||||||
- linux-docker
|
- linux-docker
|
||||||
|
|
||||||
|
publish-release-awss3-manually:
|
||||||
|
<<: *publish-release-awss3
|
||||||
|
only: *releaseable_branches
|
||||||
|
when: manual
|
||||||
|
|
||||||
publish-docs:
|
publish-docs:
|
||||||
stage: publish
|
stage: publish
|
||||||
image: parity/rust-parity-ethereum-docs:xenial
|
image: parity/parity-ci-docs:latest
|
||||||
only:
|
only:
|
||||||
- tags
|
- tags
|
||||||
except:
|
except:
|
||||||
- nightly
|
- nightly
|
||||||
|
when: manual
|
||||||
cache: {}
|
cache: {}
|
||||||
|
dependencies: []
|
||||||
script:
|
script:
|
||||||
- scripts/gitlab/publish-docs.sh
|
- scripts/gitlab/publish-docs.sh
|
||||||
tags:
|
tags:
|
||||||
- linux-docker
|
- linux-docker
|
||||||
|
allow_failure: true
|
||||||
|
|
||||||
|
publish-av-whitelist:
|
||||||
|
stage: publish
|
||||||
|
variables:
|
||||||
|
GIT_STRATEGY: none
|
||||||
|
only: *releaseable_branches
|
||||||
|
except:
|
||||||
|
- nightly
|
||||||
|
when: manual
|
||||||
|
cache: {}
|
||||||
|
dependencies:
|
||||||
|
- build-windows
|
||||||
|
script:
|
||||||
|
- scripts/gitlab/publish-av-whitelists.sh
|
||||||
|
tags:
|
||||||
|
- linux-docker
|
||||||
|
333
CHANGELOG.md
333
CHANGELOG.md
@ -1,151 +1,216 @@
|
|||||||
## Parity-Ethereum [v2.4.3](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.3) (2019-03-22)
|
## Parity-Ethereum [v2.6.4](https://github.com/paritytech/parity-ethereum/releases/tag/v2.6.3)
|
||||||
|
|
||||||
Parity-Ethereum 2.4.3-beta is a bugfix release that improves performance and stability. This patch release contains a critical bug fix where serving light clients previously led to client crashes. Upgrading is highly recommended.
|
Parity Ethereum v2.6.4-stable is a patch release that adds the block numbers for activating the Istanbul hardfork on test networks: Ropsten, Görli, Rinkeby and Kovan.
|
||||||
|
|
||||||
|
A full list of included changes:
|
||||||
|
|
||||||
|
* ethcore/res: activate Istanbul on Ropsten, Görli, Rinkeby, Kovan (#11068)
|
||||||
|
* cleanup json crate (#11027)
|
||||||
|
* [json-spec] make blake2 pricing spec more readable (#11034)
|
||||||
|
* Update JSON tests to d4f86ecf4aa7c (#11054)
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.6.3](https://github.com/paritytech/parity-ethereum/releases/tag/v2.6.3)
|
||||||
|
|
||||||
|
Parity Ethereum v2.6.3-stable is a patch release that improves security, stability and performance.
|
||||||
|
|
||||||
|
* The most noteworthy improvement in this release is incorporating all the EIPs required for the Istanbul hard fork.
|
||||||
|
* This release also fixes certain security and performance issues, one of which was suspected to be consensus-threatening but turned out to be benign. Thanks to Martin Holst Swende and Felix Lange from the Ethereum Foundation for bringing the suspicious issue to our attention.
|
||||||
|
|
||||||
The full list of included changes:
|
The full list of included changes:
|
||||||
- 2.4.3 beta backports ([#10508](https://github.com/paritytech/parity-ethereum/pull/10508))
|
|
||||||
- Version: bump beta
|
|
||||||
- Add additional request tests ([#10503](https://github.com/paritytech/parity-ethereum/pull/10503))
|
|
||||||
|
|
||||||
## Parity-Ethereum [v2.4.2](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.2) (2019-03-20)
|
* add more tx tests (#11038)
|
||||||
|
* Fix parallel transactions race-condition (#10995)
|
||||||
|
* Add blake2_f precompile (#11017)
|
||||||
|
* [trace] introduce trace failed to Ext (#11019)
|
||||||
|
* Edit publish-onchain.sh to use https (#11016)
|
||||||
|
* Fix deadlock in network-devp2p (#11013)
|
||||||
|
* EIP 1108: Reduce alt_bn128 precompile gas costs (#11008)
|
||||||
|
* xDai chain support and nodes list update (#10989)
|
||||||
|
* EIP 2028: transaction gas lowered from 68 to 16 (#10987)
|
||||||
|
* EIP-1344 Add CHAINID op-code (#10983)
|
||||||
|
* manual publish jobs for releases, no changes for nightlies (#10977)
|
||||||
|
* [blooms-db] Fix benchmarks (#10974)
|
||||||
|
* Verify transaction against its block during import (#10954)
|
||||||
|
* Better error message for rpc gas price errors (#10931)
|
||||||
|
* Fix fork choice (#10837)
|
||||||
|
* Fix compilation on recent nightlies (#10991)
|
||||||
|
* Don't build rpc with ethcore test-helpers (#11048)
|
||||||
|
* EIP 1884 Re-pricing of trie-size dependent operations (#10992)
|
||||||
|
* Implement EIP-1283 reenable transition, EIP-1706 and EIP-2200 (#10191)
|
||||||
|
|
||||||
Parity-Ethereum 2.4.2-beta is a bugfix release that improves performance and stability.
|
## Parity-Ethereum [v2.6.2](https://github.com/paritytech/parity-ethereum/releases/tag/v2.6.2)
|
||||||
|
|
||||||
|
Parity Ethereum v2.6.2-stable is a bugfix release that fixes a potential DoS attack in the trace_call RPC method. This is a critical upgrade for anyone running Parity nodes with RPC exposed to the public internet (and highly recommended for anyone else). For details see this blog post.
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.6.1](https://github.com/paritytech/parity-ethereum/releases/tag/v2.6.1)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.6.1-beta is a patch release that improves stability.
|
||||||
|
|
||||||
|
This release includes:
|
||||||
|
* Allow specifying hostnames for node URLs
|
||||||
|
* Fix a bug where archive nodes were losing peers
|
||||||
|
* Add support for Energy Web Foundations new chains 'Volta' and 'EWC', and remove their deprecated 'Tobalaba' chain.
|
||||||
|
|
||||||
The full list of included changes:
|
The full list of included changes:
|
||||||
- 2.4.2 beta backports ([#10488](https://github.com/paritytech/parity-ethereum/pull/10488))
|
* Add support for Energy Web Foundation's new chains (#10957)
|
||||||
- Version: bump beta
|
* Kaspersky AV whitelisting (#10919)
|
||||||
- Сaching through docker volume ([#10477](https://github.com/paritytech/parity-ethereum/pull/10477))
|
* Avast whitelist script (#10900)
|
||||||
- fix win&mac build ([#10486](https://github.com/paritytech/parity-ethereum/pull/10486))
|
* Docker images renaming (#10863)
|
||||||
- fix(extract `timestamp_checked_add` as lib) ([#10383](https://github.com/paritytech/parity-ethereum/pull/10383))
|
* Remove excessive warning (#10831)
|
||||||
|
* Allow --nat extip:your.host.here.org (#10830)
|
||||||
|
* When updating the client or when called from RPC, sleep should mean sleep (#10814)
|
||||||
|
* added new ropsten-bootnode and removed old one (#10794)
|
||||||
|
* ethkey no longer uses byteorder (#10786)
|
||||||
|
* docs: Update Readme with TOC, Contributor Guideline. Update Cargo package descriptions (#10652)
|
||||||
|
|
||||||
## Parity-Ethereum [v2.4.1](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.1) (2019-03-19)
|
## Parity-Ethereum [v2.6.0](https://github.com/paritytech/parity-ethereum/releases/tag/v2.6.0)
|
||||||
|
|
||||||
Parity-Ethereum 2.4.1-beta is a bugfix release that improves performance and stability.
|
Parity-Ethereum 2.6.0-beta is a minor release that stabilizes the 2.6 branch by
|
||||||
|
marking it as a beta release.
|
||||||
|
|
||||||
|
This release includes:
|
||||||
|
* Major refactoring of the codebase
|
||||||
|
* Many bugfixes
|
||||||
|
* Significant improvements to logging, error and warning message clarity.
|
||||||
|
* SecretStore: remove support of old database formats (#10757)
|
||||||
|
* This is a potentially breaking change if you have not upgraded for
|
||||||
|
quite some time.
|
||||||
|
|
||||||
|
As of today, Parity-Ethereum 2.4 reaches end of life, and everyone is
|
||||||
|
encouraged to upgrade.
|
||||||
|
|
||||||
The full list of included changes:
|
The full list of included changes:
|
||||||
- 2.4.1 beta backports ([#10471](https://github.com/paritytech/parity-ethereum/pull/10471))
|
* update jsonrpc to 12.0 ([#10841](https://github.com/paritytech/parity-ethereum/pull/10841))
|
||||||
- Version: bump beta
|
* Move more code into state-account ([#10840](https://github.com/paritytech/parity-ethereum/pull/10840))
|
||||||
- Implement parity_versionInfo & parity_setChain on LC; fix parity_setChain ([#10312](https://github.com/paritytech/parity-ethereum/pull/10312))
|
* Extract AccountDB to account-db ([#10839](https://github.com/paritytech/parity-ethereum/pull/10839))
|
||||||
- CI publish to aws ([#10446](https://github.com/paritytech/parity-ethereum/pull/10446))
|
* Extricate PodAccount and state Account to own crates ([#10838](https://github.com/paritytech/parity-ethereum/pull/10838))
|
||||||
- CI aws git checkout ([#10451](https://github.com/paritytech/parity-ethereum/pull/10451))
|
* Fix fork choice ([#10837](https://github.com/paritytech/parity-ethereum/pull/10837))
|
||||||
- Revert "CI aws git checkout ([#10451](https://github.com/paritytech/parity-ethereum/pull/10451))" ([#10456](https://github.com/paritytech/parity-ethereum/pull/10456))
|
* tests: Relates to #10655: Test instructions for Readme ([#10835](https://github.com/paritytech/parity-ethereum/pull/10835))
|
||||||
- Tests parallelized ([#10452](https://github.com/paritytech/parity-ethereum/pull/10452))
|
* idiomatic changes to PodState ([#10834](https://github.com/paritytech/parity-ethereum/pull/10834))
|
||||||
- Ensure static validator set changes are recognized ([#10467](https://github.com/paritytech/parity-ethereum/pull/10467))
|
* Break circular dependency between Client and Engine (part 1) ([#10833](https://github.com/paritytech/parity-ethereum/pull/10833))
|
||||||
|
* Remove excessive warning ([#10831](https://github.com/paritytech/parity-ethereum/pull/10831))
|
||||||
## Parity-Ethereum [v2.4.0](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.0) (2019-02-25)
|
* Allow --nat extip:your.host.here.org ([#10830](https://github.com/paritytech/parity-ethereum/pull/10830))
|
||||||
|
* ethcore does not use byteorder ([#10829](https://github.com/paritytech/parity-ethereum/pull/10829))
|
||||||
Parity-Ethereum 2.4.0-beta is our trifortnightly minor version release coming with a lot of new features as well as bugfixes and performance improvements.
|
* Fix typo in README.md ([#10828](https://github.com/paritytech/parity-ethereum/pull/10828))
|
||||||
|
* Update wordlist to v1.3 ([#10823](https://github.com/paritytech/parity-ethereum/pull/10823))
|
||||||
Notable changes:
|
* bump `smallvec 0.6.10` to fix vulnerability ([#10822](https://github.com/paritytech/parity-ethereum/pull/10822))
|
||||||
- Account management is now deprecated ([#10213](https://github.com/paritytech/parity-ethereum/pull/10213))
|
* removed additional_params method ([#10818](https://github.com/paritytech/parity-ethereum/pull/10818))
|
||||||
- Local accounts can now be specified via CLI ([#9960](https://github.com/paritytech/parity-ethereum/pull/9960))
|
* Improve logging when remote peer is unknown ([#10817](https://github.com/paritytech/parity-ethereum/pull/10817))
|
||||||
- Chains can now be reset to a particular block via CLI ([#9782](https://github.com/paritytech/parity-ethereum/pull/9782))
|
* replace memzero with zeroize crate ([#10816](https://github.com/paritytech/parity-ethereum/pull/10816))
|
||||||
- Ethash now additionally implements ProgPoW ([#9762](https://github.com/paritytech/parity-ethereum/pull/9762))
|
* When updating the client or when called from RPC, sleep should mean sleep ([#10814](https://github.com/paritytech/parity-ethereum/pull/10814))
|
||||||
- The `eip1283DisableTransition` flag was added to revert EIP-1283 ([#10214](https://github.com/paritytech/parity-ethereum/pull/10214))
|
* Don't reimplement the logic from the Default impl ([#10813](https://github.com/paritytech/parity-ethereum/pull/10813))
|
||||||
|
* refactor: whisper: Add type aliases and update rustdocs in message.rs ([#10812](https://github.com/paritytech/parity-ethereum/pull/10812))
|
||||||
The full list of included changes:
|
* test: whisper/cli `add invalid pool size test depending on processor` ([#10811](https://github.com/paritytech/parity-ethereum/pull/10811))
|
||||||
- More Backports for Beta 2.4.0 ([#10431](https://github.com/paritytech/parity-ethereum/pull/10431))
|
* Add Constantinople EIPs to the dev (instant_seal) config ([#10809](https://github.com/paritytech/parity-ethereum/pull/10809))
|
||||||
- Revert some changes, could be buggy ([#10399](https://github.com/paritytech/parity-ethereum/pull/10399))
|
* fix spurious test failure ([#10808](https://github.com/paritytech/parity-ethereum/pull/10808))
|
||||||
- Ci: clean up gitlab-ci.yml leftovers from previous merge ([#10429](https://github.com/paritytech/parity-ethereum/pull/10429))
|
* revert temp changes to .gitlab-ci.yml ([#10807](https://github.com/paritytech/parity-ethereum/pull/10807))
|
||||||
- 10000 > 5000 ([#10422](https://github.com/paritytech/parity-ethereum/pull/10422))
|
* removed redundant fmt::Display implementations ([#10806](https://github.com/paritytech/parity-ethereum/pull/10806))
|
||||||
- Fix underflow in pip, closes [#10419](https://github.com/paritytech/parity-ethereum/pull/10419) ([#10423](https://github.com/paritytech/parity-ethereum/pull/10423))
|
* removed EthEngine alias ([#10805](https://github.com/paritytech/parity-ethereum/pull/10805))
|
||||||
- Fix panic when logging directory does not exist, closes [#10420](https://github.com/paritytech/parity-ethereum/pull/10420) ([#10424](https://github.com/paritytech/parity-ethereum/pull/10424))
|
* ethcore-bloom-journal updated to 2018 ([#10804](https://github.com/paritytech/parity-ethereum/pull/10804))
|
||||||
- Update hardcoded headers for Foundation, Ropsten, Kovan and Classic ([#10417](https://github.com/paritytech/parity-ethereum/pull/10417))
|
* Fix a few typos and unused warnings. ([#10803](https://github.com/paritytech/parity-ethereum/pull/10803))
|
||||||
- Backports for Beta 2.4.0 ([#10416](https://github.com/paritytech/parity-ethereum/pull/10416))
|
* updated price-info to edition 2018 ([#10801](https://github.com/paritytech/parity-ethereum/pull/10801))
|
||||||
- No-git for publish jobs, empty artifacts dir ([#10393](https://github.com/paritytech/parity-ethereum/pull/10393))
|
* updated parity-local-store to edition 2018 ([#10800](https://github.com/paritytech/parity-ethereum/pull/10800))
|
||||||
- Snap: reenable i386, arm64, armhf architecture publishing ([#10386](https://github.com/paritytech/parity-ethereum/pull/10386))
|
* updated project to ansi_term 0.11 ([#10799](https://github.com/paritytech/parity-ethereum/pull/10799))
|
||||||
- Tx pool: always accept local transactions ([#10375](https://github.com/paritytech/parity-ethereum/pull/10375))
|
* ethcore-light uses bincode 1.1 ([#10798](https://github.com/paritytech/parity-ethereum/pull/10798))
|
||||||
- Fix to_pod storage trie value decoding ([#10368](https://github.com/paritytech/parity-ethereum/pull/10368))
|
* ethcore-network-devp2p uses igd 0.9 ([#10797](https://github.com/paritytech/parity-ethereum/pull/10797))
|
||||||
- Version: mark 2.4.0 beta
|
* Better logging when backfilling ancient blocks fail ([#10796](https://github.com/paritytech/parity-ethereum/pull/10796))
|
||||||
- Update to latest mem-db, hash-db and trie-db. ([#10314](https://github.com/paritytech/parity-ethereum/pull/10314))
|
* added new ropsten-bootnode and removed old one ([#10794](https://github.com/paritytech/parity-ethereum/pull/10794))
|
||||||
- Tx pool: always accept local transactions ([#10375](https://github.com/paritytech/parity-ethereum/pull/10375))
|
* Removed machine abstraction from ethcore ([#10791](https://github.com/paritytech/parity-ethereum/pull/10791))
|
||||||
- Fix(trace_main! macro): don't re-export ([#10384](https://github.com/paritytech/parity-ethereum/pull/10384))
|
* Removed redundant ethcore-service error type ([#10788](https://github.com/paritytech/parity-ethereum/pull/10788))
|
||||||
- Exchanged old(azure) bootnodes with new(ovh) ones ([#10309](https://github.com/paritytech/parity-ethereum/pull/10309))
|
* Cleanup unused vm dependencies ([#10787](https://github.com/paritytech/parity-ethereum/pull/10787))
|
||||||
- Ethash: implement Progpow ([#9762](https://github.com/paritytech/parity-ethereum/pull/9762))
|
* ethkey no longer uses byteorder ([#10786](https://github.com/paritytech/parity-ethereum/pull/10786))
|
||||||
- Snap: add the removable-media plug ([#10377](https://github.com/paritytech/parity-ethereum/pull/10377))
|
* Updated blooms-db to rust 2018 and removed redundant deps ([#10785](https://github.com/paritytech/parity-ethereum/pull/10785))
|
||||||
- Add message to IO errors ([#10324](https://github.com/paritytech/parity-ethereum/pull/10324))
|
* Treat empty account the same as non-exist accounts in EIP-1052 ([#10775](https://github.com/paritytech/parity-ethereum/pull/10775))
|
||||||
- Chore(bump parity-daemonize): require rust >= 1.31 ([#10359](https://github.com/paritytech/parity-ethereum/pull/10359))
|
* Do not drop the peer with None difficulty ([#10772](https://github.com/paritytech/parity-ethereum/pull/10772))
|
||||||
- Secretstore: use in-memory transport in cluster tests ([#9850](https://github.com/paritytech/parity-ethereum/pull/9850))
|
* EIP-1702: Generalized Account Versioning Scheme ([#10771](https://github.com/paritytech/parity-ethereum/pull/10771))
|
||||||
- Add fields to `memzero`'s Cargo.toml ([#10362](https://github.com/paritytech/parity-ethereum/pull/10362))
|
* Move Engine::register_client to be before other I/O handler registration ([#10767](https://github.com/paritytech/parity-ethereum/pull/10767))
|
||||||
- Snap: release untagged versions from branches to the candidate snap channel ([#10357](https://github.com/paritytech/parity-ethereum/pull/10357))
|
* ethcore/res: activate atlantis classic hf on block 8772000 ([#10766](https://github.com/paritytech/parity-ethereum/pull/10766))
|
||||||
- Fix(compilation warns): `no-default-features` ([#10346](https://github.com/paritytech/parity-ethereum/pull/10346))
|
* Updated Bn128PairingImpl to use optimized batch pairing ([#10765](https://github.com/paritytech/parity-ethereum/pull/10765))
|
||||||
- No volumes are needed, just run -v volume:/path/in/the/container ([#10345](https://github.com/paritytech/parity-ethereum/pull/10345))
|
* Remove unused code ([#10762](https://github.com/paritytech/parity-ethereum/pull/10762))
|
||||||
- Fixed misstype ([#10351](https://github.com/paritytech/parity-ethereum/pull/10351))
|
* Initialize private tx logger only if private tx functionality is enabled ([#10758](https://github.com/paritytech/parity-ethereum/pull/10758))
|
||||||
- Snap: prefix version and populate candidate channel ([#10343](https://github.com/paritytech/parity-ethereum/pull/10343))
|
* SecretStore: remove support of old database formats ([#10757](https://github.com/paritytech/parity-ethereum/pull/10757))
|
||||||
- Bundle protocol and packet_id together in chain sync ([#10315](https://github.com/paritytech/parity-ethereum/pull/10315))
|
* Enable aesni ([#10756](https://github.com/paritytech/parity-ethereum/pull/10756))
|
||||||
- Role back docker build image and docker deploy image to ubuntu:xenial… ([#10338](https://github.com/paritytech/parity-ethereum/pull/10338))
|
* updater: fix static id hashes initialization ([#10755](https://github.com/paritytech/parity-ethereum/pull/10755))
|
||||||
- Change docker image based on debian instead of ubuntu due to the chan… ([#10336](https://github.com/paritytech/parity-ethereum/pull/10336))
|
* Use fewer threads for snapshotting ([#10752](https://github.com/paritytech/parity-ethereum/pull/10752))
|
||||||
- Don't add discovery initiators to the node table ([#10305](https://github.com/paritytech/parity-ethereum/pull/10305))
|
* Die error_chain, die ([#10747](https://github.com/paritytech/parity-ethereum/pull/10747))
|
||||||
- Fix(docker): fix not receives SIGINT ([#10059](https://github.com/paritytech/parity-ethereum/pull/10059))
|
* Fix deprectation warnings on nightly ([#10746](https://github.com/paritytech/parity-ethereum/pull/10746))
|
||||||
- Snap: official image / test ([#10168](https://github.com/paritytech/parity-ethereum/pull/10168))
|
* Improve logging and cleanup in miner around block sealing ([#10745](https://github.com/paritytech/parity-ethereum/pull/10745))
|
||||||
- Fix(add helper for timestamp overflows) ([#10330](https://github.com/paritytech/parity-ethereum/pull/10330))
|
* Add a way to signal shutdown to snapshotting threads ([#10744](https://github.com/paritytech/parity-ethereum/pull/10744))
|
||||||
- Additional error for invalid gas ([#10327](https://github.com/paritytech/parity-ethereum/pull/10327))
|
* fix docker tags for publishing ([#10741](https://github.com/paritytech/parity-ethereum/pull/10741))
|
||||||
- Revive parity_setMinGasPrice RPC call ([#10294](https://github.com/paritytech/parity-ethereum/pull/10294))
|
* refactor: Fix indentation in ethjson ([#10740](https://github.com/paritytech/parity-ethereum/pull/10740))
|
||||||
- Add Statetest support for Constantinople Fix ([#10323](https://github.com/paritytech/parity-ethereum/pull/10323))
|
* Log validator set changes in EpochManager ([#10734](https://github.com/paritytech/parity-ethereum/pull/10734))
|
||||||
- Fix(parity-clib): grumbles that were not addressed in [#9920](https://github.com/paritytech/parity-ethereum/pull/9920) ([#10154](https://github.com/paritytech/parity-ethereum/pull/10154))
|
* Print warnings when using dangerous settings for ValidatorSet ([#10733](https://github.com/paritytech/parity-ethereum/pull/10733))
|
||||||
- Fix(light-rpc): Make `light_sync` generic ([#10238](https://github.com/paritytech/parity-ethereum/pull/10238))
|
* ethcore: enable ECIP-1054 for classic ([#10731](https://github.com/paritytech/parity-ethereum/pull/10731))
|
||||||
- Fix publish job ([#10317](https://github.com/paritytech/parity-ethereum/pull/10317))
|
* Stop breaking out of loop if a non-canonical hash is found ([#10729](https://github.com/paritytech/parity-ethereum/pull/10729))
|
||||||
- Secure WS-RPC: grant access to all apis ([#10246](https://github.com/paritytech/parity-ethereum/pull/10246))
|
* Removed secret_store folder ([#10722](https://github.com/paritytech/parity-ethereum/pull/10722))
|
||||||
- Make specification of protocol in SyncRequester::send_request explicit ([#10295](https://github.com/paritytech/parity-ethereum/pull/10295))
|
* Revert "enable lto for release builds (#10717)" ([#10721](https://github.com/paritytech/parity-ethereum/pull/10721))
|
||||||
- Fix: parity-clib/examples/cpp/CMakeLists.txt ([#10313](https://github.com/paritytech/parity-ethereum/pull/10313))
|
* fix: aura don't add `SystemTime::now()` ([#10720](https://github.com/paritytech/parity-ethereum/pull/10720))
|
||||||
- Ci optimizations ([#10297](https://github.com/paritytech/parity-ethereum/pull/10297))
|
* Use RUSTFLAGS to set the optimization level ([#10719](https://github.com/paritytech/parity-ethereum/pull/10719))
|
||||||
- Increase number of requested block bodies in chain sync ([#10247](https://github.com/paritytech/parity-ethereum/pull/10247))
|
* enable lto for release builds ([#10717](https://github.com/paritytech/parity-ethereum/pull/10717))
|
||||||
- Deprecate account management ([#10213](https://github.com/paritytech/parity-ethereum/pull/10213))
|
* [devp2p] Update to 2018 edition ([#10716](https://github.com/paritytech/parity-ethereum/pull/10716))
|
||||||
- Properly handle check_epoch_end_signal errors ([#10015](https://github.com/paritytech/parity-ethereum/pull/10015))
|
* [devp2p] Don't use `rust-crypto` ([#10714](https://github.com/paritytech/parity-ethereum/pull/10714))
|
||||||
- Fix(osx and windows builds): bump parity-daemonize ([#10291](https://github.com/paritytech/parity-ethereum/pull/10291))
|
* [devp2p] Fix warnings and re-org imports ([#10710](https://github.com/paritytech/parity-ethereum/pull/10710))
|
||||||
- Add missing step for Using `systemd` service file ([#10175](https://github.com/paritytech/parity-ethereum/pull/10175))
|
* DevP2p: Get node IP address and udp port from Socket, if not included in PING packet ([#10705](https://github.com/paritytech/parity-ethereum/pull/10705))
|
||||||
- Call private contract methods from another private contract (read-onl… ([#10086](https://github.com/paritytech/parity-ethereum/pull/10086))
|
* introduce MissingParent Error, fixes #10699 ([#10700](https://github.com/paritytech/parity-ethereum/pull/10700))
|
||||||
- Update ring to 0.14 ([#10262](https://github.com/paritytech/parity-ethereum/pull/10262))
|
* Refactor Clique stepping ([#10691](https://github.com/paritytech/parity-ethereum/pull/10691))
|
||||||
- Fix(secret-store): deprecation warning ([#10301](https://github.com/paritytech/parity-ethereum/pull/10301))
|
* add_sync_notifier in EthPubSubClient holds on to a Client for too long ([#10689](https://github.com/paritytech/parity-ethereum/pull/10689))
|
||||||
- Update to jsonrpc-derive 10.0.2, fixes aliases bug ([#10300](https://github.com/paritytech/parity-ethereum/pull/10300))
|
* Fix compiler warning (that will become an error) ([#10683](https://github.com/paritytech/parity-ethereum/pull/10683))
|
||||||
- Convert to jsonrpc-derive, use jsonrpc-* from crates.io ([#10298](https://github.com/paritytech/parity-ethereum/pull/10298))
|
* Don't panic if extra_data is longer than VANITY_LENGTH ([#10682](https://github.com/paritytech/parity-ethereum/pull/10682))
|
||||||
- Fix Windows build ([#10284](https://github.com/paritytech/parity-ethereum/pull/10284))
|
* Remove annoying compiler warnings ([#10679](https://github.com/paritytech/parity-ethereum/pull/10679))
|
||||||
- Don't run the CPP example on CI ([#10285](https://github.com/paritytech/parity-ethereum/pull/10285))
|
* Remove support for hardware wallets ([#10678](https://github.com/paritytech/parity-ethereum/pull/10678))
|
||||||
- Additional tests for uint deserialization. ([#10279](https://github.com/paritytech/parity-ethereum/pull/10279))
|
* [CI] allow cargo audit to fail ([#10676](https://github.com/paritytech/parity-ethereum/pull/10676))
|
||||||
- Prevent silent errors in daemon mode ([#10007](https://github.com/paritytech/parity-ethereum/pull/10007))
|
* new image ([#10673](https://github.com/paritytech/parity-ethereum/pull/10673))
|
||||||
- Fix join-set test to be deterministic. ([#10263](https://github.com/paritytech/parity-ethereum/pull/10263))
|
* Upgrade ethereum types ([#10670](https://github.com/paritytech/parity-ethereum/pull/10670))
|
||||||
- Update CHANGELOG-2.2.md ([#10254](https://github.com/paritytech/parity-ethereum/pull/10254))
|
* Reset blockchain properly ([#10669](https://github.com/paritytech/parity-ethereum/pull/10669))
|
||||||
- Macos heapsize force jemalloc ([#10234](https://github.com/paritytech/parity-ethereum/pull/10234))
|
* fix: Move PR template into .github/ folder ([#10663](https://github.com/paritytech/parity-ethereum/pull/10663))
|
||||||
- Allow specifying local accounts via CLI ([#9960](https://github.com/paritytech/parity-ethereum/pull/9960))
|
* docs: evmbin - Update Rust docs ([#10658](https://github.com/paritytech/parity-ethereum/pull/10658))
|
||||||
- Take in account zero gas price certification when doing transact_cont… ([#10232](https://github.com/paritytech/parity-ethereum/pull/10232))
|
* refactor: Related #9459 - evmbin: replace untyped json! macro with fully typed serde serialization using Rust structs ([#10657](https://github.com/paritytech/parity-ethereum/pull/10657))
|
||||||
- Update CHANGELOG.md ([#10249](https://github.com/paritytech/parity-ethereum/pull/10249))
|
* docs: Add PR template ([#10654](https://github.com/paritytech/parity-ethereum/pull/10654))
|
||||||
- Fix typo: CHANGELOG-2.1 -> CHANGELOG-2.2 ([#10233](https://github.com/paritytech/parity-ethereum/pull/10233))
|
* docs: Add ProgPoW Rust docs to ethash module ([#10653](https://github.com/paritytech/parity-ethereum/pull/10653))
|
||||||
- Update copyright year to 2019. ([#10181](https://github.com/paritytech/parity-ethereum/pull/10181))
|
* docs: Update Readme with TOC, Contributor Guideline. Update Cargo package descriptions ([#10652](https://github.com/paritytech/parity-ethereum/pull/10652))
|
||||||
- Fixed: types::transaction::SignedTransaction; ([#10229](https://github.com/paritytech/parity-ethereum/pull/10229))
|
* Upgrade to parity-crypto 0.4 ([#10650](https://github.com/paritytech/parity-ethereum/pull/10650))
|
||||||
- Fix(ManageNetwork): replace Range with RangeInclusive ([#10209](https://github.com/paritytech/parity-ethereum/pull/10209))
|
* fix(compilation warnings) ([#10649](https://github.com/paritytech/parity-ethereum/pull/10649))
|
||||||
- Import rpc transactions sequentially ([#10051](https://github.com/paritytech/parity-ethereum/pull/10051))
|
* [whisper] Move needed aes_gcm crypto in-crate ([#10647](https://github.com/paritytech/parity-ethereum/pull/10647))
|
||||||
- Enable St-Peters-Fork ("Constantinople Fix") ([#10223](https://github.com/paritytech/parity-ethereum/pull/10223))
|
* Update publishing ([#10644](https://github.com/paritytech/parity-ethereum/pull/10644))
|
||||||
- Add EIP-1283 disable transition ([#10214](https://github.com/paritytech/parity-ethereum/pull/10214))
|
* ci: publish docs debug ([#10638](https://github.com/paritytech/parity-ethereum/pull/10638))
|
||||||
- Echo CORS request headers by default ([#10221](https://github.com/paritytech/parity-ethereum/pull/10221))
|
* Fix publish docs ([#10635](https://github.com/paritytech/parity-ethereum/pull/10635))
|
||||||
- Happy New Year! ([#10211](https://github.com/paritytech/parity-ethereum/pull/10211))
|
* Fix rinkeby petersburg fork ([#10632](https://github.com/paritytech/parity-ethereum/pull/10632))
|
||||||
- Perform stripping during build ([#10208](https://github.com/paritytech/parity-ethereum/pull/10208))
|
* Update kovan.json to switch Kovan validator set to POA Consensus Contracts ([#10628](https://github.com/paritytech/parity-ethereum/pull/10628))
|
||||||
- Remove CallContract and RegistryInfo re-exports from `ethcore/client` ([#10205](https://github.com/paritytech/parity-ethereum/pull/10205))
|
* [ethcore] remove error_chain ([#10616](https://github.com/paritytech/parity-ethereum/pull/10616))
|
||||||
- Extract CallContract and RegistryInfo traits into their own crate ([#10178](https://github.com/paritytech/parity-ethereum/pull/10178))
|
* Remove unused import ([#10615](https://github.com/paritytech/parity-ethereum/pull/10615))
|
||||||
- Update the changelogs for 2.1.11, 2.2.6, 2.2.7, and 2.3.0 ([#10197](https://github.com/paritytech/parity-ethereum/pull/10197))
|
* Adds parity_getRawBlockByNumber, parity_submitRawBlock ([#10609](https://github.com/paritytech/parity-ethereum/pull/10609))
|
||||||
- Cancel Constantinople HF on POA Core ([#10198](https://github.com/paritytech/parity-ethereum/pull/10198))
|
* adds rpc error message for --no-ancient-blocks ([#10608](https://github.com/paritytech/parity-ethereum/pull/10608))
|
||||||
- Adds cli interface to allow reseting chain to a particular block ([#9782](https://github.com/paritytech/parity-ethereum/pull/9782))
|
* Constantinople HF on POA Core ([#10606](https://github.com/paritytech/parity-ethereum/pull/10606))
|
||||||
- Run all `igd` methods in its own thread ([#10195](https://github.com/paritytech/parity-ethereum/pull/10195))
|
* Clique: zero-fill extradata when the supplied value is less than 32 bytes in length ([#10605](https://github.com/paritytech/parity-ethereum/pull/10605))
|
||||||
- Pull constantinople on ethereum network ([#10189](https://github.com/paritytech/parity-ethereum/pull/10189))
|
* evm: add some mulmod benches ([#10600](https://github.com/paritytech/parity-ethereum/pull/10600))
|
||||||
- Update for Android cross-compilation. ([#10180](https://github.com/paritytech/parity-ethereum/pull/10180))
|
* sccache logs to stdout ([#10596](https://github.com/paritytech/parity-ethereum/pull/10596))
|
||||||
- Version: bump fork blocks for kovan and foundation ([#10186](https://github.com/paritytech/parity-ethereum/pull/10186))
|
* update bootnodes ([#10595](https://github.com/paritytech/parity-ethereum/pull/10595))
|
||||||
- Handle the case for contract creation on an empty but exist account w… ([#10065](https://github.com/paritytech/parity-ethereum/pull/10065))
|
* Merge `Notifier` and `TransactionsPoolNotifier` ([#10591](https://github.com/paritytech/parity-ethereum/pull/10591))
|
||||||
- Align personal_unlockAccount behaviour when permanent unlock is disab… ([#10060](https://github.com/paritytech/parity-ethereum/pull/10060))
|
* fix(whisper): change expiry `unix_time + ttl + work` ([#10587](https://github.com/paritytech/parity-ethereum/pull/10587))
|
||||||
- Drop `runtime` after others (especially `ws_server`) ([#10179](https://github.com/paritytech/parity-ethereum/pull/10179))
|
* fix(evmbin): make benches compile again ([#10586](https://github.com/paritytech/parity-ethereum/pull/10586))
|
||||||
- Version: bump nightly to 2.4 ([#10165](https://github.com/paritytech/parity-ethereum/pull/10165))
|
* fix issue with compilation when 'slow-blocks' feature enabled ([#10585](https://github.com/paritytech/parity-ethereum/pull/10585))
|
||||||
- Skip locking in statedb for non-canon blocks ([#10141](https://github.com/paritytech/parity-ethereum/pull/10141))
|
* Allow CORS requests in Secret Store API ([#10584](https://github.com/paritytech/parity-ethereum/pull/10584))
|
||||||
- Remove reference to ui-interface command-line option ([#10170](https://github.com/paritytech/parity-ethereum/pull/10170))
|
* CI improvements ([#10579](https://github.com/paritytech/parity-ethereum/pull/10579))
|
||||||
- Fix [#9822](https://github.com/paritytech/parity-ethereum/pull/9822): trace_filter does not return failed contract creation ([#10140](https://github.com/paritytech/parity-ethereum/pull/10140))
|
* ethcore: improve timestamp handling ([#10574](https://github.com/paritytech/parity-ethereum/pull/10574))
|
||||||
- Fix _cannot recursively call into `Core`_ issue ([#10144](https://github.com/paritytech/parity-ethereum/pull/10144))
|
* Update Issue Template to direct security issue to email ([#10562](https://github.com/paritytech/parity-ethereum/pull/10562))
|
||||||
- Fix(whisper): correct PoW calculation ([#10166](https://github.com/paritytech/parity-ethereum/pull/10166))
|
* version: bump master to 2.6 ([#10560](https://github.com/paritytech/parity-ethereum/pull/10560))
|
||||||
- Bump JSON-RPC ([#10151](https://github.com/paritytech/parity-ethereum/pull/10151))
|
* fix(light cull): poll light cull instead of timer ([#10559](https://github.com/paritytech/parity-ethereum/pull/10559))
|
||||||
- Ping nodes from discovery ([#10167](https://github.com/paritytech/parity-ethereum/pull/10167))
|
* Watch transactions pool ([#10558](https://github.com/paritytech/parity-ethereum/pull/10558))
|
||||||
- Fix(android): remove dependency to libusb ([#10161](https://github.com/paritytech/parity-ethereum/pull/10161))
|
* Add SealingState; don't prepare block when not ready. ([#10529](https://github.com/paritytech/parity-ethereum/pull/10529))
|
||||||
- Refactor(trim_right_matches -> trim_end_matches) ([#10159](https://github.com/paritytech/parity-ethereum/pull/10159))
|
* Explicitly enable or disable Stratum in config file (Issue 9785) ([#10521](https://github.com/paritytech/parity-ethereum/pull/10521))
|
||||||
- Merge Machine and WithRewards ([#10071](https://github.com/paritytech/parity-ethereum/pull/10071))
|
* Add filtering capability to `parity_pendingTransactions` (issue 8269) ([#10506](https://github.com/paritytech/parity-ethereum/pull/10506))
|
||||||
|
* Remove calls to heapsize ([#10432](https://github.com/paritytech/parity-ethereum/pull/10432))
|
||||||
|
* RPC: Implements eth_subscribe("syncing") ([#10311](https://github.com/paritytech/parity-ethereum/pull/10311))
|
||||||
|
* SecretStore: non-blocking wait of session completion ([#10303](https://github.com/paritytech/parity-ethereum/pull/10303))
|
||||||
|
* Node table limiting and cache for node filter ([#10288](https://github.com/paritytech/parity-ethereum/pull/10288))
|
||||||
|
* SecretStore: expose restore_key_public in HTTP API ([#10241](https://github.com/paritytech/parity-ethereum/pull/10241))
|
||||||
|
* Trivial journal for private transactions ([#10056](https://github.com/paritytech/parity-ethereum/pull/10056))
|
||||||
|
|
||||||
## Previous releases
|
## Previous releases
|
||||||
|
|
||||||
- [CHANGELOG-2.3](docs/CHANGELOG-2.3.md) (_stable_)
|
- [CHANGELOG-2.5](docs/CHANGELOG-2.5.md) (_stable_)
|
||||||
|
- [CHANGELOG-2.4](docs/CHANGELOG-2.4.md) (EOL: 2019-07-08)
|
||||||
|
- [CHANGELOG-2.3](docs/CHANGELOG-2.3.md) (EOL: 2019-04-09)
|
||||||
- [CHANGELOG-2.2](docs/CHANGELOG-2.2.md) (EOL: 2019-02-25)
|
- [CHANGELOG-2.2](docs/CHANGELOG-2.2.md) (EOL: 2019-02-25)
|
||||||
- [CHANGELOG-2.1](docs/CHANGELOG-2.1.md) (EOL: 2019-01-16)
|
- [CHANGELOG-2.1](docs/CHANGELOG-2.1.md) (EOL: 2019-01-16)
|
||||||
- [CHANGELOG-2.0](docs/CHANGELOG-2.0.md) (EOL: 2018-11-15)
|
- [CHANGELOG-2.0](docs/CHANGELOG-2.0.md) (EOL: 2018-11-15)
|
||||||
|
1970
Cargo.lock
generated
1970
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
24
Cargo.toml
24
Cargo.toml
@ -2,7 +2,7 @@
|
|||||||
description = "Parity Ethereum client"
|
description = "Parity Ethereum client"
|
||||||
name = "parity-ethereum"
|
name = "parity-ethereum"
|
||||||
# NOTE Make sure to update util/version/Cargo.toml as well
|
# NOTE Make sure to update util/version/Cargo.toml as well
|
||||||
version = "2.5.0"
|
version = "2.6.4"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
|
||||||
@ -18,7 +18,7 @@ num_cpus = "1.2"
|
|||||||
number_prefix = "0.2"
|
number_prefix = "0.2"
|
||||||
rpassword = "1.0"
|
rpassword = "1.0"
|
||||||
semver = "0.9"
|
semver = "0.9"
|
||||||
ansi_term = "0.10"
|
ansi_term = "0.11"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
regex = "1.0"
|
regex = "1.0"
|
||||||
atty = "0.2.8"
|
atty = "0.2.8"
|
||||||
@ -29,7 +29,7 @@ serde_derive = "1.0"
|
|||||||
futures = "0.1"
|
futures = "0.1"
|
||||||
fdlimit = "0.1"
|
fdlimit = "0.1"
|
||||||
ctrlc = { git = "https://github.com/paritytech/rust-ctrlc.git" }
|
ctrlc = { git = "https://github.com/paritytech/rust-ctrlc.git" }
|
||||||
jsonrpc-core = "10.0.1"
|
jsonrpc-core = "12.0.0"
|
||||||
parity-bytes = "0.1"
|
parity-bytes = "0.1"
|
||||||
common-types = { path = "ethcore/types" }
|
common-types = { path = "ethcore/types" }
|
||||||
ethcore = { path = "ethcore", features = ["parity"] }
|
ethcore = { path = "ethcore", features = ["parity"] }
|
||||||
@ -45,11 +45,11 @@ ethcore-network = { path = "util/network" }
|
|||||||
ethcore-private-tx = { path = "ethcore/private-tx" }
|
ethcore-private-tx = { path = "ethcore/private-tx" }
|
||||||
ethcore-service = { path = "ethcore/service" }
|
ethcore-service = { path = "ethcore/service" }
|
||||||
ethcore-sync = { path = "ethcore/sync" }
|
ethcore-sync = { path = "ethcore/sync" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
ethkey = { path = "accounts/ethkey" }
|
ethkey = { path = "accounts/ethkey" }
|
||||||
ethstore = { path = "accounts/ethstore" }
|
ethstore = { path = "accounts/ethstore" }
|
||||||
node-filter = { path = "ethcore/node-filter" }
|
node-filter = { path = "ethcore/node-filter" }
|
||||||
rlp = { version = "0.3.0", features = ["ethereum"] }
|
rlp = "0.4.0"
|
||||||
cli-signer= { path = "cli-signer" }
|
cli-signer= { path = "cli-signer" }
|
||||||
parity-daemonize = "0.3"
|
parity-daemonize = "0.3"
|
||||||
parity-hash-fetch = { path = "updater/hash-fetch" }
|
parity-hash-fetch = { path = "updater/hash-fetch" }
|
||||||
@ -63,7 +63,7 @@ parity-whisper = { path = "whisper" }
|
|||||||
parity-path = "0.1"
|
parity-path = "0.1"
|
||||||
dir = { path = "util/dir" }
|
dir = { path = "util/dir" }
|
||||||
panic_hook = { path = "util/panic-hook" }
|
panic_hook = { path = "util/panic-hook" }
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
migration-rocksdb = { path = "util/migration-rocksdb" }
|
migration-rocksdb = { path = "util/migration-rocksdb" }
|
||||||
kvdb = "0.1"
|
kvdb = "0.1"
|
||||||
kvdb-rocksdb = "0.1.3"
|
kvdb-rocksdb = "0.1.3"
|
||||||
@ -73,6 +73,8 @@ ethcore-secretstore = { path = "secret-store", optional = true }
|
|||||||
|
|
||||||
registrar = { path = "util/registrar" }
|
registrar = { path = "util/registrar" }
|
||||||
|
|
||||||
|
parity-util-mem = { version = "0.1", features = ["jemalloc-global"] }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
rustc_version = "0.2"
|
rustc_version = "0.2"
|
||||||
|
|
||||||
@ -81,7 +83,6 @@ pretty_assertions = "0.1"
|
|||||||
ipnetwork = "0.12.6"
|
ipnetwork = "0.12.6"
|
||||||
tempdir = "0.3"
|
tempdir = "0.3"
|
||||||
fake-fetch = { path = "util/fake-fetch" }
|
fake-fetch = { path = "util/fake-fetch" }
|
||||||
lazy_static = "1.2.0"
|
|
||||||
|
|
||||||
[target.'cfg(windows)'.dependencies]
|
[target.'cfg(windows)'.dependencies]
|
||||||
winapi = { version = "0.3.4", features = ["winsock2", "winuser", "shellapi"] }
|
winapi = { version = "0.3.4", features = ["winsock2", "winuser", "shellapi"] }
|
||||||
@ -91,7 +92,6 @@ default = ["accounts"]
|
|||||||
accounts = ["ethcore-accounts", "parity-rpc/accounts"]
|
accounts = ["ethcore-accounts", "parity-rpc/accounts"]
|
||||||
miner-debug = ["ethcore/miner-debug"]
|
miner-debug = ["ethcore/miner-debug"]
|
||||||
json-tests = ["ethcore/json-tests"]
|
json-tests = ["ethcore/json-tests"]
|
||||||
ci-skip-tests = ["ethcore/ci-skip-tests"]
|
|
||||||
test-heavy = ["ethcore/test-heavy"]
|
test-heavy = ["ethcore/test-heavy"]
|
||||||
evm-debug = ["ethcore/evm-debug"]
|
evm-debug = ["ethcore/evm-debug"]
|
||||||
evm-debug-tests = ["ethcore/evm-debug-tests"]
|
evm-debug-tests = ["ethcore/evm-debug-tests"]
|
||||||
@ -118,10 +118,9 @@ path = "parity/lib.rs"
|
|||||||
path = "parity/main.rs"
|
path = "parity/main.rs"
|
||||||
name = "parity"
|
name = "parity"
|
||||||
|
|
||||||
[profile.dev]
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
debug = false
|
debug = false
|
||||||
|
lto = true
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
# This should only list projects that are not
|
# This should only list projects that are not
|
||||||
@ -139,8 +138,5 @@ members = [
|
|||||||
"util/keccak-hasher",
|
"util/keccak-hasher",
|
||||||
"util/patricia-trie-ethereum",
|
"util/patricia-trie-ethereum",
|
||||||
"util/fastmap",
|
"util/fastmap",
|
||||||
"util/time-utils"
|
"util/time-utils",
|
||||||
]
|
]
|
||||||
|
|
||||||
[patch.crates-io]
|
|
||||||
heapsize = { git = "https://github.com/cheme/heapsize.git", branch = "ec-macfix" }
|
|
||||||
|
273
README.md
273
README.md
@ -7,6 +7,25 @@
|
|||||||
<p align="center"><a href="https://gitlab.parity.io/parity/parity-ethereum/commits/master" target="_blank"><img src="https://gitlab.parity.io/parity/parity-ethereum/badges/master/build.svg" /></a>
|
<p align="center"><a href="https://gitlab.parity.io/parity/parity-ethereum/commits/master" target="_blank"><img src="https://gitlab.parity.io/parity/parity-ethereum/badges/master/build.svg" /></a>
|
||||||
<a href="https://www.gnu.org/licenses/gpl-3.0.en.html" target="_blank"><img src="https://img.shields.io/badge/license-GPL%20v3-green.svg" /></a></p>
|
<a href="https://www.gnu.org/licenses/gpl-3.0.en.html" target="_blank"><img src="https://img.shields.io/badge/license-GPL%20v3-green.svg" /></a></p>
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
1. [Description](#chapter-001)
|
||||||
|
2. [Technical Overview](#chapter-002)
|
||||||
|
3. [Building](#chapter-003)<br>
|
||||||
|
3.1 [Building Dependencies](#chapter-0031)<br>
|
||||||
|
3.2 [Building from Source Code](#chapter-0032)<br>
|
||||||
|
3.3 [Simple One-Line Installer for Mac and Linux](#chapter-0033)<br>
|
||||||
|
3.4 [Starting Parity Ethereum](#chapter-0034)
|
||||||
|
4. [Testing](#chapter-004)
|
||||||
|
5. [Documentation](#chapter-005)
|
||||||
|
6. [Toolchain](#chapter-006)
|
||||||
|
7. [Community](#chapter-007)
|
||||||
|
8. [Contributing](#chapter-008)
|
||||||
|
9. [License](#chapter-009)
|
||||||
|
|
||||||
|
|
||||||
|
## 1. Description <a id="chapter-001"></a>
|
||||||
|
|
||||||
**Built for mission-critical use**: Miners, service providers, and exchanges need fast synchronisation and maximum uptime. Parity Ethereum provides the core infrastructure essential for speedy and reliable services.
|
**Built for mission-critical use**: Miners, service providers, and exchanges need fast synchronisation and maximum uptime. Parity Ethereum provides the core infrastructure essential for speedy and reliable services.
|
||||||
|
|
||||||
- Clean, modular codebase for easy customisation
|
- Clean, modular codebase for easy customisation
|
||||||
@ -15,7 +34,7 @@
|
|||||||
- Synchronise in hours, not days with Warp Sync
|
- Synchronise in hours, not days with Warp Sync
|
||||||
- Modular for light integration into your service or product
|
- Modular for light integration into your service or product
|
||||||
|
|
||||||
## Technical Overview
|
## 2. Technical Overview <a id="chapter-002"></a>
|
||||||
|
|
||||||
Parity Ethereum's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity Ethereum using the sophisticated and cutting-edge **Rust programming language**. Parity Ethereum is licensed under the GPLv3 and can be used for all your Ethereum needs.
|
Parity Ethereum's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity Ethereum using the sophisticated and cutting-edge **Rust programming language**. Parity Ethereum is licensed under the GPLv3 and can be used for all your Ethereum needs.
|
||||||
|
|
||||||
@ -25,7 +44,9 @@ If you run into problems while using Parity Ethereum, check out the [wiki for do
|
|||||||
|
|
||||||
Parity Ethereum's current beta-release is 2.1. You can download it at [the releases page](https://github.com/paritytech/parity-ethereum/releases) or follow the instructions below to build from source. Please, mind the [CHANGELOG.md](CHANGELOG.md) for a list of all changes between different versions.
|
Parity Ethereum's current beta-release is 2.1. You can download it at [the releases page](https://github.com/paritytech/parity-ethereum/releases) or follow the instructions below to build from source. Please, mind the [CHANGELOG.md](CHANGELOG.md) for a list of all changes between different versions.
|
||||||
|
|
||||||
## Build Dependencies
|
## 3. Building <a id="chapter-003"></a>
|
||||||
|
|
||||||
|
### 3.1 Build Dependencies <a id="chapter-0031"></a>
|
||||||
|
|
||||||
Parity Ethereum requires **latest stable Rust version** to build.
|
Parity Ethereum requires **latest stable Rust version** to build.
|
||||||
|
|
||||||
@ -36,7 +57,7 @@ We recommend installing Rust through [rustup](https://www.rustup.rs/). If you do
|
|||||||
$ curl https://sh.rustup.rs -sSf | sh
|
$ curl https://sh.rustup.rs -sSf | sh
|
||||||
```
|
```
|
||||||
|
|
||||||
Parity Ethereum also requires `gcc`, `g++`, `libudev-dev`, `pkg-config`, `file`, `make`, and `cmake` packages to be installed.
|
Parity Ethereum also requires `gcc`, `g++`, `pkg-config`, `file`, `make`, and `cmake` packages to be installed.
|
||||||
|
|
||||||
- OSX:
|
- OSX:
|
||||||
```bash
|
```bash
|
||||||
@ -45,7 +66,7 @@ We recommend installing Rust through [rustup](https://www.rustup.rs/). If you do
|
|||||||
|
|
||||||
`clang` is required. It comes with Xcode command line tools or can be installed with homebrew.
|
`clang` is required. It comes with Xcode command line tools or can be installed with homebrew.
|
||||||
|
|
||||||
- Windows
|
- Windows:
|
||||||
Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the `rustup` installer from
|
Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the `rustup` installer from
|
||||||
https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe, start "VS2015 x64 Native Tools Command Prompt", and use the following command to install and set up the `msvc` toolchain:
|
https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe, start "VS2015 x64 Native Tools Command Prompt", and use the following command to install and set up the `msvc` toolchain:
|
||||||
```bash
|
```bash
|
||||||
@ -58,7 +79,7 @@ Once you have `rustup` installed, then you need to install:
|
|||||||
|
|
||||||
Make sure that these binaries are in your `PATH`. After that, you should be able to build Parity Ethereum from source.
|
Make sure that these binaries are in your `PATH`. After that, you should be able to build Parity Ethereum from source.
|
||||||
|
|
||||||
## Build from Source Code
|
### 3.2 Build from Source Code <a id="chapter-0032"></a>
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# download Parity Ethereum code
|
# download Parity Ethereum code
|
||||||
@ -95,7 +116,7 @@ or
|
|||||||
$ git checkout beta
|
$ git checkout beta
|
||||||
```
|
```
|
||||||
|
|
||||||
## Simple One-Line Installer for Mac and Linux
|
### 3.3 Simple One-Line Installer for Mac and Linux <a id="chapter-0033"></a>
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
bash <(curl https://get.parity.io -L)
|
bash <(curl https://get.parity.io -L)
|
||||||
@ -107,9 +128,9 @@ The one-line installer always defaults to the latest beta release. To install a
|
|||||||
bash <(curl https://get.parity.io -L) -r stable
|
bash <(curl https://get.parity.io -L) -r stable
|
||||||
```
|
```
|
||||||
|
|
||||||
## Start Parity Ethereum
|
### 3.4 Starting Parity Ethereum <a id="chapter-0034"></a>
|
||||||
|
|
||||||
### Manually
|
#### Manually
|
||||||
|
|
||||||
To start Parity Ethereum manually, just run
|
To start Parity Ethereum manually, just run
|
||||||
|
|
||||||
@ -119,7 +140,7 @@ $ ./target/release/parity
|
|||||||
|
|
||||||
so Parity Ethereum begins syncing the Ethereum blockchain.
|
so Parity Ethereum begins syncing the Ethereum blockchain.
|
||||||
|
|
||||||
### Using `systemd` service file
|
#### Using `systemd` service file
|
||||||
|
|
||||||
To start Parity Ethereum as a regular user using `systemd` init:
|
To start Parity Ethereum as a regular user using `systemd` init:
|
||||||
|
|
||||||
@ -128,17 +149,225 @@ To start Parity Ethereum as a regular user using `systemd` init:
|
|||||||
2. Copy release to bin folder, write `sudo install ./target/release/parity /usr/bin/parity`
|
2. Copy release to bin folder, write `sudo install ./target/release/parity /usr/bin/parity`
|
||||||
3. To configure Parity Ethereum, write a `/etc/parity/config.toml` config file, see [Configuring Parity Ethereum](https://paritytech.github.io/wiki/Configuring-Parity) for details.
|
3. To configure Parity Ethereum, write a `/etc/parity/config.toml` config file, see [Configuring Parity Ethereum](https://paritytech.github.io/wiki/Configuring-Parity) for details.
|
||||||
|
|
||||||
## Parity Ethereum toolchain
|
## 4. Testing <a id="chapter-004"></a>
|
||||||
|
|
||||||
|
You can run tests with the following commands:
|
||||||
|
|
||||||
|
* **All** packages
|
||||||
|
```
|
||||||
|
cargo test --all
|
||||||
|
```
|
||||||
|
|
||||||
|
* Specific package
|
||||||
|
```
|
||||||
|
cargo test --package <spec>
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `<spec>` with one of the packages from the [package list](#package-list) (e.g. `cargo test --package evmbin`).
|
||||||
|
|
||||||
|
You can show your logs in the test output by passing `--nocapture` (i.e. `cargo test --package evmbin -- --nocapture`)
|
||||||
|
|
||||||
|
## 5. Documentation <a id="chapter-005"></a>
|
||||||
|
|
||||||
|
Official website: https://parity.io
|
||||||
|
|
||||||
|
Be sure to [check out our wiki](https://wiki.parity.io) for more information.
|
||||||
|
|
||||||
|
### Viewing documentation for Parity Ethereum packages
|
||||||
|
|
||||||
|
You can generate documentation for Parity Ethereum Rust packages that automatically opens in your web browser using [rustdoc with Cargo](https://doc.rust-lang.org/rustdoc/what-is-rustdoc.html#using-rustdoc-with-cargo) (of the The Rustdoc Book), by running the the following commands:
|
||||||
|
|
||||||
|
* **All** packages
|
||||||
|
```
|
||||||
|
cargo doc --document-private-items --open
|
||||||
|
```
|
||||||
|
|
||||||
|
* Specific package
|
||||||
|
```
|
||||||
|
cargo doc --package <spec> -- --document-private-items --open
|
||||||
|
```
|
||||||
|
|
||||||
|
Use`--document-private-items` to also view private documentation and `--no-deps` to exclude building documentation for dependencies.
|
||||||
|
|
||||||
|
Replacing `<spec>` with one of the following from the details section below (i.e. `cargo doc --package parity-ethereum --open`):
|
||||||
|
|
||||||
|
<a id="package-list"></a>
|
||||||
|
**Package List**
|
||||||
|
<details><p>
|
||||||
|
|
||||||
|
* Parity Ethereum (EthCore) Client Application
|
||||||
|
```bash
|
||||||
|
parity-ethereum
|
||||||
|
```
|
||||||
|
* Parity Ethereum Account Management, Key Management Tool, and Keys Generator
|
||||||
|
```bash
|
||||||
|
ethcore-accounts, ethkey-cli, ethstore, ethstore-cli
|
||||||
|
```
|
||||||
|
* Parity Chain Specification
|
||||||
|
```bash
|
||||||
|
chainspec
|
||||||
|
```
|
||||||
|
* Parity CLI Signer Tool & RPC Client
|
||||||
|
```bash
|
||||||
|
cli-signer parity-rpc-client
|
||||||
|
```
|
||||||
|
* Parity Ethereum Ethash & ProgPoW Implementations
|
||||||
|
```bash
|
||||||
|
ethash
|
||||||
|
```
|
||||||
|
* Parity (EthCore) Library
|
||||||
|
```bash
|
||||||
|
ethcore
|
||||||
|
```
|
||||||
|
* Parity Ethereum Blockchain Database, Test Generator, Configuration,
|
||||||
|
Caching, Importing Blocks, and Block Information
|
||||||
|
```bash
|
||||||
|
ethcore-blockchain
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Contract Calls and Blockchain Service & Registry Information
|
||||||
|
```bash
|
||||||
|
ethcore-call-contract
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Database Access & Utilities, Database Cache Manager
|
||||||
|
```bash
|
||||||
|
ethcore-db
|
||||||
|
```
|
||||||
|
* Parity Ethereum Virtual Machine (EVM) Rust Implementation
|
||||||
|
```bash
|
||||||
|
evm
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Light Client Implementation
|
||||||
|
```bash
|
||||||
|
ethcore-light
|
||||||
|
```
|
||||||
|
* Parity Smart Contract based Node Filter, Manage Permissions of Network Connections
|
||||||
|
```bash
|
||||||
|
node-filter
|
||||||
|
```
|
||||||
|
* Parity Private Transactions
|
||||||
|
```bash
|
||||||
|
ethcore-private-tx
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Client & Network Service Creation & Registration with the I/O Subsystem
|
||||||
|
```bash
|
||||||
|
ethcore-service
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Blockchain Synchronization
|
||||||
|
```bash
|
||||||
|
ethcore-sync
|
||||||
|
```
|
||||||
|
* Parity Ethereum Common Types
|
||||||
|
```bash
|
||||||
|
common-types
|
||||||
|
```
|
||||||
|
* Parity Ethereum Virtual Machines (VM) Support Library
|
||||||
|
```bash
|
||||||
|
vm
|
||||||
|
```
|
||||||
|
* Parity Ethereum WASM Interpreter
|
||||||
|
```bash
|
||||||
|
wasm
|
||||||
|
```
|
||||||
|
* Parity Ethereum WASM Test Runner
|
||||||
|
```bash
|
||||||
|
pwasm-run-test
|
||||||
|
```
|
||||||
|
* Parity EVM Implementation
|
||||||
|
```bash
|
||||||
|
evmbin
|
||||||
|
```
|
||||||
|
* Parity Ethereum IPFS-compatible API
|
||||||
|
```bash
|
||||||
|
parity-ipfs-api
|
||||||
|
```
|
||||||
|
* Parity Ethereum JSON Deserialization
|
||||||
|
```bash
|
||||||
|
ethjson
|
||||||
|
```
|
||||||
|
* Parity Ethereum State Machine Generalization for Consensus Engines
|
||||||
|
```bash
|
||||||
|
parity-machine
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Miner Interface
|
||||||
|
```bash
|
||||||
|
ethcore-miner parity-local-store price-info ethcore-stratum using_queue
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Logger Implementation
|
||||||
|
```bash
|
||||||
|
ethcore-logger
|
||||||
|
```
|
||||||
|
* C bindings library for the Parity Ethereum client
|
||||||
|
```bash
|
||||||
|
parity-clib
|
||||||
|
```
|
||||||
|
* Parity Ethereum JSON-RPC Servers
|
||||||
|
```bash
|
||||||
|
parity-rpc
|
||||||
|
```
|
||||||
|
* Parity Ethereum (EthCore) Secret Store
|
||||||
|
```bash
|
||||||
|
ethcore-secretstore
|
||||||
|
```
|
||||||
|
* Parity Updater Service
|
||||||
|
```bash
|
||||||
|
parity-updater parity-hash-fetch
|
||||||
|
```
|
||||||
|
* Parity Core Libraries (Parity Util)
|
||||||
|
```bash
|
||||||
|
ethcore-bloom-journal blooms-db dir eip-712 fake-fetch fastmap fetch ethcore-io
|
||||||
|
journaldb keccak-hasher len-caching-lock macros memory-cache memzero
|
||||||
|
migration-rocksdb ethcore-network ethcore-network-devp2p panic_hook
|
||||||
|
patricia-trie-ethereum registrar rlp_compress rlp_derive parity-runtime stats
|
||||||
|
time-utils triehash-ethereum unexpected parity-version
|
||||||
|
```
|
||||||
|
* Parity Whisper Protocol Implementation
|
||||||
|
```bash
|
||||||
|
parity-whisper whisper-cli
|
||||||
|
```
|
||||||
|
|
||||||
|
</p></details>
|
||||||
|
|
||||||
|
### Contributing to documentation for Parity Ethereum packages
|
||||||
|
|
||||||
|
[Document source code](https://doc.rust-lang.org/1.9.0/book/documentation.html) for Parity Ethereum packages by annotating the source code with documentation comments.
|
||||||
|
|
||||||
|
Example (generic documentation comment):
|
||||||
|
```markdown
|
||||||
|
/// Summary
|
||||||
|
///
|
||||||
|
/// Description
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// # Errors
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Summary of Example 1
|
||||||
|
///
|
||||||
|
/// ```rust
|
||||||
|
/// // insert example 1 code here for use with documentation as tests
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
```
|
||||||
|
|
||||||
|
## 6. Toolchain <a id="chapter-006"></a>
|
||||||
|
|
||||||
In addition to the Parity Ethereum client, there are additional tools in this repository available:
|
In addition to the Parity Ethereum client, there are additional tools in this repository available:
|
||||||
|
|
||||||
- [evmbin](https://github.com/paritytech/parity-ethereum/blob/master/evmbin/) - EVM implementation for Parity Ethereum.
|
- [evmbin](./evmbin) - Parity Ethereum EVM Implementation.
|
||||||
- [ethabi](https://github.com/paritytech/ethabi) - Parity Ethereum function calls encoding.
|
- [ethstore](./accounts/ethstore) - Parity Ethereum Key Management.
|
||||||
- [ethstore](https://github.com/paritytech/parity-ethereum/blob/master/accounts/ethstore) - Parity Ethereum key management.
|
- [ethkey](./accounts/ethkey) - Parity Ethereum Keys Generator.
|
||||||
- [ethkey](https://github.com/paritytech/parity-ethereum/blob/master/accounts/ethkey) - Parity Ethereum keys generator.
|
- [whisper](./whisper) - Parity Ethereum Whisper-v2 PoC Implementation.
|
||||||
- [whisper](https://github.com/paritytech/parity-ethereum/blob/master/whisper/) - Implementation of Whisper-v2 PoC.
|
|
||||||
|
|
||||||
## Join the chat!
|
The following tool is available in a separate repository:
|
||||||
|
- [ethabi](https://github.com/paritytech/ethabi) - Parity Ethereum Encoding of Function Calls. [Docs here](https://crates.io/crates/ethabi)
|
||||||
|
|
||||||
|
## 7. Community <a id="chapter-007"></a>
|
||||||
|
|
||||||
|
### Join the chat!
|
||||||
|
|
||||||
Questions? Get in touch with us on Gitter:
|
Questions? Get in touch with us on Gitter:
|
||||||
[![Gitter: Parity](https://img.shields.io/badge/gitter-parity-4AB495.svg)](https://gitter.im/paritytech/parity)
|
[![Gitter: Parity](https://img.shields.io/badge/gitter-parity-4AB495.svg)](https://gitter.im/paritytech/parity)
|
||||||
@ -149,8 +378,14 @@ Questions? Get in touch with us on Gitter:
|
|||||||
Alternatively, join our community on Matrix:
|
Alternatively, join our community on Matrix:
|
||||||
[![Riot: +Parity](https://img.shields.io/badge/riot-%2Bparity%3Amatrix.parity.io-orange.svg)](https://riot.im/app/#/group/+parity:matrix.parity.io)
|
[![Riot: +Parity](https://img.shields.io/badge/riot-%2Bparity%3Amatrix.parity.io-orange.svg)](https://riot.im/app/#/group/+parity:matrix.parity.io)
|
||||||
|
|
||||||
## Documentation
|
## 8. Contributing <a id="chapter-008"></a>
|
||||||
|
|
||||||
Official website: https://parity.io
|
An introduction has been provided in the ["So You Want to be a Core Developer" presentation slides by Hernando Castano](http://tiny.cc/contrib-to-parity-eth). Additional guidelines are provided in [CONTRIBUTING](./.github/CONTRIBUTING.md).
|
||||||
|
|
||||||
Be sure to [check out our wiki](https://wiki.parity.io) for more information.
|
### Contributor Code of Conduct
|
||||||
|
|
||||||
|
[CODE_OF_CONDUCT](./.github/CODE_OF_CONDUCT.md)
|
||||||
|
|
||||||
|
## 9. License <a id="chapter-009"></a>
|
||||||
|
|
||||||
|
[LICENSE](./LICENSE)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
description = "Account management for Parity Ethereum"
|
description = "Parity Ethereum Account Management"
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "ethcore-accounts"
|
name = "ethcore-accounts"
|
||||||
@ -8,7 +8,6 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
common-types = { path = "../ethcore/types" }
|
|
||||||
ethkey = { path = "ethkey" }
|
ethkey = { path = "ethkey" }
|
||||||
ethstore = { path = "ethstore" }
|
ethstore = { path = "ethstore" }
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
@ -17,12 +16,6 @@ serde = "1.0"
|
|||||||
serde_derive = "1.0"
|
serde_derive = "1.0"
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
|
|
||||||
[target.'cfg(any(target_os = "linux", target_os = "macos", target_os = "windows"))'.dependencies]
|
|
||||||
hardware-wallet = { path = "hw" }
|
|
||||||
|
|
||||||
[target.'cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))'.dependencies]
|
|
||||||
fake-hardware-wallet = { path = "fake-hardware-wallet" }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
tempdir = "0.3"
|
tempdir = "0.3"
|
||||||
|
@ -1,21 +1,21 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Keys Generator"
|
||||||
name = "ethkey"
|
name = "ethkey"
|
||||||
version = "0.3.0"
|
version = "0.3.0"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
byteorder = "1.0"
|
|
||||||
edit-distance = "2.0"
|
edit-distance = "2.0"
|
||||||
parity-crypto = "0.3.0"
|
parity-crypto = "0.4.0"
|
||||||
eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
|
eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
lazy_static = "1.0"
|
lazy_static = "1.0"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
memzero = { path = "../../util/memzero" }
|
parity-wordlist = "1.3"
|
||||||
parity-wordlist = "1.2"
|
|
||||||
quick-error = "1.2.2"
|
quick-error = "1.2.2"
|
||||||
rand = "0.4"
|
rand = "0.6"
|
||||||
rustc-hex = "1.0"
|
rustc-hex = "1.0"
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
serde_derive = "1.0"
|
serde_derive = "1.0"
|
||||||
tiny-keccak = "1.4"
|
tiny-keccak = "1.4"
|
||||||
|
zeroize = "0.9.1"
|
||||||
|
@ -5,7 +5,7 @@ Parity Ethereum keys generator.
|
|||||||
### Usage
|
### Usage
|
||||||
|
|
||||||
```
|
```
|
||||||
Parity Ethereum keys generator.
|
Parity Ethereum Keys Generator.
|
||||||
Copyright 2015-2019 Parity Technologies (UK) Ltd.
|
Copyright 2015-2019 Parity Technologies (UK) Ltd.
|
||||||
|
|
||||||
Usage:
|
Usage:
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Keys Generator CLI"
|
||||||
name = "ethkey-cli"
|
name = "ethkey-cli"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
@ -47,7 +47,7 @@ impl Generator for BrainPrefix {
|
|||||||
for _ in 0..self.iterations {
|
for _ in 0..self.iterations {
|
||||||
let phrase = wordlist::random_phrase(self.no_of_words);
|
let phrase = wordlist::random_phrase(self.no_of_words);
|
||||||
let keypair = Brain::new(phrase.clone()).generate().unwrap();
|
let keypair = Brain::new(phrase.clone()).generate().unwrap();
|
||||||
if keypair.address().starts_with(&self.prefix) {
|
if keypair.address().as_ref().starts_with(&self.prefix) {
|
||||||
self.last_phrase = phrase;
|
self.last_phrase = phrase;
|
||||||
return Ok(keypair)
|
return Ok(keypair)
|
||||||
}
|
}
|
||||||
@ -65,6 +65,6 @@ mod tests {
|
|||||||
fn prefix_generator() {
|
fn prefix_generator() {
|
||||||
let prefix = vec![0x00u8];
|
let prefix = vec![0x00u8];
|
||||||
let keypair = BrainPrefix::new(prefix.clone(), usize::max_value(), 12).generate().unwrap();
|
let keypair = BrainPrefix::new(prefix.clone(), usize::max_value(), 12).generate().unwrap();
|
||||||
assert!(keypair.address().starts_with(&prefix));
|
assert!(keypair.address().as_bytes().starts_with(&prefix));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -57,7 +57,7 @@ pub mod ecdh {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let publ = key::PublicKey::from_slice(context, &pdata)?;
|
let publ = key::PublicKey::from_slice(context, &pdata)?;
|
||||||
let sec = key::SecretKey::from_slice(context, &secret)?;
|
let sec = key::SecretKey::from_slice(context, secret.as_bytes())?;
|
||||||
let shared = ecdh::SharedSecret::new_raw(context, &publ, &sec);
|
let shared = ecdh::SharedSecret::new_raw(context, &publ, &sec);
|
||||||
|
|
||||||
Secret::from_unsafe_slice(&shared[0..32])
|
Secret::from_unsafe_slice(&shared[0..32])
|
||||||
@ -89,12 +89,12 @@ pub mod ecies {
|
|||||||
msg[0] = 0x04u8;
|
msg[0] = 0x04u8;
|
||||||
{
|
{
|
||||||
let msgd = &mut msg[1..];
|
let msgd = &mut msg[1..];
|
||||||
msgd[0..64].copy_from_slice(r.public());
|
msgd[0..64].copy_from_slice(r.public().as_bytes());
|
||||||
let iv = H128::random();
|
let iv = H128::random();
|
||||||
msgd[64..80].copy_from_slice(&iv);
|
msgd[64..80].copy_from_slice(iv.as_bytes());
|
||||||
{
|
{
|
||||||
let cipher = &mut msgd[(64 + 16)..(64 + 16 + plain.len())];
|
let cipher = &mut msgd[(64 + 16)..(64 + 16 + plain.len())];
|
||||||
aes::encrypt_128_ctr(ekey, &iv, plain, cipher)?;
|
aes::encrypt_128_ctr(ekey, iv.as_bytes(), plain, cipher)?;
|
||||||
}
|
}
|
||||||
let mut hmac = hmac::Signer::with(&mkey);
|
let mut hmac = hmac::Signer::with(&mkey);
|
||||||
{
|
{
|
||||||
@ -156,7 +156,7 @@ pub mod ecies {
|
|||||||
let mut hasher = digest::Hasher::sha256();
|
let mut hasher = digest::Hasher::sha256();
|
||||||
let ctrs = [(ctr >> 24) as u8, (ctr >> 16) as u8, (ctr >> 8) as u8, ctr as u8];
|
let ctrs = [(ctr >> 24) as u8, (ctr >> 16) as u8, (ctr >> 8) as u8, ctr as u8];
|
||||||
hasher.update(&ctrs);
|
hasher.update(&ctrs);
|
||||||
hasher.update(secret);
|
hasher.update(secret.as_bytes());
|
||||||
hasher.update(s1);
|
hasher.update(s1);
|
||||||
let d = hasher.finish();
|
let d = hasher.finish();
|
||||||
&mut dest[written..(written + 32)].copy_from_slice(&d);
|
&mut dest[written..(written + 32)].copy_from_slice(&d);
|
||||||
|
@ -35,9 +35,8 @@ impl Label for u32 {
|
|||||||
fn len() -> usize { 4 }
|
fn len() -> usize { 4 }
|
||||||
|
|
||||||
fn store(&self, target: &mut [u8]) {
|
fn store(&self, target: &mut [u8]) {
|
||||||
use byteorder::{BigEndian, ByteOrder};
|
let bytes = self.to_be_bytes();
|
||||||
|
target[0..4].copy_from_slice(&bytes);
|
||||||
BigEndian::write_u32(&mut target[0..4], *self);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -64,7 +63,7 @@ impl Label for H256 {
|
|||||||
fn len() -> usize { 32 }
|
fn len() -> usize { 32 }
|
||||||
|
|
||||||
fn store(&self, target: &mut [u8]) {
|
fn store(&self, target: &mut [u8]) {
|
||||||
self.copy_to(&mut target[0..32]);
|
(&mut target[0..32]).copy_from_slice(self.as_bytes());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -180,7 +179,7 @@ impl ExtendedKeyPair {
|
|||||||
pub fn with_seed(seed: &[u8]) -> Result<ExtendedKeyPair, DerivationError> {
|
pub fn with_seed(seed: &[u8]) -> Result<ExtendedKeyPair, DerivationError> {
|
||||||
let (master_key, chain_code) = derivation::seed_pair(seed);
|
let (master_key, chain_code) = derivation::seed_pair(seed);
|
||||||
Ok(ExtendedKeyPair::with_secret(
|
Ok(ExtendedKeyPair::with_secret(
|
||||||
Secret::from_unsafe_slice(&*master_key).map_err(|_| DerivationError::InvalidSeed)?,
|
Secret::from_unsafe_slice(master_key.as_bytes()).map_err(|_| DerivationError::InvalidSeed)?,
|
||||||
chain_code,
|
chain_code,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
@ -208,12 +207,13 @@ impl ExtendedKeyPair {
|
|||||||
// https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
|
// https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki
|
||||||
mod derivation {
|
mod derivation {
|
||||||
use parity_crypto::hmac;
|
use parity_crypto::hmac;
|
||||||
use ethereum_types::{U256, U512, H512, H256};
|
use ethereum_types::{BigEndianHash, U256, U512, H512, H256};
|
||||||
use secp256k1::key::{SecretKey, PublicKey};
|
use secp256k1::key::{SecretKey, PublicKey};
|
||||||
use SECP256K1;
|
use SECP256K1;
|
||||||
use keccak;
|
use keccak;
|
||||||
use math::curve_order;
|
use math::curve_order;
|
||||||
use super::{Label, Derivation};
|
use super::{Label, Derivation};
|
||||||
|
use std::convert::TryInto;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
@ -237,18 +237,18 @@ mod derivation {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn hmac_pair(data: &[u8], private_key: H256, chain_code: H256) -> (H256, H256) {
|
fn hmac_pair(data: &[u8], private_key: H256, chain_code: H256) -> (H256, H256) {
|
||||||
let private: U256 = private_key.into();
|
let private: U256 = private_key.into_uint();
|
||||||
|
|
||||||
// produces 512-bit derived hmac (I)
|
// produces 512-bit derived hmac (I)
|
||||||
let skey = hmac::SigKey::sha512(&*chain_code);
|
let skey = hmac::SigKey::sha512(chain_code.as_bytes());
|
||||||
let i_512 = hmac::sign(&skey, &data[..]);
|
let i_512 = hmac::sign(&skey, &data[..]);
|
||||||
|
|
||||||
// left most 256 bits are later added to original private key
|
// left most 256 bits are later added to original private key
|
||||||
let hmac_key: U256 = H256::from_slice(&i_512[0..32]).into();
|
let hmac_key: U256 = H256::from_slice(&i_512[0..32]).into_uint();
|
||||||
// right most 256 bits are new chain code for later derivations
|
// right most 256 bits are new chain code for later derivations
|
||||||
let next_chain_code = H256::from(&i_512[32..64]);
|
let next_chain_code = H256::from_slice(&i_512[32..64]);
|
||||||
|
|
||||||
let child_key = private_add(hmac_key, private).into();
|
let child_key = BigEndianHash::from_uint(&private_add(hmac_key, private));
|
||||||
(child_key, next_chain_code)
|
(child_key, next_chain_code)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -257,7 +257,7 @@ mod derivation {
|
|||||||
fn private_soft<T>(private_key: H256, chain_code: H256, index: T) -> (H256, H256) where T: Label {
|
fn private_soft<T>(private_key: H256, chain_code: H256, index: T) -> (H256, H256) where T: Label {
|
||||||
let mut data = vec![0u8; 33 + T::len()];
|
let mut data = vec![0u8; 33 + T::len()];
|
||||||
|
|
||||||
let sec_private = SecretKey::from_slice(&SECP256K1, &*private_key)
|
let sec_private = SecretKey::from_slice(&SECP256K1, private_key.as_bytes())
|
||||||
.expect("Caller should provide valid private key");
|
.expect("Caller should provide valid private key");
|
||||||
let sec_public = PublicKey::from_secret_key(&SECP256K1, &sec_private)
|
let sec_public = PublicKey::from_secret_key(&SECP256K1, &sec_private)
|
||||||
.expect("Caller should provide valid private key");
|
.expect("Caller should provide valid private key");
|
||||||
@ -276,7 +276,7 @@ mod derivation {
|
|||||||
// corresponding public keys of the original and derived private keys
|
// corresponding public keys of the original and derived private keys
|
||||||
fn private_hard<T>(private_key: H256, chain_code: H256, index: T) -> (H256, H256) where T: Label {
|
fn private_hard<T>(private_key: H256, chain_code: H256, index: T) -> (H256, H256) where T: Label {
|
||||||
let mut data: Vec<u8> = vec![0u8; 33 + T::len()];
|
let mut data: Vec<u8> = vec![0u8; 33 + T::len()];
|
||||||
let private: U256 = private_key.into();
|
let private: U256 = private_key.into_uint();
|
||||||
|
|
||||||
// 0x00 (padding) -- private_key -- index
|
// 0x00 (padding) -- private_key -- index
|
||||||
// 0 -- 1..33 -- 33..end
|
// 0 -- 1..33 -- 33..end
|
||||||
@ -293,9 +293,8 @@ mod derivation {
|
|||||||
|
|
||||||
// todo: surely can be optimized
|
// todo: surely can be optimized
|
||||||
fn modulo(u1: U512, u2: U256) -> U256 {
|
fn modulo(u1: U512, u2: U256) -> U256 {
|
||||||
let dv = u1 / U512::from(u2);
|
let m = u1 % U512::from(u2);
|
||||||
let md = u1 - (dv * U512::from(u2));
|
m.try_into().expect("U512 modulo U256 should fit into U256; qed")
|
||||||
md.into()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn public<T>(public_key: H512, chain_code: H256, derivation: Derivation<T>) -> Result<(H512, H256), Error> where T: Label {
|
pub fn public<T>(public_key: H512, chain_code: H256, derivation: Derivation<T>) -> Result<(H512, H256), Error> where T: Label {
|
||||||
@ -306,7 +305,7 @@ mod derivation {
|
|||||||
|
|
||||||
let mut public_sec_raw = [0u8; 65];
|
let mut public_sec_raw = [0u8; 65];
|
||||||
public_sec_raw[0] = 4;
|
public_sec_raw[0] = 4;
|
||||||
public_sec_raw[1..65].copy_from_slice(&*public_key);
|
public_sec_raw[1..65].copy_from_slice(public_key.as_bytes());
|
||||||
let public_sec = PublicKey::from_slice(&SECP256K1, &public_sec_raw).map_err(|_| Error::InvalidPoint)?;
|
let public_sec = PublicKey::from_slice(&SECP256K1, &public_sec_raw).map_err(|_| Error::InvalidPoint)?;
|
||||||
let public_serialized = public_sec.serialize_vec(&SECP256K1, true);
|
let public_serialized = public_sec.serialize_vec(&SECP256K1, true);
|
||||||
|
|
||||||
@ -317,15 +316,15 @@ mod derivation {
|
|||||||
index.store(&mut data[33..(33 + T::len())]);
|
index.store(&mut data[33..(33 + T::len())]);
|
||||||
|
|
||||||
// HMAC512SHA produces [derived private(256); new chain code(256)]
|
// HMAC512SHA produces [derived private(256); new chain code(256)]
|
||||||
let skey = hmac::SigKey::sha512(&*chain_code);
|
let skey = hmac::SigKey::sha512(chain_code.as_bytes());
|
||||||
let i_512 = hmac::sign(&skey, &data[..]);
|
let i_512 = hmac::sign(&skey, &data[..]);
|
||||||
|
|
||||||
let new_private = H256::from(&i_512[0..32]);
|
let new_private = H256::from_slice(&i_512[0..32]);
|
||||||
let new_chain_code = H256::from(&i_512[32..64]);
|
let new_chain_code = H256::from_slice(&i_512[32..64]);
|
||||||
|
|
||||||
// Generated private key can (extremely rarely) be out of secp256k1 key field
|
// Generated private key can (extremely rarely) be out of secp256k1 key field
|
||||||
if curve_order() <= new_private.clone().into() { return Err(Error::MissingIndex); }
|
if curve_order() <= new_private.into_uint() { return Err(Error::MissingIndex); }
|
||||||
let new_private_sec = SecretKey::from_slice(&SECP256K1, &*new_private)
|
let new_private_sec = SecretKey::from_slice(&SECP256K1, new_private.as_bytes())
|
||||||
.expect("Private key belongs to the field [0..CURVE_ORDER) (checked above); So initializing can never fail; qed");
|
.expect("Private key belongs to the field [0..CURVE_ORDER) (checked above); So initializing can never fail; qed");
|
||||||
let mut new_public = PublicKey::from_secret_key(&SECP256K1, &new_private_sec)
|
let mut new_public = PublicKey::from_secret_key(&SECP256K1, &new_private_sec)
|
||||||
.expect("Valid private key produces valid public key");
|
.expect("Valid private key produces valid public key");
|
||||||
@ -337,7 +336,7 @@ mod derivation {
|
|||||||
let serialized = new_public.serialize_vec(&SECP256K1, false);
|
let serialized = new_public.serialize_vec(&SECP256K1, false);
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
H512::from(&serialized[1..65]),
|
H512::from_slice(&serialized[1..65]),
|
||||||
new_chain_code,
|
new_chain_code,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
@ -348,18 +347,18 @@ mod derivation {
|
|||||||
|
|
||||||
pub fn chain_code(secret: H256) -> H256 {
|
pub fn chain_code(secret: H256) -> H256 {
|
||||||
// 10,000 rounds of sha3
|
// 10,000 rounds of sha3
|
||||||
let mut running_sha3 = sha3(&*secret);
|
let mut running_sha3 = sha3(secret.as_bytes());
|
||||||
for _ in 0..99999 { running_sha3 = sha3(&*running_sha3); }
|
for _ in 0..99999 { running_sha3 = sha3(running_sha3.as_bytes()); }
|
||||||
running_sha3
|
running_sha3
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn point(secret: H256) -> Result<H512, Error> {
|
pub fn point(secret: H256) -> Result<H512, Error> {
|
||||||
let sec = SecretKey::from_slice(&SECP256K1, &*secret)
|
let sec = SecretKey::from_slice(&SECP256K1, secret.as_bytes())
|
||||||
.map_err(|_| Error::InvalidPoint)?;
|
.map_err(|_| Error::InvalidPoint)?;
|
||||||
let public_sec = PublicKey::from_secret_key(&SECP256K1, &sec)
|
let public_sec = PublicKey::from_secret_key(&SECP256K1, &sec)
|
||||||
.map_err(|_| Error::InvalidPoint)?;
|
.map_err(|_| Error::InvalidPoint)?;
|
||||||
let serialized = public_sec.serialize_vec(&SECP256K1, false);
|
let serialized = public_sec.serialize_vec(&SECP256K1, false);
|
||||||
Ok(H512::from(&serialized[1..65]))
|
Ok(H512::from_slice(&serialized[1..65]))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn seed_pair(seed: &[u8]) -> (H256, H256) {
|
pub fn seed_pair(seed: &[u8]) -> (H256, H256) {
|
||||||
@ -378,12 +377,13 @@ mod tests {
|
|||||||
use super::{ExtendedSecret, ExtendedPublic, ExtendedKeyPair};
|
use super::{ExtendedSecret, ExtendedPublic, ExtendedKeyPair};
|
||||||
use secret::Secret;
|
use secret::Secret;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
use ethereum_types::{H128, H256};
|
use ethereum_types::{H128, H256, H512};
|
||||||
use super::{derivation, Derivation};
|
use super::{derivation, Derivation};
|
||||||
|
|
||||||
fn master_chain_basic() -> (H256, H256) {
|
fn master_chain_basic() -> (H256, H256) {
|
||||||
let seed = H128::from_str("000102030405060708090a0b0c0d0e0f")
|
let seed = H128::from_str("000102030405060708090a0b0c0d0e0f")
|
||||||
.expect("Seed should be valid H128")
|
.expect("Seed should be valid H128")
|
||||||
|
.as_bytes()
|
||||||
.to_vec();
|
.to_vec();
|
||||||
|
|
||||||
derivation::seed_pair(&*seed)
|
derivation::seed_pair(&*seed)
|
||||||
@ -399,27 +399,39 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn smoky() {
|
fn smoky() {
|
||||||
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
||||||
let extended_secret = ExtendedSecret::with_code(secret.clone(), 0u64.into());
|
let extended_secret = ExtendedSecret::with_code(secret.clone(), H256::zero());
|
||||||
|
|
||||||
// hardened
|
// hardened
|
||||||
assert_eq!(&**extended_secret.as_raw(), &*secret);
|
assert_eq!(&**extended_secret.as_raw(), &*secret);
|
||||||
assert_eq!(&**extended_secret.derive(2147483648.into()).as_raw(), &"0927453daed47839608e414a3738dfad10aed17c459bbd9ab53f89b026c834b6".into());
|
assert_eq!(
|
||||||
assert_eq!(&**extended_secret.derive(2147483649.into()).as_raw(), &"44238b6a29c6dcbe9b401364141ba11e2198c289a5fed243a1c11af35c19dc0f".into());
|
**extended_secret.derive(2147483648.into()).as_raw(),
|
||||||
|
H256::from_str("0927453daed47839608e414a3738dfad10aed17c459bbd9ab53f89b026c834b6").unwrap(),
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
**extended_secret.derive(2147483649.into()).as_raw(),
|
||||||
|
H256::from_str("44238b6a29c6dcbe9b401364141ba11e2198c289a5fed243a1c11af35c19dc0f").unwrap(),
|
||||||
|
);
|
||||||
|
|
||||||
// normal
|
// normal
|
||||||
assert_eq!(&**extended_secret.derive(0.into()).as_raw(), &"bf6a74e3f7b36fc4c96a1e12f31abc817f9f5904f5a8fc27713163d1f0b713f6".into());
|
assert_eq!(**extended_secret.derive(0.into()).as_raw(), H256::from_str("bf6a74e3f7b36fc4c96a1e12f31abc817f9f5904f5a8fc27713163d1f0b713f6").unwrap());
|
||||||
assert_eq!(&**extended_secret.derive(1.into()).as_raw(), &"bd4fca9eb1f9c201e9448c1eecd66e302d68d4d313ce895b8c134f512205c1bc".into());
|
assert_eq!(**extended_secret.derive(1.into()).as_raw(), H256::from_str("bd4fca9eb1f9c201e9448c1eecd66e302d68d4d313ce895b8c134f512205c1bc").unwrap());
|
||||||
assert_eq!(&**extended_secret.derive(2.into()).as_raw(), &"86932b542d6cab4d9c65490c7ef502d89ecc0e2a5f4852157649e3251e2a3268".into());
|
assert_eq!(**extended_secret.derive(2.into()).as_raw(), H256::from_str("86932b542d6cab4d9c65490c7ef502d89ecc0e2a5f4852157649e3251e2a3268").unwrap());
|
||||||
|
|
||||||
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
||||||
let derived_public = extended_public.derive(0.into()).expect("First derivation of public should succeed");
|
let derived_public = extended_public.derive(0.into()).expect("First derivation of public should succeed");
|
||||||
assert_eq!(&*derived_public.public(), &"f7b3244c96688f92372bfd4def26dc4151529747bab9f188a4ad34e141d47bd66522ff048bc6f19a0a4429b04318b1a8796c000265b4fa200dae5f6dda92dd94".into());
|
assert_eq!(
|
||||||
|
*derived_public.public(),
|
||||||
|
H512::from_str("f7b3244c96688f92372bfd4def26dc4151529747bab9f188a4ad34e141d47bd66522ff048bc6f19a0a4429b04318b1a8796c000265b4fa200dae5f6dda92dd94").unwrap(),
|
||||||
|
);
|
||||||
|
|
||||||
let keypair = ExtendedKeyPair::with_secret(
|
let keypair = ExtendedKeyPair::with_secret(
|
||||||
Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap(),
|
Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap(),
|
||||||
064.into(),
|
H256::from_low_u64_be(64),
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
**keypair.derive(2147483648u32.into()).expect("Derivation of keypair should succeed").secret().as_raw(),
|
||||||
|
H256::from_str("edef54414c03196557cf73774bc97a645c9a1df2164ed34f0c2a78d1375a930c").unwrap(),
|
||||||
);
|
);
|
||||||
assert_eq!(&**keypair.derive(2147483648u32.into()).expect("Derivation of keypair should succeed").secret().as_raw(), &"edef54414c03196557cf73774bc97a645c9a1df2164ed34f0c2a78d1375a930c".into());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -427,7 +439,7 @@ mod tests {
|
|||||||
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
||||||
let derivation_secret = H256::from_str("51eaf04f9dbbc1417dc97e789edd0c37ecda88bac490434e367ea81b71b7b015").unwrap();
|
let derivation_secret = H256::from_str("51eaf04f9dbbc1417dc97e789edd0c37ecda88bac490434e367ea81b71b7b015").unwrap();
|
||||||
|
|
||||||
let extended_secret = ExtendedSecret::with_code(secret.clone(), 0u64.into());
|
let extended_secret = ExtendedSecret::with_code(secret.clone(), H256::zero());
|
||||||
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
||||||
|
|
||||||
let derived_secret0 = extended_secret.derive(Derivation::Soft(derivation_secret));
|
let derived_secret0 = extended_secret.derive(Derivation::Soft(derivation_secret));
|
||||||
@ -442,15 +454,18 @@ mod tests {
|
|||||||
fn h256_hard() {
|
fn h256_hard() {
|
||||||
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
||||||
let derivation_secret = H256::from_str("51eaf04f9dbbc1417dc97e789edd0c37ecda88bac490434e367ea81b71b7b015").unwrap();
|
let derivation_secret = H256::from_str("51eaf04f9dbbc1417dc97e789edd0c37ecda88bac490434e367ea81b71b7b015").unwrap();
|
||||||
let extended_secret = ExtendedSecret::with_code(secret.clone(), 1u64.into());
|
let extended_secret = ExtendedSecret::with_code(secret.clone(), H256::from_low_u64_be(1));
|
||||||
|
|
||||||
assert_eq!(&**extended_secret.derive(Derivation::Hard(derivation_secret)).as_raw(), &"2bc2d696fb744d77ff813b4a1ef0ad64e1e5188b622c54ba917acc5ebc7c5486".into());
|
assert_eq!(
|
||||||
|
**extended_secret.derive(Derivation::Hard(derivation_secret)).as_raw(),
|
||||||
|
H256::from_str("2bc2d696fb744d77ff813b4a1ef0ad64e1e5188b622c54ba917acc5ebc7c5486").unwrap(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn match_() {
|
fn match_() {
|
||||||
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
let secret = Secret::from_str("a100df7a048e50ed308ea696dc600215098141cb391e9527329df289f9383f65").unwrap();
|
||||||
let extended_secret = ExtendedSecret::with_code(secret.clone(), 1.into());
|
let extended_secret = ExtendedSecret::with_code(secret.clone(), H256::from_low_u64_be(1));
|
||||||
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
let extended_public = ExtendedPublic::from_secret(&extended_secret).expect("Extended public should be created");
|
||||||
|
|
||||||
let derived_secret0 = extended_secret.derive(0.into());
|
let derived_secret0 = extended_secret.derive(0.into());
|
||||||
@ -465,6 +480,7 @@ mod tests {
|
|||||||
fn test_seeds() {
|
fn test_seeds() {
|
||||||
let seed = H128::from_str("000102030405060708090a0b0c0d0e0f")
|
let seed = H128::from_str("000102030405060708090a0b0c0d0e0f")
|
||||||
.expect("Seed should be valid H128")
|
.expect("Seed should be valid H128")
|
||||||
|
.as_bytes()
|
||||||
.to_vec();
|
.to_vec();
|
||||||
|
|
||||||
// private key from bitcoin test vector
|
// private key from bitcoin test vector
|
||||||
|
@ -16,14 +16,13 @@
|
|||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use secp256k1::key;
|
use secp256k1::key;
|
||||||
use rustc_hex::ToHex;
|
|
||||||
use keccak::Keccak256;
|
|
||||||
use super::{Secret, Public, Address, SECP256K1, Error};
|
use super::{Secret, Public, Address, SECP256K1, Error};
|
||||||
|
use parity_crypto::Keccak256 as _;
|
||||||
|
|
||||||
pub fn public_to_address(public: &Public) -> Address {
|
pub fn public_to_address(public: &Public) -> Address {
|
||||||
let hash = public.keccak256();
|
let hash = public.keccak256();
|
||||||
let mut result = Address::default();
|
let mut result = Address::zero();
|
||||||
result.copy_from_slice(&hash[12..]);
|
result.as_bytes_mut().copy_from_slice(&hash[12..]);
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -36,9 +35,9 @@ pub struct KeyPair {
|
|||||||
|
|
||||||
impl fmt::Display for KeyPair {
|
impl fmt::Display for KeyPair {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||||
writeln!(f, "secret: {}", self.secret.to_hex())?;
|
writeln!(f, "secret: {:x}", self.secret)?;
|
||||||
writeln!(f, "public: {}", self.public.to_hex())?;
|
writeln!(f, "public: {:x}", self.public)?;
|
||||||
write!(f, "address: {}", self.address().to_hex())
|
write!(f, "address: {:x}", self.address())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -51,7 +50,7 @@ impl KeyPair {
|
|||||||
let serialized = pub_key.serialize_vec(context, false);
|
let serialized = pub_key.serialize_vec(context, false);
|
||||||
|
|
||||||
let mut public = Public::default();
|
let mut public = Public::default();
|
||||||
public.copy_from_slice(&serialized[1..65]);
|
public.as_bytes_mut().copy_from_slice(&serialized[1..65]);
|
||||||
|
|
||||||
let keypair = KeyPair {
|
let keypair = KeyPair {
|
||||||
secret: secret,
|
secret: secret,
|
||||||
@ -70,7 +69,7 @@ impl KeyPair {
|
|||||||
let serialized = publ.serialize_vec(context, false);
|
let serialized = publ.serialize_vec(context, false);
|
||||||
let secret = Secret::from(sec);
|
let secret = Secret::from(sec);
|
||||||
let mut public = Public::default();
|
let mut public = Public::default();
|
||||||
public.copy_from_slice(&serialized[1..65]);
|
public.as_bytes_mut().copy_from_slice(&serialized[1..65]);
|
||||||
|
|
||||||
KeyPair {
|
KeyPair {
|
||||||
secret: secret,
|
secret: secret,
|
||||||
|
@ -16,11 +16,9 @@
|
|||||||
|
|
||||||
// #![warn(missing_docs)]
|
// #![warn(missing_docs)]
|
||||||
|
|
||||||
extern crate byteorder;
|
|
||||||
extern crate edit_distance;
|
extern crate edit_distance;
|
||||||
extern crate parity_crypto;
|
extern crate parity_crypto;
|
||||||
extern crate ethereum_types;
|
extern crate ethereum_types;
|
||||||
extern crate memzero;
|
|
||||||
extern crate parity_wordlist;
|
extern crate parity_wordlist;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate quick_error;
|
extern crate quick_error;
|
||||||
@ -29,6 +27,7 @@ extern crate rustc_hex;
|
|||||||
extern crate secp256k1;
|
extern crate secp256k1;
|
||||||
extern crate serde;
|
extern crate serde;
|
||||||
extern crate tiny_keccak;
|
extern crate tiny_keccak;
|
||||||
|
extern crate zeroize;
|
||||||
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate lazy_static;
|
extern crate lazy_static;
|
||||||
|
@ -17,7 +17,7 @@
|
|||||||
use super::{SECP256K1, Public, Secret, Error};
|
use super::{SECP256K1, Public, Secret, Error};
|
||||||
use secp256k1::key;
|
use secp256k1::key;
|
||||||
use secp256k1::constants::{GENERATOR_X, GENERATOR_Y, CURVE_ORDER};
|
use secp256k1::constants::{GENERATOR_X, GENERATOR_Y, CURVE_ORDER};
|
||||||
use ethereum_types::{U256, H256};
|
use ethereum_types::{BigEndianHash as _, U256, H256};
|
||||||
|
|
||||||
/// Whether the public key is valid.
|
/// Whether the public key is valid.
|
||||||
pub fn public_is_valid(public: &Public) -> bool {
|
pub fn public_is_valid(public: &Public) -> bool {
|
||||||
@ -78,7 +78,7 @@ pub fn generation_point() -> Public {
|
|||||||
|
|
||||||
/// Return secp256k1 elliptic curve order
|
/// Return secp256k1 elliptic curve order
|
||||||
pub fn curve_order() -> U256 {
|
pub fn curve_order() -> U256 {
|
||||||
H256::from_slice(&CURVE_ORDER).into()
|
H256::from_slice(&CURVE_ORDER).into_uint()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn to_secp256k1_public(public: &Public) -> Result<key::PublicKey, Error> {
|
fn to_secp256k1_public(public: &Public) -> Result<key::PublicKey, Error> {
|
||||||
@ -93,7 +93,7 @@ fn to_secp256k1_public(public: &Public) -> Result<key::PublicKey, Error> {
|
|||||||
|
|
||||||
fn set_public(public: &mut Public, key_public: &key::PublicKey) {
|
fn set_public(public: &mut Public, key_public: &key::PublicKey) {
|
||||||
let key_public_serialized = key_public.serialize_vec(&SECP256K1, false);
|
let key_public_serialized = key_public.serialize_vec(&SECP256K1, false);
|
||||||
public.copy_from_slice(&key_public_serialized[1..65]);
|
public.as_bytes_mut().copy_from_slice(&key_public_serialized[1..65]);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -37,7 +37,7 @@ impl Generator for Prefix {
|
|||||||
fn generate(&mut self) -> Result<KeyPair, Error> {
|
fn generate(&mut self) -> Result<KeyPair, Error> {
|
||||||
for _ in 0..self.iterations {
|
for _ in 0..self.iterations {
|
||||||
let keypair = Random.generate()?;
|
let keypair = Random.generate()?;
|
||||||
if keypair.address().starts_with(&self.prefix) {
|
if keypair.address().as_ref().starts_with(&self.prefix) {
|
||||||
return Ok(keypair)
|
return Ok(keypair)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -54,6 +54,6 @@ mod tests {
|
|||||||
fn prefix_generator() {
|
fn prefix_generator() {
|
||||||
let prefix = vec![0xffu8];
|
let prefix = vec![0xffu8];
|
||||||
let keypair = Prefix::new(prefix.clone(), usize::max_value()).generate().unwrap();
|
let keypair = Prefix::new(prefix.clone(), usize::max_value()).generate().unwrap();
|
||||||
assert!(keypair.address().starts_with(&prefix));
|
assert!(keypair.address().as_bytes().starts_with(&prefix));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
// You should have received a copy of the GNU General Public License
|
// You should have received a copy of the GNU General Public License
|
||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use rand::os::OsRng;
|
use rand::rngs::OsRng;
|
||||||
use super::{Generator, KeyPair, SECP256K1};
|
use super::{Generator, KeyPair, SECP256K1};
|
||||||
|
|
||||||
/// Randomly generates new keypair, instantiating the RNG each time.
|
/// Randomly generates new keypair, instantiating the RNG each time.
|
||||||
|
@ -21,17 +21,23 @@ use rustc_hex::ToHex;
|
|||||||
use secp256k1::constants::{SECRET_KEY_SIZE as SECP256K1_SECRET_KEY_SIZE};
|
use secp256k1::constants::{SECRET_KEY_SIZE as SECP256K1_SECRET_KEY_SIZE};
|
||||||
use secp256k1::key;
|
use secp256k1::key;
|
||||||
use ethereum_types::H256;
|
use ethereum_types::H256;
|
||||||
use memzero::Memzero;
|
use zeroize::Zeroize;
|
||||||
use {Error, SECP256K1};
|
use {Error, SECP256K1};
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq)]
|
#[derive(Clone, PartialEq, Eq)]
|
||||||
pub struct Secret {
|
pub struct Secret {
|
||||||
inner: Memzero<H256>,
|
inner: H256,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for Secret {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
self.inner.0.zeroize()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToHex for Secret {
|
impl ToHex for Secret {
|
||||||
fn to_hex(&self) -> String {
|
fn to_hex(&self) -> String {
|
||||||
format!("{:x}", *self.inner)
|
format!("{:x}", self.inner)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -59,14 +65,14 @@ impl Secret {
|
|||||||
if key.len() != 32 {
|
if key.len() != 32 {
|
||||||
return None
|
return None
|
||||||
}
|
}
|
||||||
let mut h = H256::default();
|
let mut h = H256::zero();
|
||||||
h.copy_from_slice(&key[0..32]);
|
h.as_bytes_mut().copy_from_slice(&key[0..32]);
|
||||||
Some(Secret { inner: Memzero::from(h) })
|
Some(Secret { inner: h })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates zero key, which is invalid for crypto operations, but valid for math operation.
|
/// Creates zero key, which is invalid for crypto operations, but valid for math operation.
|
||||||
pub fn zero() -> Self {
|
pub fn zero() -> Self {
|
||||||
Secret { inner: Memzero::from(H256::default()) }
|
Secret { inner: H256::zero() }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Imports and validates the key.
|
/// Imports and validates the key.
|
||||||
@ -214,7 +220,7 @@ impl FromStr for Secret {
|
|||||||
|
|
||||||
impl From<[u8; 32]> for Secret {
|
impl From<[u8; 32]> for Secret {
|
||||||
fn from(k: [u8; 32]) -> Self {
|
fn from(k: [u8; 32]) -> Self {
|
||||||
Secret { inner: Memzero::from(H256(k)) }
|
Secret { inner: H256(k) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -68,24 +68,44 @@ impl Signature {
|
|||||||
/// Create a signature object from the sig.
|
/// Create a signature object from the sig.
|
||||||
pub fn from_rsv(r: &H256, s: &H256, v: u8) -> Self {
|
pub fn from_rsv(r: &H256, s: &H256, v: u8) -> Self {
|
||||||
let mut sig = [0u8; 65];
|
let mut sig = [0u8; 65];
|
||||||
sig[0..32].copy_from_slice(&r);
|
sig[0..32].copy_from_slice(r.as_ref());
|
||||||
sig[32..64].copy_from_slice(&s);
|
sig[32..64].copy_from_slice(s.as_ref());
|
||||||
sig[64] = v;
|
sig[64] = v;
|
||||||
Signature(sig)
|
Signature(sig)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if this is a "low" signature.
|
/// Check if this is a "low" signature.
|
||||||
pub fn is_low_s(&self) -> bool {
|
pub fn is_low_s(&self) -> bool {
|
||||||
H256::from_slice(self.s()) <= "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0".into()
|
// "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0"
|
||||||
|
const MASK: H256 = H256([
|
||||||
|
0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||||
|
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||||
|
0x5D, 0x57, 0x6E, 0x73, 0x57, 0xA4, 0x50, 0x1D,
|
||||||
|
0xDF, 0xE9, 0x2F, 0x46, 0x68, 0x1B, 0x20, 0xA0,
|
||||||
|
]);
|
||||||
|
H256::from_slice(self.s()) <= MASK
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if each component of the signature is in range.
|
/// Check if each component of the signature is in range.
|
||||||
pub fn is_valid(&self) -> bool {
|
pub fn is_valid(&self) -> bool {
|
||||||
|
// "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141"
|
||||||
|
const MASK: H256 = H256([
|
||||||
|
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
|
||||||
|
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe,
|
||||||
|
0xba, 0xae, 0xdc, 0xe6, 0xaf, 0x48, 0xa0, 0x3b,
|
||||||
|
0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x41,
|
||||||
|
]);
|
||||||
|
const ONE: H256 = H256([
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||||
|
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
|
||||||
|
]);
|
||||||
|
let r = H256::from_slice(self.r());
|
||||||
|
let s = H256::from_slice(self.s());
|
||||||
self.v() <= 1 &&
|
self.v() <= 1 &&
|
||||||
H256::from_slice(self.r()) < "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141".into() &&
|
r < MASK && r >= ONE &&
|
||||||
H256::from_slice(self.r()) >= 1.into() &&
|
s < MASK && s >= ONE
|
||||||
H256::from_slice(self.s()) < "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141".into() &&
|
|
||||||
H256::from_slice(self.s()) >= 1.into()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -190,7 +210,7 @@ impl DerefMut for Signature {
|
|||||||
|
|
||||||
pub fn sign(secret: &Secret, message: &Message) -> Result<Signature, Error> {
|
pub fn sign(secret: &Secret, message: &Message) -> Result<Signature, Error> {
|
||||||
let context = &SECP256K1;
|
let context = &SECP256K1;
|
||||||
let sec = SecretKey::from_slice(context, &secret)?;
|
let sec = SecretKey::from_slice(context, secret.as_ref())?;
|
||||||
let s = context.sign_recoverable(&SecpMessage::from_slice(&message[..])?, &sec)?;
|
let s = context.sign_recoverable(&SecpMessage::from_slice(&message[..])?, &sec)?;
|
||||||
let (rec_id, data) = s.serialize_compact(context);
|
let (rec_id, data) = s.serialize_compact(context);
|
||||||
let mut data_arr = [0; 65];
|
let mut data_arr = [0; 65];
|
||||||
@ -208,7 +228,7 @@ pub fn verify_public(public: &Public, signature: &Signature, message: &Message)
|
|||||||
|
|
||||||
let pdata: [u8; 65] = {
|
let pdata: [u8; 65] = {
|
||||||
let mut temp = [4u8; 65];
|
let mut temp = [4u8; 65];
|
||||||
temp[1..65].copy_from_slice(&**public);
|
temp[1..65].copy_from_slice(public.as_bytes());
|
||||||
temp
|
temp
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -233,7 +253,7 @@ pub fn recover(signature: &Signature, message: &Message) -> Result<Public, Error
|
|||||||
let serialized = pubkey.serialize_vec(context, false);
|
let serialized = pubkey.serialize_vec(context, false);
|
||||||
|
|
||||||
let mut public = Public::default();
|
let mut public = Public::default();
|
||||||
public.copy_from_slice(&serialized[1..65]);
|
public.as_bytes_mut().copy_from_slice(&serialized[1..65]);
|
||||||
Ok(public)
|
Ok(public)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Key Management"
|
||||||
name = "ethstore"
|
name = "ethstore"
|
||||||
version = "0.2.1"
|
version = "0.2.1"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
@ -6,7 +7,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
libc = "0.2"
|
libc = "0.2"
|
||||||
rand = "0.4"
|
rand = "0.6"
|
||||||
ethkey = { path = "../ethkey" }
|
ethkey = { path = "../ethkey" }
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
@ -16,13 +17,12 @@ tiny-keccak = "1.4"
|
|||||||
time = "0.1.34"
|
time = "0.1.34"
|
||||||
itertools = "0.5"
|
itertools = "0.5"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
parity-crypto = "0.3.0"
|
parity-crypto = "0.4.0"
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
dir = { path = "../../util/dir" }
|
dir = { path = "../../util/dir" }
|
||||||
smallvec = "0.6"
|
smallvec = "0.6"
|
||||||
parity-wordlist = "1.0"
|
parity-wordlist = "1.0"
|
||||||
tempdir = "0.3"
|
tempdir = "0.3"
|
||||||
lazy_static = "1.2.0"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
matches = "0.1"
|
matches = "0.1"
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Key Management CLI"
|
||||||
name = "ethstore-cli"
|
name = "ethstore-cli"
|
||||||
version = "0.1.1"
|
version = "0.1.1"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
@ -21,4 +22,4 @@ path = "src/main.rs"
|
|||||||
doc = false
|
doc = false
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tempdir = "0.3.5"
|
tempdir = "0.3"
|
||||||
|
@ -15,7 +15,6 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::str;
|
use std::str;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use ethkey::{Password, Secret};
|
use ethkey::{Password, Secret};
|
||||||
use {json, Error, crypto};
|
use {json, Error, crypto};
|
||||||
use crypto::Keccak256;
|
use crypto::Keccak256;
|
||||||
@ -74,12 +73,12 @@ impl From<Crypto> for String {
|
|||||||
|
|
||||||
impl Crypto {
|
impl Crypto {
|
||||||
/// Encrypt account secret
|
/// Encrypt account secret
|
||||||
pub fn with_secret(secret: &Secret, password: &Password, iterations: NonZeroU32) -> Result<Self, crypto::Error> {
|
pub fn with_secret(secret: &Secret, password: &Password, iterations: u32) -> Result<Self, crypto::Error> {
|
||||||
Crypto::with_plain(&*secret, password, iterations)
|
Crypto::with_plain(secret.as_ref(), password, iterations)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Encrypt custom plain data
|
/// Encrypt custom plain data
|
||||||
pub fn with_plain(plain: &[u8], password: &Password, iterations: NonZeroU32) -> Result<Self, crypto::Error> {
|
pub fn with_plain(plain: &[u8], password: &Password, iterations: u32) -> Result<Self, crypto::Error> {
|
||||||
let salt: [u8; 32] = Random::random();
|
let salt: [u8; 32] = Random::random();
|
||||||
let iv: [u8; 16] = Random::random();
|
let iv: [u8; 16] = Random::random();
|
||||||
|
|
||||||
@ -160,17 +159,13 @@ impl Crypto {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use ethkey::{Generator, Random};
|
use ethkey::{Generator, Random};
|
||||||
use super::{Crypto, Error, NonZeroU32};
|
use super::{Crypto, Error};
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(10240).expect("10240 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn crypto_with_secret_create() {
|
fn crypto_with_secret_create() {
|
||||||
let keypair = Random.generate().unwrap();
|
let keypair = Random.generate().unwrap();
|
||||||
let passwd = "this is sparta".into();
|
let passwd = "this is sparta".into();
|
||||||
let crypto = Crypto::with_secret(keypair.secret(), &passwd, *ITERATIONS).unwrap();
|
let crypto = Crypto::with_secret(keypair.secret(), &passwd, 10240).unwrap();
|
||||||
let secret = crypto.secret(&passwd).unwrap();
|
let secret = crypto.secret(&passwd).unwrap();
|
||||||
assert_eq!(keypair.secret(), &secret);
|
assert_eq!(keypair.secret(), &secret);
|
||||||
}
|
}
|
||||||
@ -178,7 +173,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn crypto_with_secret_invalid_password() {
|
fn crypto_with_secret_invalid_password() {
|
||||||
let keypair = Random.generate().unwrap();
|
let keypair = Random.generate().unwrap();
|
||||||
let crypto = Crypto::with_secret(keypair.secret(), &"this is sparta".into(), *ITERATIONS).unwrap();
|
let crypto = Crypto::with_secret(keypair.secret(), &"this is sparta".into(), 10240).unwrap();
|
||||||
assert_matches!(crypto.secret(&"this is sparta!".into()), Err(Error::InvalidPassword))
|
assert_matches!(crypto.secret(&"this is sparta!".into()), Err(Error::InvalidPassword))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -186,7 +181,7 @@ mod tests {
|
|||||||
fn crypto_with_null_plain_data() {
|
fn crypto_with_null_plain_data() {
|
||||||
let original_data = b"";
|
let original_data = b"";
|
||||||
let passwd = "this is sparta".into();
|
let passwd = "this is sparta".into();
|
||||||
let crypto = Crypto::with_plain(&original_data[..], &passwd, *ITERATIONS).unwrap();
|
let crypto = Crypto::with_plain(&original_data[..], &passwd, 10240).unwrap();
|
||||||
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
||||||
assert_eq!(original_data[..], *decrypted_data);
|
assert_eq!(original_data[..], *decrypted_data);
|
||||||
}
|
}
|
||||||
@ -195,7 +190,7 @@ mod tests {
|
|||||||
fn crypto_with_tiny_plain_data() {
|
fn crypto_with_tiny_plain_data() {
|
||||||
let original_data = b"{}";
|
let original_data = b"{}";
|
||||||
let passwd = "this is sparta".into();
|
let passwd = "this is sparta".into();
|
||||||
let crypto = Crypto::with_plain(&original_data[..], &passwd, *ITERATIONS).unwrap();
|
let crypto = Crypto::with_plain(&original_data[..], &passwd, 10240).unwrap();
|
||||||
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
||||||
assert_eq!(original_data[..], *decrypted_data);
|
assert_eq!(original_data[..], *decrypted_data);
|
||||||
}
|
}
|
||||||
@ -204,7 +199,7 @@ mod tests {
|
|||||||
fn crypto_with_huge_plain_data() {
|
fn crypto_with_huge_plain_data() {
|
||||||
let original_data: Vec<_> = (1..65536).map(|i| (i % 256) as u8).collect();
|
let original_data: Vec<_> = (1..65536).map(|i| (i % 256) as u8).collect();
|
||||||
let passwd = "this is sparta".into();
|
let passwd = "this is sparta".into();
|
||||||
let crypto = Crypto::with_plain(&original_data, &passwd, *ITERATIONS).unwrap();
|
let crypto = Crypto::with_plain(&original_data, &passwd, 10240).unwrap();
|
||||||
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
let decrypted_data = crypto.decrypt(&passwd).unwrap();
|
||||||
assert_eq!(&original_data, &decrypted_data);
|
assert_eq!(&original_data, &decrypted_data);
|
||||||
}
|
}
|
||||||
|
@ -15,7 +15,6 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use json;
|
use json;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub enum Prf {
|
pub enum Prf {
|
||||||
@ -24,7 +23,7 @@ pub enum Prf {
|
|||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub struct Pbkdf2 {
|
pub struct Pbkdf2 {
|
||||||
pub c: NonZeroU32,
|
pub c: u32,
|
||||||
pub dklen: u32,
|
pub dklen: u32,
|
||||||
pub prf: Prf,
|
pub prf: Prf,
|
||||||
pub salt: Vec<u8>,
|
pub salt: Vec<u8>,
|
||||||
|
@ -20,7 +20,6 @@ use {json, Error};
|
|||||||
use account::Version;
|
use account::Version;
|
||||||
use crypto;
|
use crypto;
|
||||||
use super::crypto::Crypto;
|
use super::crypto::Crypto;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
|
|
||||||
/// Account representation.
|
/// Account representation.
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
@ -60,7 +59,7 @@ impl SafeAccount {
|
|||||||
keypair: &KeyPair,
|
keypair: &KeyPair,
|
||||||
id: [u8; 16],
|
id: [u8; 16],
|
||||||
password: &Password,
|
password: &Password,
|
||||||
iterations: NonZeroU32,
|
iterations: u32,
|
||||||
name: String,
|
name: String,
|
||||||
meta: String
|
meta: String
|
||||||
) -> Result<Self, crypto::Error> {
|
) -> Result<Self, crypto::Error> {
|
||||||
@ -136,7 +135,7 @@ impl SafeAccount {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new `VaultKeyFile` from the given `self`
|
/// Create a new `VaultKeyFile` from the given `self`
|
||||||
pub fn into_vault_file(self, iterations: NonZeroU32, password: &Password) -> Result<json::VaultKeyFile, Error> {
|
pub fn into_vault_file(self, iterations: u32, password: &Password) -> Result<json::VaultKeyFile, Error> {
|
||||||
let meta_plain = json::VaultKeyMeta {
|
let meta_plain = json::VaultKeyMeta {
|
||||||
address: self.address.into(),
|
address: self.address.into(),
|
||||||
name: Some(self.name),
|
name: Some(self.name),
|
||||||
@ -178,7 +177,7 @@ impl SafeAccount {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Change account's password.
|
/// Change account's password.
|
||||||
pub fn change_password(&self, old_password: &Password, new_password: &Password, iterations: NonZeroU32) -> Result<Self, Error> {
|
pub fn change_password(&self, old_password: &Password, new_password: &Password, iterations: u32) -> Result<Self, Error> {
|
||||||
let secret = self.crypto.secret(old_password)?;
|
let secret = self.crypto.secret(old_password)?;
|
||||||
let result = SafeAccount {
|
let result = SafeAccount {
|
||||||
id: self.id.clone(),
|
id: self.id.clone(),
|
||||||
@ -201,19 +200,14 @@ impl SafeAccount {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use ethkey::{Generator, Random, verify_public, Message};
|
use ethkey::{Generator, Random, verify_public, Message};
|
||||||
use super::{SafeAccount, NonZeroU32};
|
use super::SafeAccount;
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(10240).expect("10240 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn sign_and_verify_public() {
|
fn sign_and_verify_public() {
|
||||||
let keypair = Random.generate().unwrap();
|
let keypair = Random.generate().unwrap();
|
||||||
let password = "hello world".into();
|
let password = "hello world".into();
|
||||||
let message = Message::default();
|
let message = Message::default();
|
||||||
let account = SafeAccount::create(&keypair, [0u8; 16], &password, *ITERATIONS, "Test".to_owned(), "{}".to_owned());
|
let account = SafeAccount::create(&keypair, [0u8; 16], &password, 10240, "Test".to_owned(), "{}".to_owned());
|
||||||
let signature = account.unwrap().sign(&password, &message).unwrap();
|
let signature = account.unwrap().sign(&password, &message).unwrap();
|
||||||
assert!(verify_public(keypair.public(), &signature, &message).unwrap());
|
assert!(verify_public(keypair.public(), &signature, &message).unwrap());
|
||||||
}
|
}
|
||||||
@ -223,9 +217,10 @@ mod tests {
|
|||||||
let keypair = Random.generate().unwrap();
|
let keypair = Random.generate().unwrap();
|
||||||
let first_password = "hello world".into();
|
let first_password = "hello world".into();
|
||||||
let sec_password = "this is sparta".into();
|
let sec_password = "this is sparta".into();
|
||||||
|
let i = 10240;
|
||||||
let message = Message::default();
|
let message = Message::default();
|
||||||
let account = SafeAccount::create(&keypair, [0u8; 16], &first_password, *ITERATIONS, "Test".to_owned(), "{}".to_owned()).unwrap();
|
let account = SafeAccount::create(&keypair, [0u8; 16], &first_password, i, "Test".to_owned(), "{}".to_owned()).unwrap();
|
||||||
let new_account = account.change_password(&first_password, &sec_password, *ITERATIONS).unwrap();
|
let new_account = account.change_password(&first_password, &sec_password, i).unwrap();
|
||||||
assert!(account.sign(&first_password, &message).is_ok());
|
assert!(account.sign(&first_password, &message).is_ok());
|
||||||
assert!(account.sign(&sec_password, &message).is_err());
|
assert!(account.sign(&sec_password, &message).is_err());
|
||||||
assert!(new_account.sign(&first_password, &message).is_err());
|
assert!(new_account.sign(&first_password, &message).is_err());
|
||||||
|
@ -61,7 +61,6 @@ pub fn find_unique_filename_using_random_suffix(parent_path: &Path, original_fil
|
|||||||
/// Create a new file and restrict permissions to owner only. It errors if the file already exists.
|
/// Create a new file and restrict permissions to owner only. It errors if the file already exists.
|
||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
pub fn create_new_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
|
pub fn create_new_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
|
||||||
use libc;
|
|
||||||
use std::os::unix::fs::OpenOptionsExt;
|
use std::os::unix::fs::OpenOptionsExt;
|
||||||
|
|
||||||
fs::OpenOptions::new()
|
fs::OpenOptions::new()
|
||||||
@ -83,7 +82,6 @@ pub fn create_new_file_with_permissions_to_owner(file_path: &Path) -> io::Result
|
|||||||
/// Create a new file and restrict permissions to owner only. It replaces the existing file if it already exists.
|
/// Create a new file and restrict permissions to owner only. It replaces the existing file if it already exists.
|
||||||
#[cfg(unix)]
|
#[cfg(unix)]
|
||||||
pub fn replace_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
|
pub fn replace_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
|
||||||
use libc;
|
|
||||||
use std::os::unix::fs::PermissionsExt;
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
|
||||||
let file = fs::File::create(file_path)?;
|
let file = fs::File::create(file_path)?;
|
||||||
@ -356,16 +354,11 @@ mod test {
|
|||||||
extern crate tempdir;
|
extern crate tempdir;
|
||||||
|
|
||||||
use std::{env, fs};
|
use std::{env, fs};
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use super::{KeyDirectory, RootDiskDirectory, VaultKey};
|
use super::{KeyDirectory, RootDiskDirectory, VaultKey};
|
||||||
use account::SafeAccount;
|
use account::SafeAccount;
|
||||||
use ethkey::{Random, Generator};
|
use ethkey::{Random, Generator};
|
||||||
use self::tempdir::TempDir;
|
use self::tempdir::TempDir;
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(1024).expect("1024 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_create_new_account() {
|
fn should_create_new_account() {
|
||||||
// given
|
// given
|
||||||
@ -376,7 +369,7 @@ mod test {
|
|||||||
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
|
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
|
||||||
|
|
||||||
// when
|
// when
|
||||||
let account = SafeAccount::create(&keypair, [0u8; 16], &password, *ITERATIONS, "Test".to_owned(), "{}".to_owned());
|
let account = SafeAccount::create(&keypair, [0u8; 16], &password, 1024, "Test".to_owned(), "{}".to_owned());
|
||||||
let res = directory.insert(account.unwrap());
|
let res = directory.insert(account.unwrap());
|
||||||
|
|
||||||
// then
|
// then
|
||||||
@ -397,7 +390,7 @@ mod test {
|
|||||||
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
|
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
|
||||||
|
|
||||||
// when
|
// when
|
||||||
let account = SafeAccount::create(&keypair, [0u8; 16], &password, *ITERATIONS, "Test".to_owned(), "{}".to_owned()).unwrap();
|
let account = SafeAccount::create(&keypair, [0u8; 16], &password, 1024, "Test".to_owned(), "{}".to_owned()).unwrap();
|
||||||
let filename = "test".to_string();
|
let filename = "test".to_string();
|
||||||
let dedup = true;
|
let dedup = true;
|
||||||
|
|
||||||
@ -433,7 +426,7 @@ mod test {
|
|||||||
|
|
||||||
// and when
|
// and when
|
||||||
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
|
let before_root_items_count = fs::read_dir(&dir).unwrap().count();
|
||||||
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(&password, *ITERATIONS));
|
let vault = directory.as_vault_provider().unwrap().create(vault_name, VaultKey::new(&password, 1024));
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assert!(vault.is_ok());
|
assert!(vault.is_ok());
|
||||||
@ -441,7 +434,7 @@ mod test {
|
|||||||
assert!(after_root_items_count > before_root_items_count);
|
assert!(after_root_items_count > before_root_items_count);
|
||||||
|
|
||||||
// and when
|
// and when
|
||||||
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(&password, *ITERATIONS));
|
let vault = directory.as_vault_provider().unwrap().open(vault_name, VaultKey::new(&password, 1024));
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assert!(vault.is_ok());
|
assert!(vault.is_ok());
|
||||||
@ -458,9 +451,8 @@ mod test {
|
|||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let directory = RootDiskDirectory::create(&temp_path).unwrap();
|
let directory = RootDiskDirectory::create(&temp_path).unwrap();
|
||||||
let vault_provider = directory.as_vault_provider().unwrap();
|
let vault_provider = directory.as_vault_provider().unwrap();
|
||||||
let iter = NonZeroU32::new(1).expect("1 > 0; qed");
|
vault_provider.create("vault1", VaultKey::new(&"password1".into(), 1)).unwrap();
|
||||||
vault_provider.create("vault1", VaultKey::new(&"password1".into(), iter)).unwrap();
|
vault_provider.create("vault2", VaultKey::new(&"password2".into(), 1)).unwrap();
|
||||||
vault_provider.create("vault2", VaultKey::new(&"password2".into(), iter)).unwrap();
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
let vaults = vault_provider.list_vaults().unwrap();
|
let vaults = vault_provider.list_vaults().unwrap();
|
||||||
@ -482,7 +474,7 @@ mod test {
|
|||||||
|
|
||||||
let keypair = Random.generate().unwrap();
|
let keypair = Random.generate().unwrap();
|
||||||
let password = "test pass".into();
|
let password = "test pass".into();
|
||||||
let account = SafeAccount::create(&keypair, [0u8; 16], &password, *ITERATIONS, "Test".to_owned(), "{}".to_owned());
|
let account = SafeAccount::create(&keypair, [0u8; 16], &password, 1024, "Test".to_owned(), "{}".to_owned());
|
||||||
directory.insert(account.unwrap()).expect("Account should be inserted ok");
|
directory.insert(account.unwrap()).expect("Account should be inserted ok");
|
||||||
|
|
||||||
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
|
let new_hash = directory.files_hash().expect("New files hash should be calculated ok");
|
||||||
|
@ -68,7 +68,7 @@ impl KeyDirectory for MemoryDirectory {
|
|||||||
fn unique_repr(&self) -> Result<u64, Error> {
|
fn unique_repr(&self) -> Result<u64, Error> {
|
||||||
let mut val = 0u64;
|
let mut val = 0u64;
|
||||||
let accounts = self.accounts.read();
|
let accounts = self.accounts.read();
|
||||||
for acc in accounts.keys() { val = val ^ acc.low_u64() }
|
for acc in accounts.keys() { val = val ^ acc.to_low_u64_be() }
|
||||||
Ok(val)
|
Ok(val)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -17,7 +17,6 @@
|
|||||||
//! Accounts Directory
|
//! Accounts Directory
|
||||||
|
|
||||||
use ethkey::Password;
|
use ethkey::Password;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use std::path::{PathBuf};
|
use std::path::{PathBuf};
|
||||||
use {SafeAccount, Error};
|
use {SafeAccount, Error};
|
||||||
|
|
||||||
@ -42,7 +41,7 @@ pub struct VaultKey {
|
|||||||
/// Vault password
|
/// Vault password
|
||||||
pub password: Password,
|
pub password: Password,
|
||||||
/// Number of iterations to produce a derived key from password
|
/// Number of iterations to produce a derived key from password
|
||||||
pub iterations: NonZeroU32,
|
pub iterations: u32,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Keys directory
|
/// Keys directory
|
||||||
@ -97,7 +96,7 @@ pub use self::vault::VaultDiskDirectory;
|
|||||||
|
|
||||||
impl VaultKey {
|
impl VaultKey {
|
||||||
/// Create new vault key
|
/// Create new vault key
|
||||||
pub fn new(password: &Password, iterations: NonZeroU32) -> Self {
|
pub fn new(password: &Password, iterations: u32) -> Self {
|
||||||
VaultKey {
|
VaultKey {
|
||||||
password: password.clone(),
|
password: password.clone(),
|
||||||
iterations: iterations,
|
iterations: iterations,
|
||||||
|
@ -282,17 +282,11 @@ mod test {
|
|||||||
|
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use super::VaultKey;
|
use super::VaultKey;
|
||||||
use super::{VAULT_FILE_NAME, check_vault_name, make_vault_dir_path, create_vault_file, read_vault_file, VaultDiskDirectory};
|
use super::{VAULT_FILE_NAME, check_vault_name, make_vault_dir_path, create_vault_file, read_vault_file, VaultDiskDirectory};
|
||||||
use self::tempdir::TempDir;
|
use self::tempdir::TempDir;
|
||||||
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(1024).expect("1024 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn check_vault_name_succeeds() {
|
fn check_vault_name_succeeds() {
|
||||||
assert!(check_vault_name("vault"));
|
assert!(check_vault_name("vault"));
|
||||||
@ -331,7 +325,7 @@ mod test {
|
|||||||
fn create_vault_file_succeeds() {
|
fn create_vault_file_succeeds() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password".into(), 1024);
|
||||||
let mut vault_dir: PathBuf = temp_path.path().into();
|
let mut vault_dir: PathBuf = temp_path.path().into();
|
||||||
vault_dir.push("vault");
|
vault_dir.push("vault");
|
||||||
fs::create_dir_all(&vault_dir).unwrap();
|
fs::create_dir_all(&vault_dir).unwrap();
|
||||||
@ -350,7 +344,7 @@ mod test {
|
|||||||
fn read_vault_file_succeeds() {
|
fn read_vault_file_succeeds() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password".into(), 1024);
|
||||||
let vault_file_contents = r#"{"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"758696c8dc6378ab9b25bb42790da2f5"},"ciphertext":"54eb50683717d41caaeb12ea969f2c159daada5907383f26f327606a37dc7168","kdf":"pbkdf2","kdfparams":{"c":1024,"dklen":32,"prf":"hmac-sha256","salt":"3c320fa566a1a7963ac8df68a19548d27c8f40bf92ef87c84594dcd5bbc402b6"},"mac":"9e5c2314c2a0781962db85611417c614bd6756666b6b1e93840f5b6ed895f003"}}"#;
|
let vault_file_contents = r#"{"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"758696c8dc6378ab9b25bb42790da2f5"},"ciphertext":"54eb50683717d41caaeb12ea969f2c159daada5907383f26f327606a37dc7168","kdf":"pbkdf2","kdfparams":{"c":1024,"dklen":32,"prf":"hmac-sha256","salt":"3c320fa566a1a7963ac8df68a19548d27c8f40bf92ef87c84594dcd5bbc402b6"},"mac":"9e5c2314c2a0781962db85611417c614bd6756666b6b1e93840f5b6ed895f003"}}"#;
|
||||||
let dir: PathBuf = temp_path.path().into();
|
let dir: PathBuf = temp_path.path().into();
|
||||||
let mut vault_file_path: PathBuf = dir.clone();
|
let mut vault_file_path: PathBuf = dir.clone();
|
||||||
@ -371,7 +365,7 @@ mod test {
|
|||||||
fn read_vault_file_fails() {
|
fn read_vault_file_fails() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password1".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password1".into(), 1024);
|
||||||
let dir: PathBuf = temp_path.path().into();
|
let dir: PathBuf = temp_path.path().into();
|
||||||
let mut vault_file_path: PathBuf = dir.clone();
|
let mut vault_file_path: PathBuf = dir.clone();
|
||||||
vault_file_path.push(VAULT_FILE_NAME);
|
vault_file_path.push(VAULT_FILE_NAME);
|
||||||
@ -400,7 +394,7 @@ mod test {
|
|||||||
fn vault_directory_can_be_created() {
|
fn vault_directory_can_be_created() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password".into(), 1024);
|
||||||
let dir: PathBuf = temp_path.path().into();
|
let dir: PathBuf = temp_path.path().into();
|
||||||
|
|
||||||
// when
|
// when
|
||||||
@ -420,7 +414,7 @@ mod test {
|
|||||||
fn vault_directory_cannot_be_created_if_already_exists() {
|
fn vault_directory_cannot_be_created_if_already_exists() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password".into(), 1024);
|
||||||
let dir: PathBuf = temp_path.path().into();
|
let dir: PathBuf = temp_path.path().into();
|
||||||
let mut vault_dir = dir.clone();
|
let mut vault_dir = dir.clone();
|
||||||
vault_dir.push("vault");
|
vault_dir.push("vault");
|
||||||
@ -437,7 +431,7 @@ mod test {
|
|||||||
fn vault_directory_cannot_be_opened_if_not_exists() {
|
fn vault_directory_cannot_be_opened_if_not_exists() {
|
||||||
// given
|
// given
|
||||||
let temp_path = TempDir::new("").unwrap();
|
let temp_path = TempDir::new("").unwrap();
|
||||||
let key = VaultKey::new(&"password".into(), *ITERATIONS);
|
let key = VaultKey::new(&"password".into(), 1024);
|
||||||
let dir: PathBuf = temp_path.path().into();
|
let dir: PathBuf = temp_path.path().into();
|
||||||
|
|
||||||
// when
|
// when
|
||||||
|
@ -15,12 +15,12 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::collections::{BTreeMap, HashMap};
|
use std::collections::{BTreeMap, HashMap};
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use parking_lot::{Mutex, RwLock};
|
use parking_lot::{Mutex, RwLock};
|
||||||
use std::time::{Instant, Duration};
|
use std::time::{Instant, Duration};
|
||||||
|
|
||||||
|
use crypto::KEY_ITERATIONS;
|
||||||
use random::Random;
|
use random::Random;
|
||||||
use ethkey::{self, Signature, Password, Address, Message, Secret, Public, KeyPair, ExtendedKeyPair};
|
use ethkey::{self, Signature, Password, Address, Message, Secret, Public, KeyPair, ExtendedKeyPair};
|
||||||
use accounts_dir::{KeyDirectory, VaultKeyDirectory, VaultKey, SetKeyError};
|
use accounts_dir::{KeyDirectory, VaultKeyDirectory, VaultKey, SetKeyError};
|
||||||
@ -29,12 +29,6 @@ use presale::PresaleWallet;
|
|||||||
use json::{self, Uuid, OpaqueKeyFile};
|
use json::{self, Uuid, OpaqueKeyFile};
|
||||||
use {import, Error, SimpleSecretStore, SecretStore, SecretVaultRef, StoreAccountRef, Derivation, OpaqueSecret};
|
use {import, Error, SimpleSecretStore, SecretStore, SecretVaultRef, StoreAccountRef, Derivation, OpaqueSecret};
|
||||||
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref KEY_ITERATIONS: NonZeroU32 =
|
|
||||||
NonZeroU32::new(crypto::KEY_ITERATIONS as u32).expect("KEY_ITERATIONS > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Accounts store.
|
/// Accounts store.
|
||||||
pub struct EthStore {
|
pub struct EthStore {
|
||||||
store: EthMultiStore,
|
store: EthMultiStore,
|
||||||
@ -43,11 +37,11 @@ pub struct EthStore {
|
|||||||
impl EthStore {
|
impl EthStore {
|
||||||
/// Open a new accounts store with given key directory backend.
|
/// Open a new accounts store with given key directory backend.
|
||||||
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
|
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
|
||||||
Self::open_with_iterations(directory, *KEY_ITERATIONS)
|
Self::open_with_iterations(directory, KEY_ITERATIONS as u32)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Open a new account store with given key directory backend and custom number of iterations.
|
/// Open a new account store with given key directory backend and custom number of iterations.
|
||||||
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: NonZeroU32) -> Result<Self, Error> {
|
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> {
|
||||||
Ok(EthStore {
|
Ok(EthStore {
|
||||||
store: EthMultiStore::open_with_iterations(directory, iterations)?,
|
store: EthMultiStore::open_with_iterations(directory, iterations)?,
|
||||||
})
|
})
|
||||||
@ -263,7 +257,7 @@ impl SecretStore for EthStore {
|
|||||||
/// Similar to `EthStore` but may store many accounts (with different passwords) for the same `Address`
|
/// Similar to `EthStore` but may store many accounts (with different passwords) for the same `Address`
|
||||||
pub struct EthMultiStore {
|
pub struct EthMultiStore {
|
||||||
dir: Box<KeyDirectory>,
|
dir: Box<KeyDirectory>,
|
||||||
iterations: NonZeroU32,
|
iterations: u32,
|
||||||
// order lock: cache, then vaults
|
// order lock: cache, then vaults
|
||||||
cache: RwLock<BTreeMap<StoreAccountRef, Vec<SafeAccount>>>,
|
cache: RwLock<BTreeMap<StoreAccountRef, Vec<SafeAccount>>>,
|
||||||
vaults: Mutex<HashMap<String, Box<VaultKeyDirectory>>>,
|
vaults: Mutex<HashMap<String, Box<VaultKeyDirectory>>>,
|
||||||
@ -279,11 +273,11 @@ struct Timestamp {
|
|||||||
impl EthMultiStore {
|
impl EthMultiStore {
|
||||||
/// Open new multi-accounts store with given key directory backend.
|
/// Open new multi-accounts store with given key directory backend.
|
||||||
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
|
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
|
||||||
Self::open_with_iterations(directory, *KEY_ITERATIONS)
|
Self::open_with_iterations(directory, KEY_ITERATIONS as u32)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Open new multi-accounts store with given key directory backend and custom number of iterations for new keys.
|
/// Open new multi-accounts store with given key directory backend and custom number of iterations for new keys.
|
||||||
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: NonZeroU32) -> Result<Self, Error> {
|
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> {
|
||||||
let store = EthMultiStore {
|
let store = EthMultiStore {
|
||||||
dir: directory,
|
dir: directory,
|
||||||
vaults: Mutex::new(HashMap::new()),
|
vaults: Mutex::new(HashMap::new()),
|
||||||
@ -1090,7 +1084,7 @@ mod tests {
|
|||||||
SecretVaultRef::Root,
|
SecretVaultRef::Root,
|
||||||
&address,
|
&address,
|
||||||
&"test".into(),
|
&"test".into(),
|
||||||
Derivation::HardHash(H256::from(0)),
|
Derivation::HardHash(H256::zero()),
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
|
||||||
// there should be 2 accounts in the store
|
// there should be 2 accounts in the store
|
||||||
|
@ -41,7 +41,7 @@ impl str::FromStr for Crypto {
|
|||||||
|
|
||||||
impl From<Crypto> for String {
|
impl From<Crypto> for String {
|
||||||
fn from(c: Crypto) -> Self {
|
fn from(c: Crypto) -> Self {
|
||||||
serde_json::to_string(&c).expect("serialization cannot fail, cause all crypto keys are strings")
|
serde_json::to_string(&c).expect("Serialization cannot fail, because all crypto keys are strings")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -15,7 +15,6 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use serde::{Serialize, Serializer, Deserialize, Deserializer};
|
use serde::{Serialize, Serializer, Deserialize, Deserializer};
|
||||||
use serde::de::{Visitor, Error as SerdeError};
|
use serde::de::{Visitor, Error as SerdeError};
|
||||||
use super::{Error, Bytes};
|
use super::{Error, Bytes};
|
||||||
@ -109,7 +108,7 @@ impl<'a> Visitor<'a> for PrfVisitor {
|
|||||||
|
|
||||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||||
pub struct Pbkdf2 {
|
pub struct Pbkdf2 {
|
||||||
pub c: NonZeroU32,
|
pub c: u32,
|
||||||
pub dklen: u32,
|
pub dklen: u32,
|
||||||
pub prf: Prf,
|
pub prf: Prf,
|
||||||
pub salt: Bytes,
|
pub salt: Bytes,
|
||||||
|
@ -41,11 +41,6 @@ impl VaultFile {
|
|||||||
mod test {
|
mod test {
|
||||||
use serde_json;
|
use serde_json;
|
||||||
use json::{VaultFile, Crypto, Cipher, Aes128Ctr, Kdf, Pbkdf2, Prf};
|
use json::{VaultFile, Crypto, Cipher, Aes128Ctr, Kdf, Pbkdf2, Prf};
|
||||||
use std::num::NonZeroU32;
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(1024).expect("1024 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn to_and_from_json() {
|
fn to_and_from_json() {
|
||||||
@ -56,7 +51,7 @@ mod test {
|
|||||||
}),
|
}),
|
||||||
ciphertext: "4d6938a1f49b7782".into(),
|
ciphertext: "4d6938a1f49b7782".into(),
|
||||||
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
||||||
c: *ITERATIONS,
|
c: 1024,
|
||||||
dklen: 32,
|
dklen: 32,
|
||||||
prf: Prf::HmacSha256,
|
prf: Prf::HmacSha256,
|
||||||
salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(),
|
salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(),
|
||||||
@ -81,7 +76,7 @@ mod test {
|
|||||||
}),
|
}),
|
||||||
ciphertext: "4d6938a1f49b7782".into(),
|
ciphertext: "4d6938a1f49b7782".into(),
|
||||||
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
||||||
c: *ITERATIONS,
|
c: 1024,
|
||||||
dklen: 32,
|
dklen: 32,
|
||||||
prf: Prf::HmacSha256,
|
prf: Prf::HmacSha256,
|
||||||
salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(),
|
salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(),
|
||||||
|
@ -106,11 +106,6 @@ mod test {
|
|||||||
use serde_json;
|
use serde_json;
|
||||||
use json::{VaultKeyFile, Version, Crypto, Cipher, Aes128Ctr, Kdf, Pbkdf2, Prf,
|
use json::{VaultKeyFile, Version, Crypto, Cipher, Aes128Ctr, Kdf, Pbkdf2, Prf,
|
||||||
insert_vault_name_to_json_meta, remove_vault_name_from_json_meta};
|
insert_vault_name_to_json_meta, remove_vault_name_from_json_meta};
|
||||||
use std::num::NonZeroU32;
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(10240).expect("10240 > 0; qed");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn to_and_from_json() {
|
fn to_and_from_json() {
|
||||||
@ -123,7 +118,7 @@ mod test {
|
|||||||
}),
|
}),
|
||||||
ciphertext: "4befe0a66d9a4b6fec8e39eb5c90ac5dafdeaab005fff1af665fd1f9af925c91".into(),
|
ciphertext: "4befe0a66d9a4b6fec8e39eb5c90ac5dafdeaab005fff1af665fd1f9af925c91".into(),
|
||||||
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
||||||
c: *ITERATIONS,
|
c: 10240,
|
||||||
dklen: 32,
|
dklen: 32,
|
||||||
prf: Prf::HmacSha256,
|
prf: Prf::HmacSha256,
|
||||||
salt: "f17731e84ecac390546692dbd4ccf6a3a2720dc9652984978381e61c28a471b2".into(),
|
salt: "f17731e84ecac390546692dbd4ccf6a3a2720dc9652984978381e61c28a471b2".into(),
|
||||||
@ -136,7 +131,7 @@ mod test {
|
|||||||
}),
|
}),
|
||||||
ciphertext: "fef0d113d7576c1702daf380ad6f4c5408389e57991cae2a174facd74bd549338e1014850bddbab7eb486ff5f5c9c5532800c6a6d4db2be2212cd5cd3769244ab230e1f369e8382a9e6d7c0a".into(),
|
ciphertext: "fef0d113d7576c1702daf380ad6f4c5408389e57991cae2a174facd74bd549338e1014850bddbab7eb486ff5f5c9c5532800c6a6d4db2be2212cd5cd3769244ab230e1f369e8382a9e6d7c0a".into(),
|
||||||
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
kdf: Kdf::Pbkdf2(Pbkdf2 {
|
||||||
c: *ITERATIONS,
|
c: 10240,
|
||||||
dklen: 32,
|
dklen: 32,
|
||||||
prf: Prf::HmacSha256,
|
prf: Prf::HmacSha256,
|
||||||
salt: "aca82865174a82249a198814b263f43a631f272cbf7ed329d0f0839d259c652a".into(),
|
salt: "aca82865174a82249a198814b263f43a631f272cbf7ed329d0f0839d259c652a".into(),
|
||||||
|
@ -36,8 +36,6 @@ extern crate ethereum_types;
|
|||||||
extern crate ethkey as _ethkey;
|
extern crate ethkey as _ethkey;
|
||||||
extern crate parity_wordlist;
|
extern crate parity_wordlist;
|
||||||
|
|
||||||
#[macro_use]
|
|
||||||
extern crate lazy_static;
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate log;
|
extern crate log;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
|
@ -15,7 +15,6 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::num::NonZeroU32;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use json;
|
use json;
|
||||||
use ethkey::{Address, Secret, KeyPair, Password};
|
use ethkey::{Address, Secret, KeyPair, Password};
|
||||||
@ -59,8 +58,7 @@ impl PresaleWallet {
|
|||||||
let mut derived_key = [0u8; 32];
|
let mut derived_key = [0u8; 32];
|
||||||
let salt = pbkdf2::Salt(password.as_bytes());
|
let salt = pbkdf2::Salt(password.as_bytes());
|
||||||
let sec = pbkdf2::Secret(password.as_bytes());
|
let sec = pbkdf2::Secret(password.as_bytes());
|
||||||
let iter = NonZeroU32::new(2000).expect("2000 > 0; qed");
|
pbkdf2::sha256(2000, salt, sec, &mut derived_key);
|
||||||
pbkdf2::sha256(iter, salt, sec, &mut derived_key);
|
|
||||||
|
|
||||||
let mut key = vec![0; self.ciphertext.len()];
|
let mut key = vec![0; self.ciphertext.len()];
|
||||||
let len = crypto::aes::decrypt_128_cbc(&derived_key[0..16], &self.iv, &self.ciphertext, &mut key)
|
let len = crypto::aes::decrypt_128_cbc(&derived_key[0..16], &self.iv, &self.ciphertext, &mut key)
|
||||||
|
@ -14,7 +14,7 @@
|
|||||||
// You should have received a copy of the GNU General Public License
|
// You should have received a copy of the GNU General Public License
|
||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use rand::{Rng, OsRng};
|
use rand::{Rng, RngCore, rngs::OsRng, distributions::Alphanumeric};
|
||||||
|
|
||||||
pub trait Random {
|
pub trait Random {
|
||||||
fn random() -> Self where Self: Sized;
|
fn random() -> Self where Self: Sized;
|
||||||
@ -41,5 +41,5 @@ impl Random for [u8; 32] {
|
|||||||
/// Generate a random string of given length.
|
/// Generate a random string of given length.
|
||||||
pub fn random_string(length: usize) -> String {
|
pub fn random_string(length: usize) -> String {
|
||||||
let mut rng = OsRng::new().expect("Not able to operate without random source.");
|
let mut rng = OsRng::new().expect("Not able to operate without random source.");
|
||||||
rng.gen_ascii_chars().take(length).collect()
|
rng.sample_iter(&Alphanumeric).take(length).collect()
|
||||||
}
|
}
|
||||||
|
@ -16,6 +16,7 @@
|
|||||||
|
|
||||||
extern crate rand;
|
extern crate rand;
|
||||||
extern crate ethstore;
|
extern crate ethstore;
|
||||||
|
extern crate ethereum_types;
|
||||||
|
|
||||||
mod util;
|
mod util;
|
||||||
|
|
||||||
@ -23,6 +24,8 @@ use ethstore::{EthStore, SimpleSecretStore, SecretVaultRef, StoreAccountRef};
|
|||||||
use ethstore::ethkey::{Random, Generator, Secret, KeyPair, verify_address};
|
use ethstore::ethkey::{Random, Generator, Secret, KeyPair, verify_address};
|
||||||
use ethstore::accounts_dir::RootDiskDirectory;
|
use ethstore::accounts_dir::RootDiskDirectory;
|
||||||
use util::TransientDir;
|
use util::TransientDir;
|
||||||
|
use ethereum_types::Address;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn secret_store_create() {
|
fn secret_store_create() {
|
||||||
@ -114,9 +117,9 @@ fn secret_store_laod_geth_files() {
|
|||||||
let dir = RootDiskDirectory::at(test_path());
|
let dir = RootDiskDirectory::at(test_path());
|
||||||
let store = EthStore::open(Box::new(dir)).unwrap();
|
let store = EthStore::open(Box::new(dir)).unwrap();
|
||||||
assert_eq!(store.accounts().unwrap(), vec![
|
assert_eq!(store.accounts().unwrap(), vec![
|
||||||
StoreAccountRef::root("3f49624084b67849c7b4e805c5988c21a430f9d9".into()),
|
StoreAccountRef::root(Address::from_str("3f49624084b67849c7b4e805c5988c21a430f9d9").unwrap()),
|
||||||
StoreAccountRef::root("5ba4dcf897e97c2bdf8315b9ef26c13c085988cf".into()),
|
StoreAccountRef::root(Address::from_str("5ba4dcf897e97c2bdf8315b9ef26c13c085988cf").unwrap()),
|
||||||
StoreAccountRef::root("63121b431a52f8043c16fcf0d1df9cb7b5f66649".into()),
|
StoreAccountRef::root(Address::from_str("63121b431a52f8043c16fcf0d1df9cb7b5f66649").unwrap()),
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -125,8 +128,8 @@ fn secret_store_load_pat_files() {
|
|||||||
let dir = RootDiskDirectory::at(pat_path());
|
let dir = RootDiskDirectory::at(pat_path());
|
||||||
let store = EthStore::open(Box::new(dir)).unwrap();
|
let store = EthStore::open(Box::new(dir)).unwrap();
|
||||||
assert_eq!(store.accounts().unwrap(), vec![
|
assert_eq!(store.accounts().unwrap(), vec![
|
||||||
StoreAccountRef::root("3f49624084b67849c7b4e805c5988c21a430f9d9".into()),
|
StoreAccountRef::root(Address::from_str("3f49624084b67849c7b4e805c5988c21a430f9d9").unwrap()),
|
||||||
StoreAccountRef::root("5ba4dcf897e97c2bdf8315b9ef26c13c085988cf".into()),
|
StoreAccountRef::root(Address::from_str("5ba4dcf897e97c2bdf8315b9ef26c13c085988cf").unwrap()),
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -140,8 +143,8 @@ fn test_decrypting_files_with_short_ciphertext() {
|
|||||||
let store = EthStore::open(Box::new(dir)).unwrap();
|
let store = EthStore::open(Box::new(dir)).unwrap();
|
||||||
let accounts = store.accounts().unwrap();
|
let accounts = store.accounts().unwrap();
|
||||||
assert_eq!(accounts, vec![
|
assert_eq!(accounts, vec![
|
||||||
StoreAccountRef::root("31e9d1e6d844bd3a536800ef8d8be6a9975db509".into()),
|
StoreAccountRef::root(Address::from_str("31e9d1e6d844bd3a536800ef8d8be6a9975db509").unwrap()),
|
||||||
StoreAccountRef::root("d1e64e5480bfaf733ba7d48712decb8227797a4e".into()),
|
StoreAccountRef::root(Address::from_str("d1e64e5480bfaf733ba7d48712decb8227797a4e").unwrap()),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
let message = Default::default();
|
let message = Default::default();
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::{env, fs};
|
use std::{env, fs};
|
||||||
use rand::{Rng, OsRng};
|
use rand::{RngCore, rngs::OsRng};
|
||||||
use ethstore::accounts_dir::{KeyDirectory, RootDiskDirectory};
|
use ethstore::accounts_dir::{KeyDirectory, RootDiskDirectory};
|
||||||
use ethstore::{Error, SafeAccount};
|
use ethstore::{Error, SafeAccount};
|
||||||
|
|
||||||
|
@ -1,10 +0,0 @@
|
|||||||
[package]
|
|
||||||
description = "Fake hardware-wallet, for OS' that don't support libusb"
|
|
||||||
name = "fake-hardware-wallet"
|
|
||||||
version = "0.0.1"
|
|
||||||
license = "GPL-3.0"
|
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
ethereum-types = "0.4"
|
|
||||||
ethkey = { path = "../../accounts/ethkey" }
|
|
@ -1,101 +0,0 @@
|
|||||||
// Copyright 2015-2019 Parity Technologies (UK) Ltd.
|
|
||||||
// This file is part of Parity Ethereum.
|
|
||||||
|
|
||||||
// Parity Ethereum is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU General Public License as published by
|
|
||||||
// the Free Software Foundation, either version 3 of the License, or
|
|
||||||
// (at your option) any later version.
|
|
||||||
|
|
||||||
// Parity Ethereum is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU General Public License for more details.
|
|
||||||
|
|
||||||
// You should have received a copy of the GNU General Public License
|
|
||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
//! Dummy module for platforms that does not provide support for hardware wallets (libusb)
|
|
||||||
|
|
||||||
extern crate ethereum_types;
|
|
||||||
extern crate ethkey;
|
|
||||||
|
|
||||||
use std::fmt;
|
|
||||||
use ethereum_types::U256;
|
|
||||||
use ethkey::{Address, Signature};
|
|
||||||
|
|
||||||
pub struct WalletInfo {
|
|
||||||
pub address: Address,
|
|
||||||
pub name: String,
|
|
||||||
pub manufacturer: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
/// `ErrorType` for devices with no `hardware wallet`
|
|
||||||
pub enum Error {
|
|
||||||
NoWallet,
|
|
||||||
KeyNotFound,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct TransactionInfo {
|
|
||||||
/// Nonce
|
|
||||||
pub nonce: U256,
|
|
||||||
/// Gas price
|
|
||||||
pub gas_price: U256,
|
|
||||||
/// Gas limit
|
|
||||||
pub gas_limit: U256,
|
|
||||||
/// Receiver
|
|
||||||
pub to: Option<Address>,
|
|
||||||
/// Value
|
|
||||||
pub value: U256,
|
|
||||||
/// Data
|
|
||||||
pub data: Vec<u8>,
|
|
||||||
/// Chain ID
|
|
||||||
pub chain_id: Option<u64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum KeyPath {
|
|
||||||
/// Ethereum.
|
|
||||||
Ethereum,
|
|
||||||
/// Ethereum classic.
|
|
||||||
EthereumClassic,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `HardwareWalletManager` for devices with no `hardware wallet`
|
|
||||||
pub struct HardwareWalletManager;
|
|
||||||
|
|
||||||
impl HardwareWalletManager {
|
|
||||||
pub fn new() -> Result<Self, Error> {
|
|
||||||
Err(Error::NoWallet)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_key_path(&self, _key_path: KeyPath) {}
|
|
||||||
|
|
||||||
pub fn wallet_info(&self, _: &Address) -> Option<WalletInfo> {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn list_wallets(&self) -> Vec<WalletInfo> {
|
|
||||||
Vec::with_capacity(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn list_locked_wallets(&self) -> Result<Vec<String>, Error> {
|
|
||||||
Err(Error::NoWallet)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pin_matrix_ack(&self, _: &str, _: &str) -> Result<bool, Error> {
|
|
||||||
Err(Error::NoWallet)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn sign_transaction(&self, _address: &Address, _transaction: &TransactionInfo, _rlp_transaction: &[u8]) -> Result<Signature, Error> {
|
|
||||||
Err(Error::NoWallet) }
|
|
||||||
|
|
||||||
pub fn sign_message(&self, _address: &Address, _msg: &[u8]) -> Result<Signature, Error> {
|
|
||||||
Err(Error::NoWallet)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
write!(f, "No hardware wallet!!")
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,21 +0,0 @@
|
|||||||
[package]
|
|
||||||
description = "Hardware wallet support."
|
|
||||||
homepage = "http://parity.io"
|
|
||||||
license = "GPL-3.0"
|
|
||||||
name = "hardware-wallet"
|
|
||||||
version = "1.12.0"
|
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
log = "0.4"
|
|
||||||
parking_lot = "0.7"
|
|
||||||
protobuf = "1.4"
|
|
||||||
hidapi = { git = "https://github.com/paritytech/hidapi-rs" }
|
|
||||||
libusb = { git = "https://github.com/paritytech/libusb-rs" }
|
|
||||||
trezor-sys = { git = "https://github.com/paritytech/trezor-sys" }
|
|
||||||
ethkey = { path = "../ethkey" }
|
|
||||||
ethereum-types = "0.4"
|
|
||||||
semver = "0.9"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
rustc-hex = "1.0"
|
|
File diff suppressed because one or more lines are too long
@ -1,402 +0,0 @@
|
|||||||
// Copyright 2015-2019 Parity Technologies (UK) Ltd.
|
|
||||||
// This file is part of Parity Ethereum.
|
|
||||||
|
|
||||||
// Parity Ethereum is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU General Public License as published by
|
|
||||||
// the Free Software Foundation, either version 3 of the License, or
|
|
||||||
// (at your option) any later version.
|
|
||||||
|
|
||||||
// Parity Ethereum is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU General Public License for more details.
|
|
||||||
|
|
||||||
// You should have received a copy of the GNU General Public License
|
|
||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
//! Hardware wallet management.
|
|
||||||
|
|
||||||
#![warn(missing_docs)]
|
|
||||||
#![warn(warnings)]
|
|
||||||
|
|
||||||
extern crate ethereum_types;
|
|
||||||
extern crate ethkey;
|
|
||||||
extern crate hidapi;
|
|
||||||
extern crate libusb;
|
|
||||||
extern crate parking_lot;
|
|
||||||
extern crate protobuf;
|
|
||||||
extern crate semver;
|
|
||||||
extern crate trezor_sys;
|
|
||||||
|
|
||||||
#[macro_use] extern crate log;
|
|
||||||
#[cfg(test)] extern crate rustc_hex;
|
|
||||||
|
|
||||||
mod ledger;
|
|
||||||
mod trezor;
|
|
||||||
|
|
||||||
use std::sync::{Arc, atomic, atomic::AtomicBool, Weak};
|
|
||||||
use std::{fmt, time::Duration};
|
|
||||||
use std::thread;
|
|
||||||
|
|
||||||
use ethereum_types::U256;
|
|
||||||
use ethkey::{Address, Signature};
|
|
||||||
use parking_lot::Mutex;
|
|
||||||
|
|
||||||
const HID_GLOBAL_USAGE_PAGE: u16 = 0xFF00;
|
|
||||||
const HID_USB_DEVICE_CLASS: u8 = 0;
|
|
||||||
const MAX_POLLING_DURATION: Duration = Duration::from_millis(500);
|
|
||||||
const USB_EVENT_POLLING_INTERVAL: Duration = Duration::from_millis(500);
|
|
||||||
|
|
||||||
/// `HardwareWallet` device
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Device {
|
|
||||||
path: String,
|
|
||||||
info: WalletInfo,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `Wallet` trait
|
|
||||||
pub trait Wallet<'a> {
|
|
||||||
/// Error
|
|
||||||
type Error;
|
|
||||||
/// Transaction data format
|
|
||||||
type Transaction;
|
|
||||||
|
|
||||||
/// Sign transaction data with wallet managing `address`.
|
|
||||||
fn sign_transaction(&self, address: &Address, transaction: Self::Transaction) -> Result<Signature, Self::Error>;
|
|
||||||
|
|
||||||
/// Set key derivation path for a chain.
|
|
||||||
fn set_key_path(&self, key_path: KeyPath);
|
|
||||||
|
|
||||||
/// Re-populate device list
|
|
||||||
/// Note, this assumes all devices are iterated over and updated
|
|
||||||
fn update_devices(&self, device_direction: DeviceDirection) -> Result<usize, Self::Error>;
|
|
||||||
|
|
||||||
/// Read device info
|
|
||||||
fn read_device(&self, usb: &hidapi::HidApi, dev_info: &hidapi::HidDeviceInfo) -> Result<Device, Self::Error>;
|
|
||||||
|
|
||||||
/// List connected and acknowledged wallets
|
|
||||||
fn list_devices(&self) -> Vec<WalletInfo>;
|
|
||||||
|
|
||||||
/// List locked wallets
|
|
||||||
/// This may be moved if it is the wrong assumption, for example this is not supported by Ledger
|
|
||||||
/// Then this method return a empty vector
|
|
||||||
fn list_locked_devices(&self) -> Vec<String>;
|
|
||||||
|
|
||||||
/// Get wallet info.
|
|
||||||
fn get_wallet(&self, address: &Address) -> Option<WalletInfo>;
|
|
||||||
|
|
||||||
/// Generate ethereum address for a Wallet
|
|
||||||
fn get_address(&self, device: &hidapi::HidDevice) -> Result<Option<Address>, Self::Error>;
|
|
||||||
|
|
||||||
/// Open a device using `device path`
|
|
||||||
/// Note, f - is a closure that borrows HidResult<HidDevice>
|
|
||||||
/// HidDevice is in turn a type alias for a `c_void function pointer`
|
|
||||||
/// For further information see:
|
|
||||||
/// * <https://github.com/paritytech/hidapi-rs>
|
|
||||||
/// * <https://github.com/rust-lang/libc>
|
|
||||||
fn open_path<R, F>(&self, f: F) -> Result<R, Self::Error>
|
|
||||||
where F: Fn() -> Result<R, &'static str>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Hardware wallet error.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
/// Ledger device error.
|
|
||||||
LedgerDevice(ledger::Error),
|
|
||||||
/// Trezor device error
|
|
||||||
TrezorDevice(trezor::Error),
|
|
||||||
/// USB error.
|
|
||||||
Usb(libusb::Error),
|
|
||||||
/// HID error
|
|
||||||
Hid(String),
|
|
||||||
/// Hardware wallet not found for specified key.
|
|
||||||
KeyNotFound,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This is the transaction info we need to supply to Trezor message. It's more
|
|
||||||
/// or less a duplicate of `ethcore::transaction::Transaction`, but we can't
|
|
||||||
/// import ethcore here as that would be a circular dependency.
|
|
||||||
pub struct TransactionInfo {
|
|
||||||
/// Nonce
|
|
||||||
pub nonce: U256,
|
|
||||||
/// Gas price
|
|
||||||
pub gas_price: U256,
|
|
||||||
/// Gas limit
|
|
||||||
pub gas_limit: U256,
|
|
||||||
/// Receiver
|
|
||||||
pub to: Option<Address>,
|
|
||||||
/// Value
|
|
||||||
pub value: U256,
|
|
||||||
/// Data
|
|
||||||
pub data: Vec<u8>,
|
|
||||||
/// Chain ID
|
|
||||||
pub chain_id: Option<u64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Hardware wallet information.
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct WalletInfo {
|
|
||||||
/// Wallet device name.
|
|
||||||
pub name: String,
|
|
||||||
/// Wallet device manufacturer.
|
|
||||||
pub manufacturer: String,
|
|
||||||
/// Wallet device serial number.
|
|
||||||
pub serial: String,
|
|
||||||
/// Ethereum address.
|
|
||||||
pub address: Address,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Key derivation paths used on hardware wallets.
|
|
||||||
#[derive(Debug, Clone, Copy)]
|
|
||||||
pub enum KeyPath {
|
|
||||||
/// Ethereum.
|
|
||||||
Ethereum,
|
|
||||||
/// Ethereum classic.
|
|
||||||
EthereumClassic,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
|
||||||
match *self {
|
|
||||||
Error::KeyNotFound => write!(f, "Key not found for given address."),
|
|
||||||
Error::LedgerDevice(ref e) => write!(f, "{}", e),
|
|
||||||
Error::TrezorDevice(ref e) => write!(f, "{}", e),
|
|
||||||
Error::Usb(ref e) => write!(f, "{}", e),
|
|
||||||
Error::Hid(ref e) => write!(f, "{}", e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<ledger::Error> for Error {
|
|
||||||
fn from(err: ledger::Error) -> Self {
|
|
||||||
match err {
|
|
||||||
ledger::Error::KeyNotFound => Error::KeyNotFound,
|
|
||||||
_ => Error::LedgerDevice(err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<trezor::Error> for Error {
|
|
||||||
fn from(err: trezor::Error) -> Self {
|
|
||||||
match err {
|
|
||||||
trezor::Error::KeyNotFound => Error::KeyNotFound,
|
|
||||||
_ => Error::TrezorDevice(err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<libusb::Error> for Error {
|
|
||||||
fn from(err: libusb::Error) -> Self {
|
|
||||||
Error::Usb(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Specifies the direction of the `HardwareWallet` i.e, whether it arrived or left
|
|
||||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
|
||||||
pub enum DeviceDirection {
|
|
||||||
/// Device arrived
|
|
||||||
Arrived,
|
|
||||||
/// Device left
|
|
||||||
Left,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for DeviceDirection {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
DeviceDirection::Arrived => write!(f, "arrived"),
|
|
||||||
DeviceDirection::Left => write!(f, "left"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Hardware wallet management interface.
|
|
||||||
pub struct HardwareWalletManager {
|
|
||||||
exiting: Arc<AtomicBool>,
|
|
||||||
ledger: Arc<ledger::Manager>,
|
|
||||||
trezor: Arc<trezor::Manager>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HardwareWalletManager {
|
|
||||||
/// Hardware wallet constructor
|
|
||||||
pub fn new() -> Result<Self, Error> {
|
|
||||||
let exiting = Arc::new(AtomicBool::new(false));
|
|
||||||
let hidapi = Arc::new(Mutex::new(hidapi::HidApi::new().map_err(|e| Error::Hid(e.to_string().clone()))?));
|
|
||||||
let ledger = ledger::Manager::new(hidapi.clone());
|
|
||||||
let trezor = trezor::Manager::new(hidapi.clone());
|
|
||||||
let usb_context = Arc::new(libusb::Context::new()?);
|
|
||||||
|
|
||||||
let l = ledger.clone();
|
|
||||||
let t = trezor.clone();
|
|
||||||
let exit = exiting.clone();
|
|
||||||
|
|
||||||
// Subscribe to all vendor IDs (VIDs) and product IDs (PIDs)
|
|
||||||
// This means that the `HardwareWalletManager` is responsible to validate the detected device
|
|
||||||
usb_context.register_callback(
|
|
||||||
None, None, Some(HID_USB_DEVICE_CLASS),
|
|
||||||
Box::new(EventHandler::new(
|
|
||||||
Arc::downgrade(&ledger),
|
|
||||||
Arc::downgrade(&trezor)
|
|
||||||
))
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Hardware event subscriber thread
|
|
||||||
thread::Builder::new()
|
|
||||||
.name("hw_wallet_manager".to_string())
|
|
||||||
.spawn(move || {
|
|
||||||
if let Err(e) = l.update_devices(DeviceDirection::Arrived) {
|
|
||||||
debug!(target: "hw", "Ledger couldn't connect at startup, error: {}", e);
|
|
||||||
}
|
|
||||||
if let Err(e) = t.update_devices(DeviceDirection::Arrived) {
|
|
||||||
debug!(target: "hw", "Trezor couldn't connect at startup, error: {}", e);
|
|
||||||
}
|
|
||||||
|
|
||||||
while !exit.load(atomic::Ordering::Acquire) {
|
|
||||||
if let Err(e) = usb_context.handle_events(Some(USB_EVENT_POLLING_INTERVAL)) {
|
|
||||||
debug!(target: "hw", "HardwareWalletManager event handler error: {}", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.ok();
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
exiting,
|
|
||||||
trezor,
|
|
||||||
ledger,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Select key derivation path for a chain.
|
|
||||||
/// Currently, only one hard-coded keypath is supported
|
|
||||||
/// It is managed by `ethcore/account_provider`
|
|
||||||
pub fn set_key_path(&self, key_path: KeyPath) {
|
|
||||||
self.ledger.set_key_path(key_path);
|
|
||||||
self.trezor.set_key_path(key_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// List connected wallets. This only returns wallets that are ready to be used.
|
|
||||||
pub fn list_wallets(&self) -> Vec<WalletInfo> {
|
|
||||||
let mut wallets = Vec::new();
|
|
||||||
wallets.extend(self.ledger.list_devices());
|
|
||||||
wallets.extend(self.trezor.list_devices());
|
|
||||||
wallets
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a list of paths to locked hardware wallets
|
|
||||||
/// This is only applicable to Trezor because Ledger only appears as
|
|
||||||
/// a device when it is unlocked
|
|
||||||
pub fn list_locked_wallets(&self) -> Result<Vec<String>, Error> {
|
|
||||||
Ok(self.trezor.list_locked_devices())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get connected wallet info.
|
|
||||||
pub fn wallet_info(&self, address: &Address) -> Option<WalletInfo> {
|
|
||||||
if let Some(info) = self.ledger.get_wallet(address) {
|
|
||||||
Some(info)
|
|
||||||
} else {
|
|
||||||
self.trezor.get_wallet(address)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sign a message with the wallet (only supported by Ledger)
|
|
||||||
pub fn sign_message(&self, address: &Address, msg: &[u8]) -> Result<Signature, Error> {
|
|
||||||
if self.ledger.get_wallet(address).is_some() {
|
|
||||||
Ok(self.ledger.sign_message(address, msg)?)
|
|
||||||
} else if self.trezor.get_wallet(address).is_some() {
|
|
||||||
Err(Error::TrezorDevice(trezor::Error::NoSigningMessage))
|
|
||||||
} else {
|
|
||||||
Err(Error::KeyNotFound)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sign transaction data with wallet managing `address`.
|
|
||||||
pub fn sign_transaction(&self, address: &Address, t_info: &TransactionInfo, encoded_transaction: &[u8]) -> Result<Signature, Error> {
|
|
||||||
if self.ledger.get_wallet(address).is_some() {
|
|
||||||
Ok(self.ledger.sign_transaction(address, encoded_transaction)?)
|
|
||||||
} else if self.trezor.get_wallet(address).is_some() {
|
|
||||||
Ok(self.trezor.sign_transaction(address, t_info)?)
|
|
||||||
} else {
|
|
||||||
Err(Error::KeyNotFound)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Send a pin to a device at a certain path to unlock it
|
|
||||||
/// This is only applicable to Trezor because Ledger only appears as
|
|
||||||
/// a device when it is unlocked
|
|
||||||
pub fn pin_matrix_ack(&self, path: &str, pin: &str) -> Result<bool, Error> {
|
|
||||||
self.trezor.pin_matrix_ack(path, pin).map_err(Error::TrezorDevice)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for HardwareWalletManager {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
// Indicate to the USB Hotplug handler that it
|
|
||||||
// shall terminate but don't wait for it to terminate.
|
|
||||||
// If it doesn't terminate for some reason USB Hotplug events will be handled
|
|
||||||
// even if the HardwareWalletManger has been dropped
|
|
||||||
self.exiting.store(true, atomic::Ordering::Release);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Hardware wallet event handler
|
|
||||||
///
|
|
||||||
/// Note, that this runs to completion and race-conditions can't occur but it can
|
|
||||||
/// stop other events for being processed with an infinite loop or similar
|
|
||||||
struct EventHandler {
|
|
||||||
ledger: Weak<ledger::Manager>,
|
|
||||||
trezor: Weak<trezor::Manager>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EventHandler {
|
|
||||||
/// Trezor event handler constructor
|
|
||||||
pub fn new(ledger: Weak<ledger::Manager>, trezor: Weak<trezor::Manager>) -> Self {
|
|
||||||
Self { ledger, trezor }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_device_info(device: &libusb::Device) -> Result<(u16, u16), Error> {
|
|
||||||
let desc = device.device_descriptor()?;
|
|
||||||
Ok((desc.vendor_id(), desc.product_id()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl libusb::Hotplug for EventHandler {
|
|
||||||
fn device_arrived(&mut self, device: libusb::Device) {
|
|
||||||
// Upgrade reference to an Arc
|
|
||||||
if let (Some(ledger), Some(trezor)) = (self.ledger.upgrade(), self.trezor.upgrade()) {
|
|
||||||
// Version ID and Product ID are available
|
|
||||||
if let Ok((vid, pid)) = Self::extract_device_info(&device) {
|
|
||||||
if trezor::is_valid_trezor(vid, pid) {
|
|
||||||
if !trezor::try_connect_polling(&trezor, &MAX_POLLING_DURATION, DeviceDirection::Arrived) {
|
|
||||||
trace!(target: "hw", "Trezor device was detected but connection failed");
|
|
||||||
}
|
|
||||||
} else if ledger::is_valid_ledger(vid, pid) {
|
|
||||||
if !ledger::try_connect_polling(&ledger, &MAX_POLLING_DURATION, DeviceDirection::Arrived) {
|
|
||||||
trace!(target: "hw", "Ledger device was detected but connection failed");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn device_left(&mut self, device: libusb::Device) {
|
|
||||||
// Upgrade reference to an Arc
|
|
||||||
if let (Some(ledger), Some(trezor)) = (self.ledger.upgrade(), self.trezor.upgrade()) {
|
|
||||||
// Version ID and Product ID are available
|
|
||||||
if let Ok((vid, pid)) = Self::extract_device_info(&device) {
|
|
||||||
if trezor::is_valid_trezor(vid, pid) {
|
|
||||||
if !trezor::try_connect_polling(&trezor, &MAX_POLLING_DURATION, DeviceDirection::Left) {
|
|
||||||
trace!(target: "hw", "Trezor device was detected but disconnection failed");
|
|
||||||
}
|
|
||||||
} else if ledger::is_valid_ledger(vid, pid) {
|
|
||||||
if !ledger::try_connect_polling(&ledger, &MAX_POLLING_DURATION, DeviceDirection::Left) {
|
|
||||||
trace!(target: "hw", "Ledger device was detected but disconnection failed");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Helper to determine if a device is a valid HID
|
|
||||||
pub fn is_valid_hid_device(usage_page: u16, interface_number: i32) -> bool {
|
|
||||||
usage_page == HID_GLOBAL_USAGE_PAGE || interface_number == HID_USB_DEVICE_CLASS as i32
|
|
||||||
}
|
|
@ -1,463 +0,0 @@
|
|||||||
// Copyright 2015-2019 Parity Technologies (UK) Ltd.
|
|
||||||
// This file is part of Parity Ethereum.
|
|
||||||
|
|
||||||
// Parity Ethereum is free software: you can redistribute it and/or modify
|
|
||||||
// it under the terms of the GNU General Public License as published by
|
|
||||||
// the Free Software Foundation, either version 3 of the License, or
|
|
||||||
// (at your option) any later version.
|
|
||||||
|
|
||||||
// Parity Ethereum is distributed in the hope that it will be useful,
|
|
||||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
// GNU General Public License for more details.
|
|
||||||
|
|
||||||
// You should have received a copy of the GNU General Public License
|
|
||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
//! Trezor hardware wallet module. Supports Trezor v1.
|
|
||||||
//! See <http://doc.satoshilabs.com/trezor-tech/api-protobuf.html>
|
|
||||||
//! and <https://github.com/trezor/trezor-common/blob/master/protob/protocol.md>
|
|
||||||
//! for protocol details.
|
|
||||||
|
|
||||||
use std::cmp::{min, max};
|
|
||||||
use std::sync::Arc;
|
|
||||||
use std::time::{Duration, Instant};
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
use ethereum_types::{U256, H256, Address};
|
|
||||||
use ethkey::Signature;
|
|
||||||
use hidapi;
|
|
||||||
use libusb;
|
|
||||||
use parking_lot::{Mutex, RwLock};
|
|
||||||
use protobuf::{self, Message, ProtobufEnum};
|
|
||||||
use super::{DeviceDirection, WalletInfo, TransactionInfo, KeyPath, Wallet, Device, is_valid_hid_device};
|
|
||||||
use trezor_sys::messages::{EthereumAddress, PinMatrixAck, MessageType, EthereumTxRequest, EthereumSignTx, EthereumGetAddress, EthereumTxAck, ButtonAck};
|
|
||||||
|
|
||||||
/// Trezor v1 vendor ID
|
|
||||||
const TREZOR_VID: u16 = 0x534c;
|
|
||||||
/// Trezor product IDs
|
|
||||||
const TREZOR_PIDS: [u16; 1] = [0x0001];
|
|
||||||
|
|
||||||
const ETH_DERIVATION_PATH: [u32; 5] = [0x8000_002C, 0x8000_003C, 0x8000_0000, 0, 0]; // m/44'/60'/0'/0/0
|
|
||||||
const ETC_DERIVATION_PATH: [u32; 5] = [0x8000_002C, 0x8000_003D, 0x8000_0000, 0, 0]; // m/44'/61'/0'/0/0
|
|
||||||
|
|
||||||
/// Hardware wallet error.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
/// Ethereum wallet protocol error.
|
|
||||||
Protocol(&'static str),
|
|
||||||
/// Hidapi error.
|
|
||||||
Usb(hidapi::HidError),
|
|
||||||
/// Libusb error
|
|
||||||
LibUsb(libusb::Error),
|
|
||||||
/// Device with request key is not available.
|
|
||||||
KeyNotFound,
|
|
||||||
/// Signing has been cancelled by user.
|
|
||||||
UserCancel,
|
|
||||||
/// The Message Type given in the trezor RPC call is not something we recognize
|
|
||||||
BadMessageType,
|
|
||||||
/// Trying to read from a closed device at the given path
|
|
||||||
LockedDevice(String),
|
|
||||||
/// Signing messages are not supported by Trezor
|
|
||||||
NoSigningMessage,
|
|
||||||
/// No device arrived
|
|
||||||
NoDeviceArrived,
|
|
||||||
/// No device left
|
|
||||||
NoDeviceLeft,
|
|
||||||
/// Invalid PID or VID
|
|
||||||
InvalidDevice,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
|
||||||
match *self {
|
|
||||||
Error::Protocol(ref s) => write!(f, "Trezor protocol error: {}", s),
|
|
||||||
Error::Usb(ref e) => write!(f, "USB communication error: {}", e),
|
|
||||||
Error::LibUsb(ref e) => write!(f, "LibUSB communication error: {}", e),
|
|
||||||
Error::KeyNotFound => write!(f, "Key not found"),
|
|
||||||
Error::UserCancel => write!(f, "Operation has been cancelled"),
|
|
||||||
Error::BadMessageType => write!(f, "Bad Message Type in RPC call"),
|
|
||||||
Error::LockedDevice(ref s) => write!(f, "Device is locked, needs PIN to perform operations: {}", s),
|
|
||||||
Error::NoSigningMessage=> write!(f, "Signing messages are not supported by Trezor"),
|
|
||||||
Error::NoDeviceArrived => write!(f, "No device arrived"),
|
|
||||||
Error::NoDeviceLeft => write!(f, "No device left"),
|
|
||||||
Error::InvalidDevice => write!(f, "Device with non-supported product ID or vendor ID was detected"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<hidapi::HidError> for Error {
|
|
||||||
fn from(err: hidapi::HidError) -> Self {
|
|
||||||
Error::Usb(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<libusb::Error> for Error {
|
|
||||||
fn from(err: libusb::Error) -> Self {
|
|
||||||
Error::LibUsb(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<protobuf::ProtobufError> for Error {
|
|
||||||
fn from(_: protobuf::ProtobufError) -> Self {
|
|
||||||
Error::Protocol(&"Could not read response from Trezor Device")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Trezor device manager
|
|
||||||
pub struct Manager {
|
|
||||||
usb: Arc<Mutex<hidapi::HidApi>>,
|
|
||||||
devices: RwLock<Vec<Device>>,
|
|
||||||
locked_devices: RwLock<Vec<String>>,
|
|
||||||
key_path: RwLock<KeyPath>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// HID Version used for the Trezor device
|
|
||||||
enum HidVersion {
|
|
||||||
V1,
|
|
||||||
V2,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Manager {
|
|
||||||
/// Create a new instance.
|
|
||||||
pub fn new(usb: Arc<Mutex<hidapi::HidApi>>) -> Arc<Self> {
|
|
||||||
Arc::new(Self {
|
|
||||||
usb,
|
|
||||||
devices: RwLock::new(Vec::new()),
|
|
||||||
locked_devices: RwLock::new(Vec::new()),
|
|
||||||
key_path: RwLock::new(KeyPath::Ethereum),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pin_matrix_ack(&self, device_path: &str, pin: &str) -> Result<bool, Error> {
|
|
||||||
let unlocked = {
|
|
||||||
let usb = self.usb.lock();
|
|
||||||
let device = self.open_path(|| usb.open_path(&device_path))?;
|
|
||||||
let t = MessageType::MessageType_PinMatrixAck;
|
|
||||||
let mut m = PinMatrixAck::new();
|
|
||||||
m.set_pin(pin.to_string());
|
|
||||||
self.send_device_message(&device, t, &m)?;
|
|
||||||
let (resp_type, _) = self.read_device_response(&device)?;
|
|
||||||
match resp_type {
|
|
||||||
// Getting an Address back means it's unlocked, this is undocumented behavior
|
|
||||||
MessageType::MessageType_EthereumAddress => Ok(true),
|
|
||||||
// Getting anything else means we didn't unlock it
|
|
||||||
_ => Ok(false),
|
|
||||||
|
|
||||||
}
|
|
||||||
};
|
|
||||||
self.update_devices(DeviceDirection::Arrived)?;
|
|
||||||
unlocked
|
|
||||||
}
|
|
||||||
|
|
||||||
fn u256_to_be_vec(&self, val: &U256) -> Vec<u8> {
|
|
||||||
let mut buf = [0_u8; 32];
|
|
||||||
val.to_big_endian(&mut buf);
|
|
||||||
buf.iter().skip_while(|x| **x == 0).cloned().collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn signing_loop(&self, handle: &hidapi::HidDevice, chain_id: &Option<u64>, data: &[u8]) -> Result<Signature, Error> {
|
|
||||||
let (resp_type, bytes) = self.read_device_response(&handle)?;
|
|
||||||
match resp_type {
|
|
||||||
MessageType::MessageType_Cancel => Err(Error::UserCancel),
|
|
||||||
MessageType::MessageType_ButtonRequest => {
|
|
||||||
self.send_device_message(handle, MessageType::MessageType_ButtonAck, &ButtonAck::new())?;
|
|
||||||
// Signing loop goes back to the top and reading blocks
|
|
||||||
// for up to 5 minutes waiting for response from the device
|
|
||||||
// if the user doesn't click any button within 5 minutes you
|
|
||||||
// get a signing error and the device sort of locks up on the signing screen
|
|
||||||
self.signing_loop(handle, chain_id, data)
|
|
||||||
}
|
|
||||||
MessageType::MessageType_EthereumTxRequest => {
|
|
||||||
let resp: EthereumTxRequest = protobuf::core::parse_from_bytes(&bytes)?;
|
|
||||||
if resp.has_data_length() {
|
|
||||||
let mut msg = EthereumTxAck::new();
|
|
||||||
let len = resp.get_data_length() as usize;
|
|
||||||
msg.set_data_chunk(data[..len].to_vec());
|
|
||||||
self.send_device_message(handle, MessageType::MessageType_EthereumTxAck, &msg)?;
|
|
||||||
self.signing_loop(handle, chain_id, &data[len..])
|
|
||||||
} else {
|
|
||||||
let v = resp.get_signature_v();
|
|
||||||
let r = H256::from_slice(resp.get_signature_r());
|
|
||||||
let s = H256::from_slice(resp.get_signature_s());
|
|
||||||
if let Some(c_id) = *chain_id {
|
|
||||||
// If there is a chain_id supplied, Trezor will return a v
|
|
||||||
// part of the signature that is already adjusted for EIP-155,
|
|
||||||
// so v' = v + 2 * chain_id + 35, but code further down the
|
|
||||||
// pipeline will already do this transformation, so remove it here
|
|
||||||
let adjustment = 35 + 2 * c_id as u32;
|
|
||||||
Ok(Signature::from_rsv(&r, &s, (max(v, adjustment) - adjustment) as u8))
|
|
||||||
} else {
|
|
||||||
// If there isn't a chain_id, v will be returned as v + 27
|
|
||||||
let adjusted_v = if v < 27 { v } else { v - 27 };
|
|
||||||
Ok(Signature::from_rsv(&r, &s, adjusted_v as u8))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
MessageType::MessageType_Failure => Err(Error::Protocol("Last message sent to Trezor failed")),
|
|
||||||
_ => Err(Error::Protocol("Unexpected response from Trezor device.")),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn send_device_message(&self, device: &hidapi::HidDevice, msg_type: MessageType, msg: &Message) -> Result<usize, Error> {
|
|
||||||
let msg_id = msg_type as u16;
|
|
||||||
let mut message = msg.write_to_bytes()?;
|
|
||||||
let msg_size = message.len();
|
|
||||||
let mut data = Vec::new();
|
|
||||||
let hid_version = self.probe_hid_version(device)?;
|
|
||||||
// Magic constants
|
|
||||||
data.push(b'#');
|
|
||||||
data.push(b'#');
|
|
||||||
// Convert msg_id to BE and split into bytes
|
|
||||||
data.push(((msg_id >> 8) & 0xFF) as u8);
|
|
||||||
data.push((msg_id & 0xFF) as u8);
|
|
||||||
// Convert msg_size to BE and split into bytes
|
|
||||||
data.push(((msg_size >> 24) & 0xFF) as u8);
|
|
||||||
data.push(((msg_size >> 16) & 0xFF) as u8);
|
|
||||||
data.push(((msg_size >> 8) & 0xFF) as u8);
|
|
||||||
data.push((msg_size & 0xFF) as u8);
|
|
||||||
data.append(&mut message);
|
|
||||||
while data.len() % 63 > 0 {
|
|
||||||
data.push(0);
|
|
||||||
}
|
|
||||||
let mut total_written = 0;
|
|
||||||
for chunk in data.chunks(63) {
|
|
||||||
let mut padded_chunk = match hid_version {
|
|
||||||
HidVersion::V1 => vec![b'?'],
|
|
||||||
HidVersion::V2 => vec![0, b'?'],
|
|
||||||
};
|
|
||||||
padded_chunk.extend_from_slice(&chunk);
|
|
||||||
total_written += device.write(&padded_chunk)?;
|
|
||||||
}
|
|
||||||
Ok(total_written)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn probe_hid_version(&self, device: &hidapi::HidDevice) -> Result<HidVersion, Error> {
|
|
||||||
let mut buf2 = [0xFF_u8; 65];
|
|
||||||
buf2[0] = 0;
|
|
||||||
buf2[1] = 63;
|
|
||||||
let mut buf1 = [0xFF_u8; 64];
|
|
||||||
buf1[0] = 63;
|
|
||||||
if device.write(&buf2)? == 65 {
|
|
||||||
Ok(HidVersion::V2)
|
|
||||||
} else if device.write(&buf1)? == 64 {
|
|
||||||
Ok(HidVersion::V1)
|
|
||||||
} else {
|
|
||||||
Err(Error::Usb("Unable to determine HID Version"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_device_response(&self, device: &hidapi::HidDevice) -> Result<(MessageType, Vec<u8>), Error> {
|
|
||||||
let protocol_err = Error::Protocol(&"Unexpected wire response from Trezor Device");
|
|
||||||
let mut buf = vec![0; 64];
|
|
||||||
|
|
||||||
let first_chunk = device.read_timeout(&mut buf, 300_000)?;
|
|
||||||
if first_chunk < 9 || buf[0] != b'?' || buf[1] != b'#' || buf[2] != b'#' {
|
|
||||||
return Err(protocol_err);
|
|
||||||
}
|
|
||||||
let msg_type = MessageType::from_i32(((buf[3] as i32 & 0xFF) << 8) + (buf[4] as i32 & 0xFF)).ok_or(protocol_err)?;
|
|
||||||
let msg_size = ((buf[5] as u32 & 0xFF) << 24) + ((buf[6] as u32 & 0xFF) << 16) + ((buf[7] as u32 & 0xFF) << 8) + (buf[8] as u32 & 0xFF);
|
|
||||||
let mut data = Vec::new();
|
|
||||||
data.extend_from_slice(&buf[9..]);
|
|
||||||
while data.len() < (msg_size as usize) {
|
|
||||||
device.read_timeout(&mut buf, 10_000)?;
|
|
||||||
data.extend_from_slice(&buf[1..]);
|
|
||||||
}
|
|
||||||
Ok((msg_type, data[..msg_size as usize].to_vec()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Wallet<'a> for Manager {
|
|
||||||
type Error = Error;
|
|
||||||
type Transaction = &'a TransactionInfo;
|
|
||||||
|
|
||||||
fn sign_transaction(&self, address: &Address, t_info: Self::Transaction) ->
|
|
||||||
Result<Signature, Error> {
|
|
||||||
let usb = self.usb.lock();
|
|
||||||
let devices = self.devices.read();
|
|
||||||
let device = devices.iter().find(|d| &d.info.address == address).ok_or(Error::KeyNotFound)?;
|
|
||||||
let handle = self.open_path(|| usb.open_path(&device.path))?;
|
|
||||||
let msg_type = MessageType::MessageType_EthereumSignTx;
|
|
||||||
let mut message = EthereumSignTx::new();
|
|
||||||
match *self.key_path.read() {
|
|
||||||
KeyPath::Ethereum => message.set_address_n(ETH_DERIVATION_PATH.to_vec()),
|
|
||||||
KeyPath::EthereumClassic => message.set_address_n(ETC_DERIVATION_PATH.to_vec()),
|
|
||||||
}
|
|
||||||
message.set_nonce(self.u256_to_be_vec(&t_info.nonce));
|
|
||||||
message.set_gas_limit(self.u256_to_be_vec(&t_info.gas_limit));
|
|
||||||
message.set_gas_price(self.u256_to_be_vec(&t_info.gas_price));
|
|
||||||
message.set_value(self.u256_to_be_vec(&t_info.value));
|
|
||||||
|
|
||||||
if let Some(addr) = t_info.to {
|
|
||||||
message.set_to(addr.to_vec())
|
|
||||||
}
|
|
||||||
let first_chunk_length = min(t_info.data.len(), 1024);
|
|
||||||
let chunk = &t_info.data[0..first_chunk_length];
|
|
||||||
message.set_data_initial_chunk(chunk.to_vec());
|
|
||||||
message.set_data_length(t_info.data.len() as u32);
|
|
||||||
if let Some(c_id) = t_info.chain_id {
|
|
||||||
message.set_chain_id(c_id as u32);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.send_device_message(&handle, msg_type, &message)?;
|
|
||||||
|
|
||||||
self.signing_loop(&handle, &t_info.chain_id, &t_info.data[first_chunk_length..])
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_key_path(&self, key_path: KeyPath) {
|
|
||||||
*self.key_path.write() = key_path;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update_devices(&self, device_direction: DeviceDirection) -> Result<usize, Error> {
|
|
||||||
let mut usb = self.usb.lock();
|
|
||||||
usb.refresh_devices();
|
|
||||||
let devices = usb.devices();
|
|
||||||
let num_prev_devices = self.devices.read().len();
|
|
||||||
|
|
||||||
let detected_devices = devices.iter()
|
|
||||||
.filter(|&d| is_valid_trezor(d.vendor_id, d.product_id) &&
|
|
||||||
is_valid_hid_device(d.usage_page, d.interface_number)
|
|
||||||
)
|
|
||||||
.fold(Vec::new(), |mut v, d| {
|
|
||||||
match self.read_device(&usb, &d) {
|
|
||||||
Ok(info) => {
|
|
||||||
trace!(target: "hw", "Found device: {:?}", info);
|
|
||||||
v.push(info);
|
|
||||||
}
|
|
||||||
Err(e) => trace!(target: "hw", "Error reading device info: {}", e),
|
|
||||||
};
|
|
||||||
v
|
|
||||||
});
|
|
||||||
|
|
||||||
let num_curr_devices = detected_devices.len();
|
|
||||||
*self.devices.write() = detected_devices;
|
|
||||||
|
|
||||||
match device_direction {
|
|
||||||
DeviceDirection::Arrived => {
|
|
||||||
if num_curr_devices > num_prev_devices {
|
|
||||||
Ok(num_curr_devices - num_prev_devices)
|
|
||||||
} else {
|
|
||||||
Err(Error::NoDeviceArrived)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
DeviceDirection::Left => {
|
|
||||||
if num_prev_devices > num_curr_devices {
|
|
||||||
Ok(num_prev_devices - num_curr_devices)
|
|
||||||
} else {
|
|
||||||
Err(Error::NoDeviceLeft)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_device(&self, usb: &hidapi::HidApi, dev_info: &hidapi::HidDeviceInfo) -> Result<Device, Error> {
|
|
||||||
let handle = self.open_path(|| usb.open_path(&dev_info.path))?;
|
|
||||||
let manufacturer = dev_info.manufacturer_string.clone().unwrap_or_else(|| "Unknown".to_owned());
|
|
||||||
let name = dev_info.product_string.clone().unwrap_or_else(|| "Unknown".to_owned());
|
|
||||||
let serial = dev_info.serial_number.clone().unwrap_or_else(|| "Unknown".to_owned());
|
|
||||||
match self.get_address(&handle) {
|
|
||||||
Ok(Some(addr)) => {
|
|
||||||
Ok(Device {
|
|
||||||
path: dev_info.path.clone(),
|
|
||||||
info: WalletInfo {
|
|
||||||
name,
|
|
||||||
manufacturer,
|
|
||||||
serial,
|
|
||||||
address: addr,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
Ok(None) => Err(Error::LockedDevice(dev_info.path.clone())),
|
|
||||||
Err(e) => Err(e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn list_devices(&self) -> Vec<WalletInfo> {
|
|
||||||
self.devices.read().iter().map(|d| d.info.clone()).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn list_locked_devices(&self) -> Vec<String> {
|
|
||||||
(*self.locked_devices.read()).clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_wallet(&self, address: &Address) -> Option<WalletInfo> {
|
|
||||||
self.devices.read().iter().find(|d| &d.info.address == address).map(|d| d.info.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_address(&self, device: &hidapi::HidDevice) -> Result<Option<Address>, Error> {
|
|
||||||
let typ = MessageType::MessageType_EthereumGetAddress;
|
|
||||||
let mut message = EthereumGetAddress::new();
|
|
||||||
match *self.key_path.read() {
|
|
||||||
KeyPath::Ethereum => message.set_address_n(ETH_DERIVATION_PATH.to_vec()),
|
|
||||||
KeyPath::EthereumClassic => message.set_address_n(ETC_DERIVATION_PATH.to_vec()),
|
|
||||||
}
|
|
||||||
message.set_show_display(false);
|
|
||||||
self.send_device_message(&device, typ, &message)?;
|
|
||||||
|
|
||||||
let (resp_type, bytes) = self.read_device_response(&device)?;
|
|
||||||
match resp_type {
|
|
||||||
MessageType::MessageType_EthereumAddress => {
|
|
||||||
let response: EthereumAddress = protobuf::core::parse_from_bytes(&bytes)?;
|
|
||||||
Ok(Some(From::from(response.get_address())))
|
|
||||||
}
|
|
||||||
_ => Ok(None),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn open_path<R, F>(&self, f: F) -> Result<R, Error>
|
|
||||||
where F: Fn() -> Result<R, &'static str>
|
|
||||||
{
|
|
||||||
f().map_err(Into::into)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Poll the device in maximum `max_polling_duration` if it doesn't succeed
|
|
||||||
pub fn try_connect_polling(trezor: &Manager, duration: &Duration, dir: DeviceDirection) -> bool {
|
|
||||||
let start_time = Instant::now();
|
|
||||||
while start_time.elapsed() <= *duration {
|
|
||||||
if let Ok(num_devices) = trezor.update_devices(dir) {
|
|
||||||
trace!(target: "hw", "{} Trezor devices {}", num_devices, dir);
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if the detected device is a Trezor device by checking both the product ID and the vendor ID
|
|
||||||
pub fn is_valid_trezor(vid: u16, pid: u16) -> bool {
|
|
||||||
vid == TREZOR_VID && TREZOR_PIDS.contains(&pid)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
#[ignore]
|
|
||||||
/// This test can't be run without an actual trezor device connected
|
|
||||||
/// (and unlocked) attached to the machine that's running the test
|
|
||||||
fn test_signature() {
|
|
||||||
use ethereum_types::Address;
|
|
||||||
use MAX_POLLING_DURATION;
|
|
||||||
use super::HardwareWalletManager;
|
|
||||||
|
|
||||||
let manager = HardwareWalletManager::new().unwrap();
|
|
||||||
|
|
||||||
assert_eq!(try_connect_polling(&manager.trezor, &MAX_POLLING_DURATION, DeviceDirection::Arrived), true);
|
|
||||||
|
|
||||||
let addr: Address = manager.list_wallets()
|
|
||||||
.iter()
|
|
||||||
.filter(|d| d.name == "TREZOR".to_string() && d.manufacturer == "SatoshiLabs".to_string())
|
|
||||||
.nth(0)
|
|
||||||
.map(|d| d.address)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let t_info = TransactionInfo {
|
|
||||||
nonce: U256::from(1),
|
|
||||||
gas_price: U256::from(100),
|
|
||||||
gas_limit: U256::from(21_000),
|
|
||||||
to: Some(Address::from(1337)),
|
|
||||||
chain_id: Some(1),
|
|
||||||
value: U256::from(1_000_000),
|
|
||||||
data: (&[1u8; 3000]).to_vec(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let signature = manager.trezor.sign_transaction(&addr, &t_info);
|
|
||||||
assert!(signature.is_ok());
|
|
||||||
}
|
|
@ -17,7 +17,6 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
use ethstore::{Error as SSError};
|
use ethstore::{Error as SSError};
|
||||||
use hardware_wallet::{Error as HardwareError};
|
|
||||||
|
|
||||||
/// Signing error
|
/// Signing error
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@ -26,8 +25,6 @@ pub enum SignError {
|
|||||||
NotUnlocked,
|
NotUnlocked,
|
||||||
/// Account does not exist.
|
/// Account does not exist.
|
||||||
NotFound,
|
NotFound,
|
||||||
/// Low-level hardware device error.
|
|
||||||
Hardware(HardwareError),
|
|
||||||
/// Low-level error from store
|
/// Low-level error from store
|
||||||
SStore(SSError),
|
SStore(SSError),
|
||||||
}
|
}
|
||||||
@ -37,18 +34,11 @@ impl fmt::Display for SignError {
|
|||||||
match *self {
|
match *self {
|
||||||
SignError::NotUnlocked => write!(f, "Account is locked"),
|
SignError::NotUnlocked => write!(f, "Account is locked"),
|
||||||
SignError::NotFound => write!(f, "Account does not exist"),
|
SignError::NotFound => write!(f, "Account does not exist"),
|
||||||
SignError::Hardware(ref e) => write!(f, "{}", e),
|
|
||||||
SignError::SStore(ref e) => write!(f, "{}", e),
|
SignError::SStore(ref e) => write!(f, "{}", e),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<HardwareError> for SignError {
|
|
||||||
fn from(e: HardwareError) -> Self {
|
|
||||||
SignError::Hardware(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<SSError> for SignError {
|
impl From<SSError> for SignError {
|
||||||
fn from(e: SSError) -> Self {
|
fn from(e: SSError) -> Self {
|
||||||
SignError::SStore(e)
|
SignError::SStore(e)
|
||||||
|
@ -22,28 +22,23 @@ mod account_data;
|
|||||||
mod error;
|
mod error;
|
||||||
mod stores;
|
mod stores;
|
||||||
|
|
||||||
#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
|
|
||||||
extern crate fake_hardware_wallet as hardware_wallet;
|
|
||||||
|
|
||||||
use self::account_data::{Unlock, AccountData};
|
use self::account_data::{Unlock, AccountData};
|
||||||
use self::stores::AddressBook;
|
use self::stores::AddressBook;
|
||||||
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::time::{Instant, Duration};
|
use std::time::{Instant, Duration};
|
||||||
|
|
||||||
use common_types::transaction::{Action, Transaction};
|
|
||||||
use ethkey::{Address, Message, Public, Secret, Password, Random, Generator};
|
use ethkey::{Address, Message, Public, Secret, Password, Random, Generator};
|
||||||
use ethstore::accounts_dir::MemoryDirectory;
|
use ethstore::accounts_dir::MemoryDirectory;
|
||||||
use ethstore::{
|
use ethstore::{
|
||||||
SimpleSecretStore, SecretStore, EthStore, EthMultiStore,
|
SimpleSecretStore, SecretStore, EthStore, EthMultiStore,
|
||||||
random_string, SecretVaultRef, StoreAccountRef, OpaqueSecret,
|
random_string, SecretVaultRef, StoreAccountRef, OpaqueSecret,
|
||||||
};
|
};
|
||||||
use log::{warn, debug};
|
use log::warn;
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
|
|
||||||
pub use ethkey::Signature;
|
pub use ethkey::Signature;
|
||||||
pub use ethstore::{Derivation, IndexDerivation, KeyFile, Error};
|
pub use ethstore::{Derivation, IndexDerivation, KeyFile, Error};
|
||||||
pub use hardware_wallet::{Error as HardwareError, HardwareWalletManager, KeyPath, TransactionInfo};
|
|
||||||
|
|
||||||
pub use self::account_data::AccountMeta;
|
pub use self::account_data::AccountMeta;
|
||||||
pub use self::error::SignError;
|
pub use self::error::SignError;
|
||||||
@ -53,10 +48,6 @@ type AccountToken = Password;
|
|||||||
/// Account management settings.
|
/// Account management settings.
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct AccountProviderSettings {
|
pub struct AccountProviderSettings {
|
||||||
/// Enable hardware wallet support.
|
|
||||||
pub enable_hardware_wallets: bool,
|
|
||||||
/// Use the classic chain key on the hardware wallet.
|
|
||||||
pub hardware_wallet_classic_key: bool,
|
|
||||||
/// Store raw account secret when unlocking the account permanently.
|
/// Store raw account secret when unlocking the account permanently.
|
||||||
pub unlock_keep_secret: bool,
|
pub unlock_keep_secret: bool,
|
||||||
/// Disallowed accounts.
|
/// Disallowed accounts.
|
||||||
@ -76,8 +67,6 @@ pub struct AccountProvider {
|
|||||||
sstore: Box<SecretStore>,
|
sstore: Box<SecretStore>,
|
||||||
/// Accounts unlocked with rolling tokens
|
/// Accounts unlocked with rolling tokens
|
||||||
transient_sstore: EthMultiStore,
|
transient_sstore: EthMultiStore,
|
||||||
/// Accounts in hardware wallets.
|
|
||||||
hardware_store: Option<HardwareWalletManager>,
|
|
||||||
/// When unlocking account permanently we additionally keep a raw secret in memory
|
/// When unlocking account permanently we additionally keep a raw secret in memory
|
||||||
/// to increase the performance of transaction signing.
|
/// to increase the performance of transaction signing.
|
||||||
unlock_keep_secret: bool,
|
unlock_keep_secret: bool,
|
||||||
@ -92,18 +81,6 @@ fn transient_sstore() -> EthMultiStore {
|
|||||||
impl AccountProvider {
|
impl AccountProvider {
|
||||||
/// Creates new account provider.
|
/// Creates new account provider.
|
||||||
pub fn new(sstore: Box<SecretStore>, settings: AccountProviderSettings) -> Self {
|
pub fn new(sstore: Box<SecretStore>, settings: AccountProviderSettings) -> Self {
|
||||||
let mut hardware_store = None;
|
|
||||||
|
|
||||||
if settings.enable_hardware_wallets {
|
|
||||||
match HardwareWalletManager::new() {
|
|
||||||
Ok(manager) => {
|
|
||||||
manager.set_key_path(if settings.hardware_wallet_classic_key { KeyPath::EthereumClassic } else { KeyPath::Ethereum });
|
|
||||||
hardware_store = Some(manager)
|
|
||||||
},
|
|
||||||
Err(e) => debug!("Error initializing hardware wallets: {}", e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Ok(accounts) = sstore.accounts() {
|
if let Ok(accounts) = sstore.accounts() {
|
||||||
for account in accounts.into_iter().filter(|a| settings.blacklisted_accounts.contains(&a.address)) {
|
for account in accounts.into_iter().filter(|a| settings.blacklisted_accounts.contains(&a.address)) {
|
||||||
warn!("Local Account {} has a blacklisted (known to be weak) address and will be ignored",
|
warn!("Local Account {} has a blacklisted (known to be weak) address and will be ignored",
|
||||||
@ -121,9 +98,8 @@ impl AccountProvider {
|
|||||||
unlocked_secrets: RwLock::new(HashMap::new()),
|
unlocked_secrets: RwLock::new(HashMap::new()),
|
||||||
unlocked: RwLock::new(HashMap::new()),
|
unlocked: RwLock::new(HashMap::new()),
|
||||||
address_book: RwLock::new(address_book),
|
address_book: RwLock::new(address_book),
|
||||||
sstore: sstore,
|
sstore,
|
||||||
transient_sstore: transient_sstore(),
|
transient_sstore: transient_sstore(),
|
||||||
hardware_store: hardware_store,
|
|
||||||
unlock_keep_secret: settings.unlock_keep_secret,
|
unlock_keep_secret: settings.unlock_keep_secret,
|
||||||
blacklisted_accounts: settings.blacklisted_accounts,
|
blacklisted_accounts: settings.blacklisted_accounts,
|
||||||
}
|
}
|
||||||
@ -137,7 +113,6 @@ impl AccountProvider {
|
|||||||
address_book: RwLock::new(AddressBook::transient()),
|
address_book: RwLock::new(AddressBook::transient()),
|
||||||
sstore: Box::new(EthStore::open(Box::new(MemoryDirectory::default())).expect("MemoryDirectory load always succeeds; qed")),
|
sstore: Box::new(EthStore::open(Box::new(MemoryDirectory::default())).expect("MemoryDirectory load always succeeds; qed")),
|
||||||
transient_sstore: transient_sstore(),
|
transient_sstore: transient_sstore(),
|
||||||
hardware_store: None,
|
|
||||||
unlock_keep_secret: false,
|
unlock_keep_secret: false,
|
||||||
blacklisted_accounts: vec![],
|
blacklisted_accounts: vec![],
|
||||||
}
|
}
|
||||||
@ -219,34 +194,6 @@ impl AccountProvider {
|
|||||||
Ok(self.accounts()?.first().cloned().unwrap_or_default())
|
Ok(self.accounts()?.first().cloned().unwrap_or_default())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns addresses of hardware accounts.
|
|
||||||
pub fn hardware_accounts(&self) -> Result<Vec<Address>, Error> {
|
|
||||||
if let Some(accounts) = self.hardware_store.as_ref().map(|h| h.list_wallets()) {
|
|
||||||
if !accounts.is_empty() {
|
|
||||||
return Ok(accounts.into_iter().map(|a| a.address).collect());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(Error::Custom("No hardware wallet accounts were found".into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a list of paths to locked hardware wallets
|
|
||||||
pub fn locked_hardware_accounts(&self) -> Result<Vec<String>, SignError> {
|
|
||||||
match self.hardware_store.as_ref().map(|h| h.list_locked_wallets()) {
|
|
||||||
None => Err(SignError::NotFound),
|
|
||||||
Some(Err(e)) => Err(SignError::Hardware(e)),
|
|
||||||
Some(Ok(s)) => Ok(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Provide a pin to a locked hardware wallet on USB path to unlock it
|
|
||||||
pub fn hardware_pin_matrix_ack(&self, path: &str, pin: &str) -> Result<bool, SignError> {
|
|
||||||
match self.hardware_store.as_ref().map(|h| h.pin_matrix_ack(path, pin)) {
|
|
||||||
None => Err(SignError::NotFound),
|
|
||||||
Some(Err(e)) => Err(SignError::Hardware(e)),
|
|
||||||
Some(Ok(s)) => Ok(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns each address along with metadata.
|
/// Returns each address along with metadata.
|
||||||
pub fn addresses_info(&self) -> HashMap<Address, AccountMeta> {
|
pub fn addresses_info(&self) -> HashMap<Address, AccountMeta> {
|
||||||
self.address_book.read().get()
|
self.address_book.read().get()
|
||||||
@ -277,29 +224,8 @@ impl AccountProvider {
|
|||||||
Ok(r)
|
Ok(r)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns each hardware account along with name and meta.
|
|
||||||
pub fn hardware_accounts_info(&self) -> Result<HashMap<Address, AccountMeta>, Error> {
|
|
||||||
let r = self.hardware_accounts()?
|
|
||||||
.into_iter()
|
|
||||||
.map(|address| (address.clone(), self.account_meta(address).ok().unwrap_or_default()))
|
|
||||||
.collect();
|
|
||||||
Ok(r)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns each hardware account along with name and meta.
|
|
||||||
pub fn is_hardware_address(&self, address: &Address) -> bool {
|
|
||||||
self.hardware_store.as_ref().and_then(|s| s.wallet_info(address)).is_some()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns each account along with name and meta.
|
/// Returns each account along with name and meta.
|
||||||
pub fn account_meta(&self, address: Address) -> Result<AccountMeta, Error> {
|
pub fn account_meta(&self, address: Address) -> Result<AccountMeta, Error> {
|
||||||
if let Some(info) = self.hardware_store.as_ref().and_then(|s| s.wallet_info(&address)) {
|
|
||||||
Ok(AccountMeta {
|
|
||||||
name: info.name,
|
|
||||||
meta: info.manufacturer,
|
|
||||||
uuid: None,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
let account = self.sstore.account_ref(&address)?;
|
let account = self.sstore.account_ref(&address)?;
|
||||||
Ok(AccountMeta {
|
Ok(AccountMeta {
|
||||||
name: self.sstore.name(&account)?,
|
name: self.sstore.name(&account)?,
|
||||||
@ -307,7 +233,6 @@ impl AccountProvider {
|
|||||||
uuid: self.sstore.uuid(&account).ok().map(Into::into), // allowed to not have a Uuid
|
uuid: self.sstore.uuid(&account).ok().map(Into::into), // allowed to not have a Uuid
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns account public key.
|
/// Returns account public key.
|
||||||
pub fn account_public(&self, address: Address, password: &Password) -> Result<Public, Error> {
|
pub fn account_public(&self, address: Address, password: &Password) -> Result<Public, Error> {
|
||||||
@ -370,10 +295,7 @@ impl AccountProvider {
|
|||||||
let _ = self.sstore.sign(&account, &password, &Default::default())?;
|
let _ = self.sstore.sign(&account, &password, &Default::default())?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let data = AccountData {
|
let data = AccountData { unlock, password };
|
||||||
unlock: unlock,
|
|
||||||
password: password,
|
|
||||||
};
|
|
||||||
|
|
||||||
unlocked.insert(account, data);
|
unlocked.insert(account, data);
|
||||||
Ok(())
|
Ok(())
|
||||||
@ -575,36 +497,6 @@ impl AccountProvider {
|
|||||||
self.sstore.set_vault_meta(name, meta)
|
self.sstore.set_vault_meta(name, meta)
|
||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sign message with hardware wallet.
|
|
||||||
pub fn sign_message_with_hardware(&self, address: &Address, message: &[u8]) -> Result<Signature, SignError> {
|
|
||||||
match self.hardware_store.as_ref().map(|s| s.sign_message(address, message)) {
|
|
||||||
None | Some(Err(HardwareError::KeyNotFound)) => Err(SignError::NotFound),
|
|
||||||
Some(Err(e)) => Err(From::from(e)),
|
|
||||||
Some(Ok(s)) => Ok(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Sign transaction with hardware wallet.
|
|
||||||
pub fn sign_transaction_with_hardware(&self, address: &Address, transaction: &Transaction, chain_id: Option<u64>, rlp_encoded_transaction: &[u8]) -> Result<Signature, SignError> {
|
|
||||||
let t_info = TransactionInfo {
|
|
||||||
nonce: transaction.nonce,
|
|
||||||
gas_price: transaction.gas_price,
|
|
||||||
gas_limit: transaction.gas,
|
|
||||||
to: match transaction.action {
|
|
||||||
Action::Create => None,
|
|
||||||
Action::Call(ref to) => Some(to.clone()),
|
|
||||||
},
|
|
||||||
value: transaction.value,
|
|
||||||
data: transaction.data.to_vec(),
|
|
||||||
chain_id: chain_id,
|
|
||||||
};
|
|
||||||
match self.hardware_store.as_ref().map(|s| s.sign_transaction(&address, &t_info, rlp_encoded_transaction)) {
|
|
||||||
None | Some(Err(HardwareError::KeyNotFound)) => Err(SignError::NotFound),
|
|
||||||
Some(Err(e)) => Err(From::from(e)),
|
|
||||||
Some(Ok(s)) => Ok(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@ -636,7 +528,7 @@ mod tests {
|
|||||||
let derived_addr = ap.derive_account(
|
let derived_addr = ap.derive_account(
|
||||||
&kp.address(),
|
&kp.address(),
|
||||||
None,
|
None,
|
||||||
Derivation::SoftHash(H256::from(999)),
|
Derivation::SoftHash(H256::from_low_u64_be(999)),
|
||||||
false,
|
false,
|
||||||
).expect("Derivation should not fail");
|
).expect("Derivation should not fail");
|
||||||
|
|
||||||
@ -654,7 +546,7 @@ mod tests {
|
|||||||
let derived_addr = ap.derive_account(
|
let derived_addr = ap.derive_account(
|
||||||
&kp.address(),
|
&kp.address(),
|
||||||
None,
|
None,
|
||||||
Derivation::SoftHash(H256::from(999)),
|
Derivation::SoftHash(H256::from_low_u64_be(999)),
|
||||||
true,
|
true,
|
||||||
).expect("Derivation should not fail");
|
).expect("Derivation should not fail");
|
||||||
|
|
||||||
@ -675,7 +567,7 @@ mod tests {
|
|||||||
let derived_addr = ap.derive_account(
|
let derived_addr = ap.derive_account(
|
||||||
&kp.address(),
|
&kp.address(),
|
||||||
None,
|
None,
|
||||||
Derivation::SoftHash(H256::from(1999)),
|
Derivation::SoftHash(H256::from_low_u64_be(1999)),
|
||||||
true,
|
true,
|
||||||
).expect("Derivation should not fail");
|
).expect("Derivation should not fail");
|
||||||
ap.unlock_account_permanently(derived_addr, "base".into())
|
ap.unlock_account_permanently(derived_addr, "base".into())
|
||||||
@ -687,7 +579,7 @@ mod tests {
|
|||||||
let signed_msg2 = ap.sign_derived(
|
let signed_msg2 = ap.sign_derived(
|
||||||
&kp.address(),
|
&kp.address(),
|
||||||
None,
|
None,
|
||||||
Derivation::SoftHash(H256::from(1999)),
|
Derivation::SoftHash(H256::from_low_u64_be(1999)),
|
||||||
msg,
|
msg,
|
||||||
).expect("Derived signing with existing unlocked account should not fail");
|
).expect("Derived signing with existing unlocked account should not fail");
|
||||||
|
|
||||||
|
@ -153,7 +153,7 @@ impl<K: hash::Hash + Eq, V> DiskMap<K, V> {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::AddressBook;
|
use super::{AddressBook, Address};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use tempdir::TempDir;
|
use tempdir::TempDir;
|
||||||
use crate::account_data::AccountMeta;
|
use crate::account_data::AccountMeta;
|
||||||
@ -162,12 +162,12 @@ mod tests {
|
|||||||
fn should_save_and_reload_address_book() {
|
fn should_save_and_reload_address_book() {
|
||||||
let tempdir = TempDir::new("").unwrap();
|
let tempdir = TempDir::new("").unwrap();
|
||||||
let mut b = AddressBook::new(tempdir.path());
|
let mut b = AddressBook::new(tempdir.path());
|
||||||
b.set_name(1.into(), "One".to_owned());
|
b.set_name(Address::from_low_u64_be(1), "One".to_owned());
|
||||||
b.set_meta(1.into(), "{1:1}".to_owned());
|
b.set_meta(Address::from_low_u64_be(1), "{1:1}".to_owned());
|
||||||
let b = AddressBook::new(tempdir.path());
|
let b = AddressBook::new(tempdir.path());
|
||||||
assert_eq!(b.get(), vec![
|
assert_eq!(b.get(), vec![
|
||||||
(1, AccountMeta {name: "One".to_owned(), meta: "{1:1}".to_owned(), uuid: None})
|
(Address::from_low_u64_be(1), AccountMeta {name: "One".to_owned(), meta: "{1:1}".to_owned(), uuid: None})
|
||||||
].into_iter().map(|(a, b)| (a.into(), b)).collect::<HashMap<_, _>>());
|
].into_iter().collect::<HashMap<_, _>>());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -175,15 +175,15 @@ mod tests {
|
|||||||
let tempdir = TempDir::new("").unwrap();
|
let tempdir = TempDir::new("").unwrap();
|
||||||
let mut b = AddressBook::new(tempdir.path());
|
let mut b = AddressBook::new(tempdir.path());
|
||||||
|
|
||||||
b.set_name(1.into(), "One".to_owned());
|
b.set_name(Address::from_low_u64_be(1), "One".to_owned());
|
||||||
b.set_name(2.into(), "Two".to_owned());
|
b.set_name(Address::from_low_u64_be(2), "Two".to_owned());
|
||||||
b.set_name(3.into(), "Three".to_owned());
|
b.set_name(Address::from_low_u64_be(3), "Three".to_owned());
|
||||||
b.remove(2.into());
|
b.remove(Address::from_low_u64_be(2).into());
|
||||||
|
|
||||||
let b = AddressBook::new(tempdir.path());
|
let b = AddressBook::new(tempdir.path());
|
||||||
assert_eq!(b.get(), vec![
|
assert_eq!(b.get(), vec![
|
||||||
(1, AccountMeta{name: "One".to_owned(), meta: "{}".to_owned(), uuid: None}),
|
(Address::from_low_u64_be(1), AccountMeta{name: "One".to_owned(), meta: "{}".to_owned(), uuid: None}),
|
||||||
(3, AccountMeta{name: "Three".to_owned(), meta: "{}".to_owned(), uuid: None}),
|
(Address::from_low_u64_be(3), AccountMeta{name: "Three".to_owned(), meta: "{}".to_owned(), uuid: None}),
|
||||||
].into_iter().map(|(a, b)| (a.into(), b)).collect::<HashMap<_, _>>());
|
].into_iter().collect::<HashMap<_, _>>());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Chain Specification"
|
||||||
name = "chainspec"
|
name = "chainspec"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["Marek Kotewicz <marek@parity.io>"]
|
authors = ["Marek Kotewicz <marek@parity.io>"]
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
[package]
|
[package]
|
||||||
authors = ["Parity <admin@parity.io>"]
|
description = "Parity Ethereum CLI Signer Tool"
|
||||||
description = "Parity Cli Tool"
|
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "cli-signer"
|
name = "cli-signer"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
|
authors = ["Parity <admin@parity.io>"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
futures = "0.1"
|
futures = "0.1"
|
||||||
rpassword = "1.0"
|
rpassword = "1.0"
|
||||||
parity-rpc = { path = "../rpc" }
|
parity-rpc = { path = "../rpc" }
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
[package]
|
[package]
|
||||||
authors = ["Parity <admin@parity.io>"]
|
description = "Parity Ethereum RPC Client"
|
||||||
description = "Parity Rpc Client"
|
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "parity-rpc-client"
|
name = "parity-rpc-client"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
|
authors = ["Parity <admin@parity.io>"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
futures = "0.1"
|
futures = "0.1"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
@ -15,7 +15,7 @@ serde_json = "1.0"
|
|||||||
url = "1.2.0"
|
url = "1.2.0"
|
||||||
matches = "0.1"
|
matches = "0.1"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
jsonrpc-core = "10.0.1"
|
jsonrpc-core = "12.0.0"
|
||||||
jsonrpc-ws-server = "10.0.1"
|
jsonrpc-ws-server = "12.0.0"
|
||||||
parity-rpc = { path = "../../rpc" }
|
parity-rpc = { path = "../../rpc" }
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
|
128
docs/CHANGELOG-2.4.md
Normal file
128
docs/CHANGELOG-2.4.md
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
## Parity-Ethereum [v2.4.9](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.9)
|
||||||
|
|
||||||
|
Parity Ethereum v2.4.9-stable is a security update which addresses servo/rust-smallvec#148
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* cargo update -p smallvec ([#10822](https://github.com/paritytech/parity-ethereum/pull/10822))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.8](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.8)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.8-stable is a bugfix release that improves performance and stability.
|
||||||
|
|
||||||
|
* Blockchain: fix reset chain
|
||||||
|
* State tests: treat empty accounts the same as non-existant accounts (EIP 1052)
|
||||||
|
* Aura: fix Timestamp Overflow
|
||||||
|
* Networking: support discovery-only peers (geth bootnodes)
|
||||||
|
* Snapshotting: fix unclean shutdown while snappshotting is under way
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* ethcore/res: activate atlantis classic hf on block 8772000 ([#10766](https://github.com/paritytech/parity-ethereum/pull/10766))
|
||||||
|
* fix docker tags for publishing ([#10741](https://github.com/paritytech/parity-ethereum/pull/10741))
|
||||||
|
* Reset blockchain properly ([#10669](https://github.com/paritytech/parity-ethereum/pull/10669))
|
||||||
|
* adds rpc error message for --no-ancient-blocks ([#10608](https://github.com/paritytech/parity-ethereum/pull/10608))
|
||||||
|
* Treat empty account the same as non-exist accounts in EIP-1052 ([#10775](https://github.com/paritytech/parity-ethereum/pull/10775))
|
||||||
|
* fix: aura don't add `SystemTime::now()` ([#10720](https://github.com/paritytech/parity-ethereum/pull/10720))
|
||||||
|
* DevP2p: Get node IP address and udp port from Socket, if not included in PING packet ([#10705](https://github.com/paritytech/parity-ethereum/pull/10705))
|
||||||
|
* Revert "fix: aura don't add `SystemTime::now()` ([#10720](https://github.com/paritytech/parity-ethereum/pull/10720))"
|
||||||
|
* Add a way to signal shutdown to snapshotting threads ([#10744](https://github.com/paritytech/parity-ethereum/pull/10744))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.7](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.7)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.7-stable is a bugfix release that improves performance and stability.
|
||||||
|
|
||||||
|
Among others, it enables the _Atlantis_ hardfork on **Morden** and **Kotti** Classic networks.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* [CI] allow cargo audit to fail ([#10676](https://github.com/paritytech/parity-ethereum/pull/10676))
|
||||||
|
* new image ([#10673](https://github.com/paritytech/parity-ethereum/pull/10673))
|
||||||
|
* Update publishing ([#10644](https://github.com/paritytech/parity-ethereum/pull/10644))
|
||||||
|
* enable lto for release builds ([#10717](https://github.com/paritytech/parity-ethereum/pull/10717))
|
||||||
|
* Use RUSTFLAGS to set the optimization level ([#10719](https://github.com/paritytech/parity-ethereum/pull/10719))
|
||||||
|
* ethcore: enable ECIP-1054 for classic ([#10731](https://github.com/paritytech/parity-ethereum/pull/10731))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.6](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.6)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.6-stable is a bugfix release that improves performance and stability.
|
||||||
|
|
||||||
|
Among others, it enables the Petersburg hardfork on **Rinkeby** and **POA-Core** Network, as well as the **Kovan** Network community hardfork.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* ci: publish docs debug ([#10638](https://github.com/paritytech/parity-ethereum/pull/10638))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.5](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.5)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.5-stable is a bugfix release that improves performance and stability. This release improves memory optimizations around timestamp handling and stabilizes the 2.4 release branch.
|
||||||
|
|
||||||
|
As of today, Parity-Ethereum 2.3 reaches end of life and everyone is encouraged to upgrade.
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.4](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.4)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.4-beta is a bugfix release that improves performance and stability. This patch release removes the dead chain configs for Easthub and Ethereum Social.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* fix(rpc-types): replace uint and hash with `ethereum_types v0.4` ([#10217](https://github.com/paritytech/parity-ethereum/pull/10217))
|
||||||
|
* chore(bump ethereum-types) ([#10396](https://github.com/paritytech/parity-ethereum/pull/10396))
|
||||||
|
* fix(light eth_gasPrice): ask network if not in cache ([#10535](https://github.com/paritytech/parity-ethereum/pull/10535))
|
||||||
|
* fix(light account response): update `tx_queue` ([#10545](https://github.com/paritytech/parity-ethereum/pull/10545))
|
||||||
|
* fix(bump dependencies) ([#10540](https://github.com/paritytech/parity-ethereum/pull/10540))
|
||||||
|
* tx-pool: check transaction readiness before replacing ([#10526](https://github.com/paritytech/parity-ethereum/pull/10526))
|
||||||
|
* fix #10390 ([#10391](https://github.com/paritytech/parity-ethereum/pull/10391))
|
||||||
|
* private-tx: replace error_chain ([#10510](https://github.com/paritytech/parity-ethereum/pull/10510))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.3](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.3)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.3-beta is a bugfix release that improves performance and stability. This patch release contains a critical bug fix where serving light clients previously led to client crashes. Upgrading is highly recommended.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* Add additional request tests ([#10503](https://github.com/paritytech/parity-ethereum/pull/10503))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.2](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.2)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.2-beta is a bugfix release that improves performance and stability.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* Сaching through docker volume ([#10477](https://github.com/paritytech/parity-ethereum/pull/10477))
|
||||||
|
* fix win&mac build ([#10486](https://github.com/paritytech/parity-ethereum/pull/10486))
|
||||||
|
* fix(extract `timestamp_checked_add` as lib) ([#10383](https://github.com/paritytech/parity-ethereum/pull/10383))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.1](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.1)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.1-beta is a bugfix release that improves performance and stability.
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* Implement parity_versionInfo & parity_setChain on LC; fix parity_setChain ([#10312](https://github.com/paritytech/parity-ethereum/pull/10312))
|
||||||
|
* CI publish to aws ([#10446](https://github.com/paritytech/parity-ethereum/pull/10446))
|
||||||
|
* CI aws git checkout ([#10451](https://github.com/paritytech/parity-ethereum/pull/10451))
|
||||||
|
* Revert "CI aws git checkout ([#10451](https://github.com/paritytech/parity-ethereum/pull/10451))" (#10456)
|
||||||
|
* Revert "CI aws git checkout ([#10451](https://github.com/paritytech/parity-ethereum/pull/10451))"
|
||||||
|
* Tests parallelized ([#10452](https://github.com/paritytech/parity-ethereum/pull/10452))
|
||||||
|
* Ensure static validator set changes are recognized ([#10467](https://github.com/paritytech/parity-ethereum/pull/10467))
|
||||||
|
|
||||||
|
## Parity-Ethereum [v2.4.0](https://github.com/paritytech/parity-ethereum/releases/tag/v2.4.0)
|
||||||
|
|
||||||
|
Parity-Ethereum 2.4.0-beta is our trifortnightly minor version release coming with a lot of new features as well as bugfixes and performance improvements.
|
||||||
|
|
||||||
|
Notable changes:
|
||||||
|
- Account management is now deprecated ([#10213](https://github.com/paritytech/parity-ethereum/pull/10213))
|
||||||
|
- Local accounts can now be specified via CLI ([#9960](https://github.com/paritytech/parity-ethereum/pull/9960))
|
||||||
|
- Chains can now be reset to a particular block via CLI ([#9782](https://github.com/paritytech/parity-ethereum/pull/9782))
|
||||||
|
- Ethash now additionally implements ProgPoW ([#9762](https://github.com/paritytech/parity-ethereum/pull/9762))
|
||||||
|
- The `eip1283DisableTransition` flag was added to revert EIP-1283 ([#10214](https://github.com/paritytech/parity-ethereum/pull/10214))
|
||||||
|
|
||||||
|
The full list of included changes:
|
||||||
|
|
||||||
|
* revert some changes, could be buggy ([#10399](https://github.com/paritytech/parity-ethereum/pull/10399))
|
||||||
|
* 10000 > 5000 ([#10422](https://github.com/paritytech/parity-ethereum/pull/10422))
|
||||||
|
* fix panic when logging directory does not exist, closes #10420 ([#10424](https://github.com/paritytech/parity-ethereum/pull/10424))
|
||||||
|
* fix underflow in pip, closes #10419 ([#10423](https://github.com/paritytech/parity-ethereum/pull/10423))
|
||||||
|
* ci: clean up gitlab-ci.yml leftovers from previous merge ([#10429](https://github.com/paritytech/parity-ethereum/pull/10429))
|
||||||
|
* Update hardcoded headers for Foundation, Ropsten, Kovan and Classic ([#10417](https://github.com/paritytech/parity-ethereum/pull/10417))
|
||||||
|
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Ethash & ProgPoW Implementations"
|
||||||
name = "ethash"
|
name = "ethash"
|
||||||
version = "1.12.0"
|
version = "1.12.0"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
@ -6,8 +7,8 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
crunchy = "0.1.0"
|
crunchy = "0.1.0"
|
||||||
either = "1.0.0"
|
either = "1.0.0"
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
memmap = "0.6"
|
memmap = "0.6"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
|
@ -52,14 +52,14 @@ mod progpow;
|
|||||||
pub use cache::{NodeCacheBuilder, OptimizeFor};
|
pub use cache::{NodeCacheBuilder, OptimizeFor};
|
||||||
pub use compute::{ProofOfWork, quick_get_difficulty, slow_hash_block_number};
|
pub use compute::{ProofOfWork, quick_get_difficulty, slow_hash_block_number};
|
||||||
use compute::Light;
|
use compute::Light;
|
||||||
use ethereum_types::{U256, U512};
|
use ethereum_types::{BigEndianHash, U256, U512};
|
||||||
use keccak::H256;
|
use keccak::H256;
|
||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
pub use seed_compute::SeedHashCompute;
|
pub use seed_compute::SeedHashCompute;
|
||||||
pub use shared::ETHASH_EPOCH_LENGTH;
|
pub use shared::ETHASH_EPOCH_LENGTH;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::convert::TryFrom;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
struct LightCache {
|
struct LightCache {
|
||||||
@ -161,12 +161,12 @@ impl EthashManager {
|
|||||||
|
|
||||||
/// Convert an Ethash boundary to its original difficulty. Basically just `f(x) = 2^256 / x`.
|
/// Convert an Ethash boundary to its original difficulty. Basically just `f(x) = 2^256 / x`.
|
||||||
pub fn boundary_to_difficulty(boundary: ðereum_types::H256) -> U256 {
|
pub fn boundary_to_difficulty(boundary: ðereum_types::H256) -> U256 {
|
||||||
difficulty_to_boundary_aux(&**boundary)
|
difficulty_to_boundary_aux(&boundary.into_uint())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert an Ethash difficulty to the target boundary. Basically just `f(x) = 2^256 / x`.
|
/// Convert an Ethash difficulty to the target boundary. Basically just `f(x) = 2^256 / x`.
|
||||||
pub fn difficulty_to_boundary(difficulty: &U256) -> ethereum_types::H256 {
|
pub fn difficulty_to_boundary(difficulty: &U256) -> ethereum_types::H256 {
|
||||||
difficulty_to_boundary_aux(difficulty).into()
|
BigEndianHash::from_uint(&difficulty_to_boundary_aux(difficulty))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn difficulty_to_boundary_aux<T: Into<U512>>(difficulty: T) -> ethereum_types::U256 {
|
fn difficulty_to_boundary_aux<T: Into<U512>>(difficulty: T) -> ethereum_types::U256 {
|
||||||
@ -177,8 +177,8 @@ fn difficulty_to_boundary_aux<T: Into<U512>>(difficulty: T) -> ethereum_types::U
|
|||||||
if difficulty == U512::one() {
|
if difficulty == U512::one() {
|
||||||
U256::max_value()
|
U256::max_value()
|
||||||
} else {
|
} else {
|
||||||
// difficulty > 1, so result should never overflow 256 bits
|
const PROOF: &str = "difficulty > 1, so result never overflows 256 bits; qed";
|
||||||
U256::from((U512::one() << 256) / difficulty)
|
U256::try_from((U512::one() << 256) / difficulty).expect(PROOF)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -203,10 +203,10 @@ fn test_lru() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_difficulty_to_boundary() {
|
fn test_difficulty_to_boundary() {
|
||||||
use ethereum_types::H256;
|
use ethereum_types::{H256, BigEndianHash};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
assert_eq!(difficulty_to_boundary(&U256::from(1)), H256::from(U256::max_value()));
|
assert_eq!(difficulty_to_boundary(&U256::from(1)), BigEndianHash::from_uint(&U256::max_value()));
|
||||||
assert_eq!(difficulty_to_boundary(&U256::from(2)), H256::from_str("8000000000000000000000000000000000000000000000000000000000000000").unwrap());
|
assert_eq!(difficulty_to_boundary(&U256::from(2)), H256::from_str("8000000000000000000000000000000000000000000000000000000000000000").unwrap());
|
||||||
assert_eq!(difficulty_to_boundary(&U256::from(4)), H256::from_str("4000000000000000000000000000000000000000000000000000000000000000").unwrap());
|
assert_eq!(difficulty_to_boundary(&U256::from(4)), H256::from_str("4000000000000000000000000000000000000000000000000000000000000000").unwrap());
|
||||||
assert_eq!(difficulty_to_boundary(&U256::from(32)), H256::from_str("0800000000000000000000000000000000000000000000000000000000000000").unwrap());
|
assert_eq!(difficulty_to_boundary(&U256::from(32)), H256::from_str("0800000000000000000000000000000000000000000000000000000000000000").unwrap());
|
||||||
@ -220,9 +220,18 @@ fn test_difficulty_to_boundary_regression() {
|
|||||||
// https://github.com/paritytech/parity-ethereum/issues/8397
|
// https://github.com/paritytech/parity-ethereum/issues/8397
|
||||||
for difficulty in 1..9 {
|
for difficulty in 1..9 {
|
||||||
assert_eq!(U256::from(difficulty), boundary_to_difficulty(&difficulty_to_boundary(&difficulty.into())));
|
assert_eq!(U256::from(difficulty), boundary_to_difficulty(&difficulty_to_boundary(&difficulty.into())));
|
||||||
assert_eq!(H256::from(difficulty), difficulty_to_boundary(&boundary_to_difficulty(&difficulty.into())));
|
assert_eq!(
|
||||||
assert_eq!(U256::from(difficulty), boundary_to_difficulty(&boundary_to_difficulty(&difficulty.into()).into()));
|
H256::from_low_u64_be(difficulty),
|
||||||
assert_eq!(H256::from(difficulty), difficulty_to_boundary(&difficulty_to_boundary(&difficulty.into()).into()));
|
difficulty_to_boundary(&boundary_to_difficulty(&H256::from_low_u64_be(difficulty))),
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
U256::from(difficulty),
|
||||||
|
boundary_to_difficulty(&BigEndianHash::from_uint(&boundary_to_difficulty(&H256::from_low_u64_be(difficulty)))),
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
H256::from_low_u64_be(difficulty),
|
||||||
|
difficulty_to_boundary(&difficulty_to_boundary(&difficulty.into()).into_uint()),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -235,5 +244,5 @@ fn test_difficulty_to_boundary_panics_on_zero() {
|
|||||||
#[test]
|
#[test]
|
||||||
#[should_panic]
|
#[should_panic]
|
||||||
fn test_boundary_to_difficulty_panics_on_zero() {
|
fn test_boundary_to_difficulty_panics_on_zero() {
|
||||||
boundary_to_difficulty(ðereum_types::H256::from(0));
|
boundary_to_difficulty(ðereum_types::H256::zero());
|
||||||
}
|
}
|
||||||
|
@ -14,6 +14,22 @@
|
|||||||
// You should have received a copy of the GNU General Public License
|
// You should have received a copy of the GNU General Public License
|
||||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
//! ProgPoW (Programmatic Proof-of-Work) is the Ethereum network's proposed new Application-Specific Integrated
|
||||||
|
//! Circuit (ASIC) resistant Proof-of-Work mining algorithm.
|
||||||
|
//!
|
||||||
|
//! ProgPoW's aim is to reduce the efficiencies of specialized mining devices known as ASIC chips
|
||||||
|
//! (and accelerated GPU-based setups), and to maximize the performance of General Purpose Hardware (GPUs) to enable
|
||||||
|
//! more users to compete for new cryptocurrency awarded by the protocol.
|
||||||
|
//!
|
||||||
|
//! ASIC chips are those that are tailored to efficiently mining cryptocurrency based on a specific hashing algorithm.
|
||||||
|
//!
|
||||||
|
//! GPU mining setups are less specialised are struggle to compete for mining rewards.
|
||||||
|
//!
|
||||||
|
//! It would be a change from Ethereum's current PoW mining algorithm known as Ethash.
|
||||||
|
//!
|
||||||
|
//! ProgPoW audits have been proposed to analyse the efficiency of a ProgPoW ASICs over
|
||||||
|
//! GPUs and analysis of the economic impact on the Ethereum protocol.
|
||||||
|
|
||||||
use compute::{FNV_PRIME, calculate_dag_item};
|
use compute::{FNV_PRIME, calculate_dag_item};
|
||||||
use keccak::H256;
|
use keccak::H256;
|
||||||
use shared::{ETHASH_ACCESSES, ETHASH_MIX_BYTES, Node, get_data_size};
|
use shared::{ETHASH_ACCESSES, ETHASH_MIX_BYTES, Node, get_data_size};
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
description = "Ethcore library"
|
description = "Parity Ethereum (EthCore) Library"
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "ethcore"
|
name = "ethcore"
|
||||||
@ -7,34 +7,35 @@ version = "1.12.0"
|
|||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ansi_term = "0.10"
|
account-db = { path = "account-db" }
|
||||||
|
ansi_term = "0.11"
|
||||||
blooms-db = { path = "../util/blooms-db", optional = true }
|
blooms-db = { path = "../util/blooms-db", optional = true }
|
||||||
bn = { git = "https://github.com/paritytech/bn", default-features = false }
|
|
||||||
byteorder = "1.0"
|
|
||||||
common-types = { path = "types" }
|
common-types = { path = "types" }
|
||||||
crossbeam = "0.4"
|
crossbeam-utils = "0.6"
|
||||||
|
derive_more = "0.14.0"
|
||||||
env_logger = { version = "0.5", optional = true }
|
env_logger = { version = "0.5", optional = true }
|
||||||
error-chain = { version = "0.12", default-features = false }
|
ethabi = "8.0"
|
||||||
ethabi = "6.0"
|
ethabi-contract = "8.0"
|
||||||
ethabi-contract = "6.0"
|
ethabi-derive = "8.0"
|
||||||
ethabi-derive = "6.0"
|
|
||||||
ethash = { path = "../ethash" }
|
ethash = { path = "../ethash" }
|
||||||
ethcore-blockchain = { path = "./blockchain" }
|
ethcore-blockchain = { path = "./blockchain" }
|
||||||
ethcore-bloom-journal = { path = "../util/bloom" }
|
ethcore-bloom-journal = { path = "../util/bloom" }
|
||||||
|
ethcore-builtin = { path = "./builtin" }
|
||||||
ethcore-call-contract = { path = "./call-contract" }
|
ethcore-call-contract = { path = "./call-contract" }
|
||||||
ethcore-db = { path = "./db" }
|
ethcore-db = { path = "./db" }
|
||||||
ethcore-io = { path = "../util/io" }
|
ethcore-io = { path = "../util/io" }
|
||||||
ethcore-miner = { path = "../miner" }
|
ethcore-miner = { path = "../miner" }
|
||||||
ethcore-stratum = { path = "../miner/stratum", optional = true }
|
ethcore-stratum = { path = "../miner/stratum", optional = true }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
ethjson = { path = "../json" }
|
ethjson = { path = "../json" }
|
||||||
ethkey = { path = "../accounts/ethkey" }
|
ethkey = { path = "../accounts/ethkey" }
|
||||||
evm = { path = "evm" }
|
evm = { path = "evm" }
|
||||||
hash-db = "0.11.0"
|
futures = "0.1"
|
||||||
heapsize = "0.4"
|
hash-db = "0.12.4"
|
||||||
|
parity-util-mem = "0.1"
|
||||||
itertools = "0.5"
|
itertools = "0.5"
|
||||||
journaldb = { path = "../util/journaldb" }
|
journaldb = { path = "../util/journaldb" }
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
keccak-hasher = { path = "../util/keccak-hasher" }
|
keccak-hasher = { path = "../util/keccak-hasher" }
|
||||||
kvdb = "0.1"
|
kvdb = "0.1"
|
||||||
kvdb-memorydb = "0.1"
|
kvdb-memorydb = "0.1"
|
||||||
@ -45,24 +46,24 @@ log = "0.4"
|
|||||||
lru-cache = "0.1"
|
lru-cache = "0.1"
|
||||||
macros = { path = "../util/macros" }
|
macros = { path = "../util/macros" }
|
||||||
memory-cache = { path = "../util/memory-cache" }
|
memory-cache = { path = "../util/memory-cache" }
|
||||||
memory-db = "0.11.0"
|
memory-db = "0.12.4"
|
||||||
num = { version = "0.1", default-features = false, features = ["bigint"] }
|
|
||||||
num_cpus = "1.2"
|
num_cpus = "1.2"
|
||||||
parity-bytes = "0.1"
|
parity-bytes = "0.1"
|
||||||
parity-crypto = "0.3.0"
|
|
||||||
parity-snappy = "0.1"
|
parity-snappy = "0.1"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
trie-db = "0.11.0"
|
pod-account = { path = "pod-account" }
|
||||||
|
trie-db = "0.12.4"
|
||||||
patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" }
|
patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" }
|
||||||
rand = "0.4"
|
rand = "0.6"
|
||||||
rayon = "1.0"
|
rayon = "1.1"
|
||||||
rlp = { version = "0.3.0", features = ["ethereum"] }
|
rlp = "0.4.0"
|
||||||
rlp_derive = { path = "../util/rlp-derive" }
|
rlp_derive = { path = "../util/rlp-derive" }
|
||||||
rustc-hex = "1.0"
|
rustc-hex = "1.0"
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
serde_derive = "1.0"
|
serde_derive = "1.0"
|
||||||
|
state-account = { path = "state-account" }
|
||||||
stats = { path = "../util/stats" }
|
stats = { path = "../util/stats" }
|
||||||
tempdir = {version="0.3", optional = true}
|
tempdir = { version = "0.3", optional = true }
|
||||||
time-utils = { path = "../util/time-utils" }
|
time-utils = { path = "../util/time-utils" }
|
||||||
trace-time = "0.1"
|
trace-time = "0.1"
|
||||||
triehash-ethereum = { version = "0.2", path = "../util/triehash-ethereum" }
|
triehash-ethereum = { version = "0.2", path = "../util/triehash-ethereum" }
|
||||||
@ -70,18 +71,22 @@ unexpected = { path = "../util/unexpected" }
|
|||||||
using_queue = { path = "../miner/using-queue" }
|
using_queue = { path = "../miner/using-queue" }
|
||||||
vm = { path = "vm" }
|
vm = { path = "vm" }
|
||||||
wasm = { path = "wasm" }
|
wasm = { path = "wasm" }
|
||||||
|
rand_xorshift = "0.1.1"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
blooms-db = { path = "../util/blooms-db" }
|
blooms-db = { path = "../util/blooms-db" }
|
||||||
criterion = "0.2"
|
criterion = "0.2"
|
||||||
env_logger = "0.5"
|
env_logger = "0.5"
|
||||||
ethcore-accounts = { path = "../accounts" }
|
ethcore-accounts = { path = "../accounts" }
|
||||||
|
ethjson = { path = "../json", features = ["test-helpers"] }
|
||||||
|
ethkey = { path = "../accounts/ethkey" }
|
||||||
fetch = { path = "../util/fetch" }
|
fetch = { path = "../util/fetch" }
|
||||||
kvdb-rocksdb = "0.1.3"
|
kvdb-rocksdb = "0.1.3"
|
||||||
parity-runtime = { path = "../util/runtime" }
|
parity-runtime = { path = "../util/runtime" }
|
||||||
rlp_compress = { path = "../util/rlp-compress" }
|
rlp_compress = { path = "../util/rlp-compress" }
|
||||||
|
serde_json = "1.0"
|
||||||
tempdir = "0.3"
|
tempdir = "0.3"
|
||||||
trie-standardmap = "0.1"
|
trie-standardmap = "0.12.4"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
parity = ["work-notify", "price-info", "stratum"]
|
parity = ["work-notify", "price-info", "stratum"]
|
||||||
@ -106,8 +111,6 @@ evm-debug-tests = ["evm-debug", "evm/evm-debug-tests"]
|
|||||||
slow-blocks = []
|
slow-blocks = []
|
||||||
# Run JSON consensus tests.
|
# Run JSON consensus tests.
|
||||||
json-tests = ["env_logger", "test-helpers", "to-pod-full"]
|
json-tests = ["env_logger", "test-helpers", "to-pod-full"]
|
||||||
# Skip JSON consensus tests with pending issues.
|
|
||||||
ci-skip-tests = []
|
|
||||||
# Run memory/cpu heavy tests.
|
# Run memory/cpu heavy tests.
|
||||||
test-heavy = []
|
test-heavy = []
|
||||||
# Compile test helpers
|
# Compile test helpers
|
||||||
|
14
ethcore/account-db/Cargo.toml
Normal file
14
ethcore/account-db/Cargo.toml
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
[package]
|
||||||
|
description = "DB backend wrapper for Account trie"
|
||||||
|
name = "account-db"
|
||||||
|
version = "0.1.0"
|
||||||
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
edition = "2018"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
ethereum-types = "0.6"
|
||||||
|
hash-db = "0.12.4"
|
||||||
|
keccak-hash = "0.2.0"
|
||||||
|
keccak-hasher = { path = "../../util/keccak-hasher" }
|
||||||
|
kvdb = "0.1"
|
||||||
|
rlp = "0.4"
|
@ -16,23 +16,20 @@
|
|||||||
|
|
||||||
//! DB backend wrapper for Account trie
|
//! DB backend wrapper for Account trie
|
||||||
use ethereum_types::H256;
|
use ethereum_types::H256;
|
||||||
use hash::{KECCAK_NULL_RLP, keccak};
|
use keccak_hash::{KECCAK_NULL_RLP, keccak};
|
||||||
use hash_db::{HashDB, AsHashDB};
|
use hash_db::{HashDB, AsHashDB, Prefix};
|
||||||
use keccak_hasher::KeccakHasher;
|
use keccak_hasher::KeccakHasher;
|
||||||
use kvdb::DBValue;
|
use kvdb::DBValue;
|
||||||
use rlp::NULL_RLP;
|
use rlp::NULL_RLP;
|
||||||
|
|
||||||
#[cfg(test)]
|
// Combines a key with an address hash to ensure uniqueness.
|
||||||
use ethereum_types::Address;
|
|
||||||
|
|
||||||
// combines a key with an address hash to ensure uniqueness.
|
|
||||||
// leaves the first 96 bits untouched in order to support partial key lookup.
|
// leaves the first 96 bits untouched in order to support partial key lookup.
|
||||||
#[inline]
|
#[inline]
|
||||||
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
|
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
|
||||||
let mut dst = key.clone();
|
let mut dst = key.clone();
|
||||||
{
|
{
|
||||||
let last_src: &[u8] = &*address_hash;
|
let last_src: &[u8] = address_hash.as_bytes();
|
||||||
let last_dst: &mut [u8] = &mut *dst;
|
let last_dst: &mut [u8] = dst.as_bytes_mut();
|
||||||
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
|
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
|
||||||
*k ^= *a
|
*k ^= *a
|
||||||
}
|
}
|
||||||
@ -57,7 +54,7 @@ impl Default for Factory {
|
|||||||
impl Factory {
|
impl Factory {
|
||||||
/// Create a read-only accountdb.
|
/// Create a read-only accountdb.
|
||||||
/// This will panic when write operations are called.
|
/// This will panic when write operations are called.
|
||||||
pub fn readonly<'db>(&self, db: &'db HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Box<HashDB<KeccakHasher, DBValue> + 'db> {
|
pub fn readonly<'db>(&self, db: &'db dyn HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Box<dyn HashDB<KeccakHasher, DBValue> + 'db> {
|
||||||
match *self {
|
match *self {
|
||||||
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
|
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
|
||||||
Factory::Plain => Box::new(Wrapping(db)),
|
Factory::Plain => Box::new(Wrapping(db)),
|
||||||
@ -65,7 +62,7 @@ impl Factory {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new mutable hashdb.
|
/// Create a new mutable hashdb.
|
||||||
pub fn create<'db>(&self, db: &'db mut HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Box<HashDB<KeccakHasher, DBValue> + 'db> {
|
pub fn create<'db>(&self, db: &'db mut dyn HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Box<dyn HashDB<KeccakHasher, DBValue> + 'db> {
|
||||||
match *self {
|
match *self {
|
||||||
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
|
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
|
||||||
Factory::Plain => Box::new(WrappingMut(db)),
|
Factory::Plain => Box::new(WrappingMut(db)),
|
||||||
@ -77,207 +74,189 @@ impl Factory {
|
|||||||
/// DB backend wrapper for Account trie
|
/// DB backend wrapper for Account trie
|
||||||
/// Transforms trie node keys for the database
|
/// Transforms trie node keys for the database
|
||||||
pub struct AccountDB<'db> {
|
pub struct AccountDB<'db> {
|
||||||
db: &'db HashDB<KeccakHasher, DBValue>,
|
db: &'db dyn HashDB<KeccakHasher, DBValue>,
|
||||||
address_hash: H256,
|
address_hash: H256,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> AccountDB<'db> {
|
impl<'db> AccountDB<'db> {
|
||||||
/// Create a new AccountDB from an address.
|
/// Create a new AccountDB from an address' hash.
|
||||||
#[cfg(test)]
|
pub fn from_hash(db: &'db dyn HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Self {
|
||||||
pub fn new(db: &'db HashDB<KeccakHasher, DBValue>, address: &Address) -> Self {
|
AccountDB { db, address_hash }
|
||||||
Self::from_hash(db, keccak(address))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new AcountDB from an address' hash.
|
|
||||||
pub fn from_hash(db: &'db HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Self {
|
|
||||||
AccountDB {
|
|
||||||
db: db,
|
|
||||||
address_hash: address_hash,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDB<'db> {
|
impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDB<'db> {
|
||||||
fn as_hash_db(&self) -> &HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db(&self) -> &dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
fn as_hash_db_mut(&mut self) -> &mut HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db_mut(&mut self) -> &mut dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDB<'db> {
|
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDB<'db> {
|
||||||
fn get(&self, key: &H256) -> Option<DBValue> {
|
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return Some(DBValue::from_slice(&NULL_RLP));
|
return Some(DBValue::from_slice(&NULL_RLP));
|
||||||
}
|
}
|
||||||
self.db.get(&combine_key(&self.address_hash, key))
|
self.db.get(&combine_key(&self.address_hash, key), prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn contains(&self, key: &H256) -> bool {
|
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
self.db.contains(&combine_key(&self.address_hash, key))
|
self.db.contains(&combine_key(&self.address_hash, key), prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert(&mut self, _value: &[u8]) -> H256 {
|
fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emplace(&mut self, _key: H256, _value: DBValue) {
|
fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove(&mut self, _key: &H256) {
|
fn remove(&mut self, _key: &H256, _prefix: Prefix) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// DB backend wrapper for Account trie
|
/// DB backend wrapper for Account trie
|
||||||
pub struct AccountDBMut<'db> {
|
pub struct AccountDBMut<'db> {
|
||||||
db: &'db mut HashDB<KeccakHasher, DBValue>,
|
db: &'db mut dyn HashDB<KeccakHasher, DBValue>,
|
||||||
address_hash: H256,
|
address_hash: H256,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> AccountDBMut<'db> {
|
impl<'db> AccountDBMut<'db> {
|
||||||
/// Create a new AccountDB from an address.
|
/// Create a new `AccountDBMut` from an address' hash.
|
||||||
#[cfg(test)]
|
pub fn from_hash(db: &'db mut dyn HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Self {
|
||||||
pub fn new(db: &'db mut HashDB<KeccakHasher, DBValue>, address: &Address) -> Self {
|
AccountDBMut { db, address_hash }
|
||||||
Self::from_hash(db, keccak(address))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new AcountDB from an address' hash.
|
/// Create an `AccountDB` from an `AccountDBMut` (used in tests).
|
||||||
pub fn from_hash(db: &'db mut HashDB<KeccakHasher, DBValue>, address_hash: H256) -> Self {
|
|
||||||
AccountDBMut {
|
|
||||||
db: db,
|
|
||||||
address_hash: address_hash,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
pub fn immutable(&'db self) -> AccountDB<'db> {
|
pub fn immutable(&'db self) -> AccountDB<'db> {
|
||||||
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
|
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDBMut<'db>{
|
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDBMut<'db>{
|
||||||
fn get(&self, key: &H256) -> Option<DBValue> {
|
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return Some(DBValue::from_slice(&NULL_RLP));
|
return Some(DBValue::from_slice(&NULL_RLP));
|
||||||
}
|
}
|
||||||
self.db.get(&combine_key(&self.address_hash, key))
|
self.db.get(&combine_key(&self.address_hash, key), prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn contains(&self, key: &H256) -> bool {
|
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
self.db.contains(&combine_key(&self.address_hash, key))
|
self.db.contains(&combine_key(&self.address_hash, key), prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert(&mut self, value: &[u8]) -> H256 {
|
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
|
||||||
if value == &NULL_RLP {
|
if value == &NULL_RLP {
|
||||||
return KECCAK_NULL_RLP.clone();
|
return KECCAK_NULL_RLP.clone();
|
||||||
}
|
}
|
||||||
let k = keccak(value);
|
let k = keccak(value);
|
||||||
let ak = combine_key(&self.address_hash, &k);
|
let ak = combine_key(&self.address_hash, &k);
|
||||||
self.db.emplace(ak, DBValue::from_slice(value));
|
self.db.emplace(ak, prefix, DBValue::from_slice(value));
|
||||||
k
|
k
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emplace(&mut self, key: H256, value: DBValue) {
|
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
|
||||||
if key == KECCAK_NULL_RLP {
|
if key == KECCAK_NULL_RLP {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let key = combine_key(&self.address_hash, &key);
|
let key = combine_key(&self.address_hash, &key);
|
||||||
self.db.emplace(key, value)
|
self.db.emplace(key, prefix, value)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove(&mut self, key: &H256) {
|
fn remove(&mut self, key: &H256, prefix: Prefix) {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let key = combine_key(&self.address_hash, key);
|
let key = combine_key(&self.address_hash, key);
|
||||||
self.db.remove(&key)
|
self.db.remove(&key, prefix)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDBMut<'db> {
|
impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDBMut<'db> {
|
||||||
fn as_hash_db(&self) -> &HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db(&self) -> &dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
fn as_hash_db_mut(&mut self) -> &mut HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db_mut(&mut self) -> &mut dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Wrapping<'db>(&'db HashDB<KeccakHasher, DBValue>);
|
struct Wrapping<'db>(&'db dyn HashDB<KeccakHasher, DBValue>);
|
||||||
|
|
||||||
impl<'db> AsHashDB<KeccakHasher, DBValue> for Wrapping<'db> {
|
impl<'db> AsHashDB<KeccakHasher, DBValue> for Wrapping<'db> {
|
||||||
fn as_hash_db(&self) -> &HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db(&self) -> &dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
fn as_hash_db_mut(&mut self) -> &mut HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db_mut(&mut self) -> &mut dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> HashDB<KeccakHasher, DBValue> for Wrapping<'db> {
|
impl<'db> HashDB<KeccakHasher, DBValue> for Wrapping<'db> {
|
||||||
fn get(&self, key: &H256) -> Option<DBValue> {
|
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return Some(DBValue::from_slice(&NULL_RLP));
|
return Some(DBValue::from_slice(&NULL_RLP));
|
||||||
}
|
}
|
||||||
self.0.get(key)
|
self.0.get(key, prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn contains(&self, key: &H256) -> bool {
|
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
self.0.contains(key)
|
self.0.contains(key, prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert(&mut self, _value: &[u8]) -> H256 {
|
fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emplace(&mut self, _key: H256, _value: DBValue) {
|
fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove(&mut self, _key: &H256) {
|
fn remove(&mut self, _key: &H256, _prefix: Prefix) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct WrappingMut<'db>(&'db mut HashDB<KeccakHasher, DBValue>);
|
struct WrappingMut<'db>(&'db mut dyn HashDB<KeccakHasher, DBValue>);
|
||||||
impl<'db> AsHashDB<KeccakHasher, DBValue> for WrappingMut<'db> {
|
impl<'db> AsHashDB<KeccakHasher, DBValue> for WrappingMut<'db> {
|
||||||
fn as_hash_db(&self) -> &HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db(&self) -> &dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
fn as_hash_db_mut(&mut self) -> &mut HashDB<KeccakHasher, DBValue> { self }
|
fn as_hash_db_mut(&mut self) -> &mut dyn HashDB<KeccakHasher, DBValue> { self }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> HashDB<KeccakHasher, DBValue> for WrappingMut<'db>{
|
impl<'db> HashDB<KeccakHasher, DBValue> for WrappingMut<'db>{
|
||||||
fn get(&self, key: &H256) -> Option<DBValue> {
|
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return Some(DBValue::from_slice(&NULL_RLP));
|
return Some(DBValue::from_slice(&NULL_RLP));
|
||||||
}
|
}
|
||||||
self.0.get(key)
|
self.0.get(key, prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn contains(&self, key: &H256) -> bool {
|
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
self.0.contains(key)
|
self.0.contains(key, prefix)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert(&mut self, value: &[u8]) -> H256 {
|
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
|
||||||
if value == &NULL_RLP {
|
if value == &NULL_RLP {
|
||||||
return KECCAK_NULL_RLP.clone();
|
return KECCAK_NULL_RLP.clone();
|
||||||
}
|
}
|
||||||
self.0.insert(value)
|
self.0.insert(prefix, value)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emplace(&mut self, key: H256, value: DBValue) {
|
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
|
||||||
if key == KECCAK_NULL_RLP {
|
if key == KECCAK_NULL_RLP {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
self.0.emplace(key, value)
|
self.0.emplace(key, prefix, value)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn remove(&mut self, key: &H256) {
|
fn remove(&mut self, key: &H256, prefix: Prefix) {
|
||||||
if key == &KECCAK_NULL_RLP {
|
if key == &KECCAK_NULL_RLP {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
self.0.remove(key)
|
self.0.remove(key, prefix)
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -28,13 +28,13 @@ extern crate rustc_hex;
|
|||||||
use criterion::{Criterion, Bencher};
|
use criterion::{Criterion, Bencher};
|
||||||
use bytes::BytesRef;
|
use bytes::BytesRef;
|
||||||
use ethcore::builtin::Builtin;
|
use ethcore::builtin::Builtin;
|
||||||
use ethcore::machine::EthereumMachine;
|
use ethcore::machine::Machine;
|
||||||
use ethereum_types::U256;
|
use ethereum_types::H160;
|
||||||
use ethcore::ethereum::new_byzantium_test_machine;
|
use ethcore::ethereum::new_byzantium_test_machine;
|
||||||
use rustc_hex::FromHex;
|
use rustc_hex::FromHex;
|
||||||
|
|
||||||
lazy_static! {
|
lazy_static! {
|
||||||
static ref BYZANTIUM_MACHINE: EthereumMachine = new_byzantium_test_machine();
|
static ref BYZANTIUM_MACHINE: Machine = new_byzantium_test_machine();
|
||||||
}
|
}
|
||||||
|
|
||||||
struct BuiltinBenchmark<'a> {
|
struct BuiltinBenchmark<'a> {
|
||||||
@ -46,8 +46,9 @@ struct BuiltinBenchmark<'a> {
|
|||||||
impl<'a> BuiltinBenchmark<'a> {
|
impl<'a> BuiltinBenchmark<'a> {
|
||||||
fn new(builtin_address: &'static str, input: &str, expected: &str) -> BuiltinBenchmark<'a> {
|
fn new(builtin_address: &'static str, input: &str, expected: &str) -> BuiltinBenchmark<'a> {
|
||||||
let builtins = BYZANTIUM_MACHINE.builtins();
|
let builtins = BYZANTIUM_MACHINE.builtins();
|
||||||
|
use std::str::FromStr;
|
||||||
let builtin = builtins.get(&builtin_address.into()).unwrap().clone();
|
let addr = H160::from_str(builtin_address).unwrap();
|
||||||
|
let builtin = builtins.get(&addr).unwrap().clone();
|
||||||
let input = FromHex::from_hex(input).unwrap();
|
let input = FromHex::from_hex(input).unwrap();
|
||||||
let expected = FromHex::from_hex(expected).unwrap();
|
let expected = FromHex::from_hex(expected).unwrap();
|
||||||
|
|
||||||
@ -56,10 +57,6 @@ impl<'a> BuiltinBenchmark<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn gas_cost(&self) -> U256 {
|
|
||||||
self.builtin.cost(&self.input)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn run(&self, b: &mut Bencher) {
|
fn run(&self, b: &mut Bencher) {
|
||||||
let mut output = vec![0; self.expected.len()];
|
let mut output = vec![0; self.expected.len()];
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
description = "Ethcore blockchain database"
|
description = "Parity Ethereum Blockchain Database, Test Generator, Configuration, Caching, Importing Blocks, and Block Information"
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "ethcore-blockchain"
|
name = "ethcore-blockchain"
|
||||||
@ -8,26 +8,28 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ansi_term = "0.10"
|
ansi_term = "0.11"
|
||||||
blooms-db = { path = "../../util/blooms-db" }
|
blooms-db = { path = "../../util/blooms-db" }
|
||||||
common-types = { path = "../types" }
|
common-types = { path = "../types" }
|
||||||
ethcore-db = { path = "../db" }
|
ethcore-db = { path = "../db" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
heapsize = "0.4"
|
keccak-hash = "0.2.0"
|
||||||
|
parity-util-mem = "0.1"
|
||||||
itertools = "0.5"
|
itertools = "0.5"
|
||||||
kvdb = "0.1"
|
kvdb = "0.1"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
parity-bytes = "0.1"
|
parity-bytes = "0.1"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
|
rand = "0.6"
|
||||||
rayon = "1.0"
|
rayon = "1.0"
|
||||||
rlp = { version = "0.3.0", features = ["ethereum"] }
|
rlp = "0.4.0"
|
||||||
rlp_compress = { path = "../../util/rlp-compress" }
|
rlp_compress = { path = "../../util/rlp-compress" }
|
||||||
rlp_derive = { path = "../../util/rlp-derive" }
|
rlp_derive = { path = "../../util/rlp-derive" }
|
||||||
|
triehash-ethereum = { version = "0.2", path = "../../util/triehash-ethereum" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
env_logger = "0.5"
|
env_logger = "0.5"
|
||||||
ethkey = { path = "../../accounts/ethkey" }
|
ethkey = { path = "../../accounts/ethkey" }
|
||||||
keccak-hash = "0.1"
|
|
||||||
rustc-hex = "1.0"
|
rustc-hex = "1.0"
|
||||||
tempdir = "0.3"
|
tempdir = "0.3"
|
||||||
kvdb-memorydb = "0.1"
|
kvdb-memorydb = "0.1"
|
||||||
|
@ -24,7 +24,8 @@ use common_types::header::Header;
|
|||||||
/// For GHOST fork-choice rule it would typically describe the block with highest
|
/// For GHOST fork-choice rule it would typically describe the block with highest
|
||||||
/// combined difficulty (usually the block with the highest block number).
|
/// combined difficulty (usually the block with the highest block number).
|
||||||
///
|
///
|
||||||
/// Sometimes refered as 'latest block'.
|
/// Sometimes referred as 'latest block'.
|
||||||
|
#[derive(Debug)]
|
||||||
pub struct BestBlock {
|
pub struct BestBlock {
|
||||||
/// Best block decoded header.
|
/// Best block decoded header.
|
||||||
pub header: Header,
|
pub header: Header,
|
||||||
@ -35,7 +36,7 @@ pub struct BestBlock {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Best ancient block info. If the blockchain has a gap this keeps track of where it starts.
|
/// Best ancient block info. If the blockchain has a gap this keeps track of where it starts.
|
||||||
#[derive(Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct BestAncientBlock {
|
pub struct BestAncientBlock {
|
||||||
/// Best block hash.
|
/// Best block hash.
|
||||||
pub hash: H256,
|
pub hash: H256,
|
||||||
|
@ -39,10 +39,10 @@ use ethcore_db::cache_manager::CacheManager;
|
|||||||
use ethcore_db::keys::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions};
|
use ethcore_db::keys::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions};
|
||||||
use ethcore_db::{self as db, Writable, Readable, CacheUpdatePolicy};
|
use ethcore_db::{self as db, Writable, Readable, CacheUpdatePolicy};
|
||||||
use ethereum_types::{H256, Bloom, BloomRef, U256};
|
use ethereum_types::{H256, Bloom, BloomRef, U256};
|
||||||
use heapsize::HeapSizeOf;
|
use util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use kvdb::{DBTransaction, KeyValueDB};
|
use kvdb::{DBTransaction, KeyValueDB};
|
||||||
use log::{trace, warn, info};
|
use log::{trace, debug, warn, info};
|
||||||
use parity_bytes::Bytes;
|
use parity_bytes::Bytes;
|
||||||
use parking_lot::{Mutex, RwLock};
|
use parking_lot::{Mutex, RwLock};
|
||||||
use rayon::prelude::*;
|
use rayon::prelude::*;
|
||||||
@ -57,7 +57,7 @@ use crate::{CacheSize, ImportRoute, Config};
|
|||||||
/// Database backing `BlockChain`.
|
/// Database backing `BlockChain`.
|
||||||
pub trait BlockChainDB: Send + Sync {
|
pub trait BlockChainDB: Send + Sync {
|
||||||
/// Generic key value store.
|
/// Generic key value store.
|
||||||
fn key_value(&self) -> &Arc<KeyValueDB>;
|
fn key_value(&self) -> &Arc<dyn KeyValueDB>;
|
||||||
|
|
||||||
/// Header blooms database.
|
/// Header blooms database.
|
||||||
fn blooms(&self) -> &blooms_db::Database;
|
fn blooms(&self) -> &blooms_db::Database;
|
||||||
@ -85,7 +85,7 @@ pub trait BlockChainDB: Send + Sync {
|
|||||||
/// predefined config.
|
/// predefined config.
|
||||||
pub trait BlockChainDBHandler: Send + Sync {
|
pub trait BlockChainDBHandler: Send + Sync {
|
||||||
/// Open the predefined key-value database.
|
/// Open the predefined key-value database.
|
||||||
fn open(&self, path: &Path) -> io::Result<Arc<BlockChainDB>>;
|
fn open(&self, path: &Path) -> io::Result<Arc<dyn BlockChainDB>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Interface for querying blocks by hash and by number.
|
/// Interface for querying blocks by hash and by number.
|
||||||
@ -228,7 +228,7 @@ pub struct BlockChain {
|
|||||||
transaction_addresses: RwLock<HashMap<H256, TransactionAddress>>,
|
transaction_addresses: RwLock<HashMap<H256, TransactionAddress>>,
|
||||||
block_receipts: RwLock<HashMap<H256, BlockReceipts>>,
|
block_receipts: RwLock<HashMap<H256, BlockReceipts>>,
|
||||||
|
|
||||||
db: Arc<BlockChainDB>,
|
db: Arc<dyn BlockChainDB>,
|
||||||
|
|
||||||
cache_man: Mutex<CacheManager<CacheId>>,
|
cache_man: Mutex<CacheManager<CacheId>>,
|
||||||
|
|
||||||
@ -284,7 +284,7 @@ impl BlockProvider for BlockChain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Read from DB and populate cache
|
// Read from DB and populate cache
|
||||||
let b = self.db.key_value().get(db::COL_HEADERS, hash)
|
let b = self.db.key_value().get(db::COL_HEADERS, hash.as_bytes())
|
||||||
.expect("Low level database error when fetching block header data. Some issue with disk?")?;
|
.expect("Low level database error when fetching block header data. Some issue with disk?")?;
|
||||||
|
|
||||||
let header = encoded::Header::new(decompress(&b, blocks_swapper()).into_vec());
|
let header = encoded::Header::new(decompress(&b, blocks_swapper()).into_vec());
|
||||||
@ -314,7 +314,7 @@ impl BlockProvider for BlockChain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Read from DB and populate cache
|
// Read from DB and populate cache
|
||||||
let b = self.db.key_value().get(db::COL_BODIES, hash)
|
let b = self.db.key_value().get(db::COL_BODIES, hash.as_bytes())
|
||||||
.expect("Low level database error when fetching block body data. Some issue with disk?")?;
|
.expect("Low level database error when fetching block body data. Some issue with disk?")?;
|
||||||
|
|
||||||
let body = encoded::Body::new(decompress(&b, blocks_swapper()).into_vec());
|
let body = encoded::Body::new(decompress(&b, blocks_swapper()).into_vec());
|
||||||
@ -469,7 +469,7 @@ impl<'a> Iterator for AncestryWithMetadataIter<'a> {
|
|||||||
})
|
})
|
||||||
},
|
},
|
||||||
_ => {
|
_ => {
|
||||||
self.current = H256::default();
|
self.current = H256::zero();
|
||||||
None
|
None
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@ -481,7 +481,7 @@ impl<'a> Iterator for AncestryWithMetadataIter<'a> {
|
|||||||
/// Returns epoch transitions.
|
/// Returns epoch transitions.
|
||||||
pub struct EpochTransitionIter<'a> {
|
pub struct EpochTransitionIter<'a> {
|
||||||
chain: &'a BlockChain,
|
chain: &'a BlockChain,
|
||||||
prefix_iter: Box<Iterator<Item=(Box<[u8]>, Box<[u8]>)> + 'a>,
|
prefix_iter: Box<dyn Iterator<Item=(Box<[u8]>, Box<[u8]>)> + 'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Iterator for EpochTransitionIter<'a> {
|
impl<'a> Iterator for EpochTransitionIter<'a> {
|
||||||
@ -521,7 +521,7 @@ impl<'a> Iterator for EpochTransitionIter<'a> {
|
|||||||
|
|
||||||
impl BlockChain {
|
impl BlockChain {
|
||||||
/// Create new instance of blockchain from given Genesis.
|
/// Create new instance of blockchain from given Genesis.
|
||||||
pub fn new(config: Config, genesis: &[u8], db: Arc<BlockChainDB>) -> BlockChain {
|
pub fn new(config: Config, genesis: &[u8], db: Arc<dyn BlockChainDB>) -> BlockChain {
|
||||||
// 400 is the average size of the key
|
// 400 is the average size of the key
|
||||||
let cache_man = CacheManager::new(config.pref_cache_size, config.max_cache_size, 400);
|
let cache_man = CacheManager::new(config.pref_cache_size, config.max_cache_size, 400);
|
||||||
|
|
||||||
@ -572,13 +572,13 @@ impl BlockChain {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let mut batch = DBTransaction::new();
|
let mut batch = DBTransaction::new();
|
||||||
batch.put(db::COL_HEADERS, &hash, block.header_rlp().as_raw());
|
batch.put(db::COL_HEADERS, hash.as_bytes(), block.header_rlp().as_raw());
|
||||||
batch.put(db::COL_BODIES, &hash, &Self::block_to_body(genesis));
|
batch.put(db::COL_BODIES, hash.as_bytes(), &Self::block_to_body(genesis));
|
||||||
|
|
||||||
batch.write(db::COL_EXTRA, &hash, &details);
|
batch.write(db::COL_EXTRA, &hash, &details);
|
||||||
batch.write(db::COL_EXTRA, &header.number(), &hash);
|
batch.write(db::COL_EXTRA, &header.number(), &hash);
|
||||||
|
|
||||||
batch.put(db::COL_EXTRA, b"best", &hash);
|
batch.put(db::COL_EXTRA, b"best", hash.as_bytes());
|
||||||
bc.db.key_value().write(batch).expect("Low level database error when fetching 'best' block. Some issue with disk?");
|
bc.db.key_value().write(batch).expect("Low level database error when fetching 'best' block. Some issue with disk?");
|
||||||
hash
|
hash
|
||||||
}
|
}
|
||||||
@ -639,7 +639,7 @@ impl BlockChain {
|
|||||||
if hash != bc.genesis_hash() {
|
if hash != bc.genesis_hash() {
|
||||||
trace!("First block calculated: {:?}", hash);
|
trace!("First block calculated: {:?}", hash);
|
||||||
let mut batch = db.key_value().transaction();
|
let mut batch = db.key_value().transaction();
|
||||||
batch.put(db::COL_EXTRA, b"first", &hash);
|
batch.put(db::COL_EXTRA, b"first", hash.as_bytes());
|
||||||
db.key_value().write(batch).expect("Low level database error when writing 'first' block. Some issue with disk?");
|
db.key_value().write(batch).expect("Low level database error when writing 'first' block. Some issue with disk?");
|
||||||
bc.first_block = Some(hash);
|
bc.first_block = Some(hash);
|
||||||
}
|
}
|
||||||
@ -652,10 +652,7 @@ impl BlockChain {
|
|||||||
// and write them
|
// and write them
|
||||||
if let (Some(hash), Some(number)) = (best_ancient, best_ancient_number) {
|
if let (Some(hash), Some(number)) = (best_ancient, best_ancient_number) {
|
||||||
let mut best_ancient_block = bc.best_ancient_block.write();
|
let mut best_ancient_block = bc.best_ancient_block.write();
|
||||||
*best_ancient_block = Some(BestAncientBlock {
|
*best_ancient_block = Some(BestAncientBlock { hash, number });
|
||||||
hash: hash,
|
|
||||||
number: number,
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -668,21 +665,6 @@ impl BlockChain {
|
|||||||
self.db.key_value().read_with_cache(db::COL_EXTRA, &self.block_details, parent).map_or(false, |d| d.children.contains(hash))
|
self.db.key_value().read_with_cache(db::COL_EXTRA, &self.block_details, parent).map_or(false, |d| d.children.contains(hash))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// fetches the list of blocks from best block to n, and n's parent hash
|
|
||||||
/// where n > 0
|
|
||||||
pub fn block_headers_from_best_block(&self, n: u32) -> Option<(Vec<encoded::Header>, H256)> {
|
|
||||||
let mut blocks = Vec::with_capacity(n as usize);
|
|
||||||
let mut hash = self.best_block_hash();
|
|
||||||
|
|
||||||
for _ in 0..n {
|
|
||||||
let current_hash = self.block_header_data(&hash)?;
|
|
||||||
hash = current_hash.parent_hash();
|
|
||||||
blocks.push(current_hash);
|
|
||||||
}
|
|
||||||
|
|
||||||
Some((blocks, hash))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a tree route between `from` and `to`, which is a tuple of:
|
/// Returns a tree route between `from` and `to`, which is a tuple of:
|
||||||
///
|
///
|
||||||
/// - a vector of hashes of all blocks, ordered from `from` to `to`.
|
/// - a vector of hashes of all blocks, ordered from `from` to `to`.
|
||||||
@ -728,6 +710,10 @@ impl BlockChain {
|
|||||||
///
|
///
|
||||||
/// If the tree route verges into pruned or unknown blocks,
|
/// If the tree route verges into pruned or unknown blocks,
|
||||||
/// `None` is returned.
|
/// `None` is returned.
|
||||||
|
///
|
||||||
|
/// `is_from_route_finalized` returns whether the `from` part of the
|
||||||
|
/// route contains a finalized block. This only holds if the two parts (from
|
||||||
|
/// and to) are on different branches, ie. on 2 different forks.
|
||||||
pub fn tree_route(&self, from: H256, to: H256) -> Option<TreeRoute> {
|
pub fn tree_route(&self, from: H256, to: H256) -> Option<TreeRoute> {
|
||||||
let mut from_branch = vec![];
|
let mut from_branch = vec![];
|
||||||
let mut is_from_route_finalized = false;
|
let mut is_from_route_finalized = false;
|
||||||
@ -741,9 +727,9 @@ impl BlockChain {
|
|||||||
// reset from && to to the same level
|
// reset from && to to the same level
|
||||||
while from_details.number > to_details.number {
|
while from_details.number > to_details.number {
|
||||||
from_branch.push(current_from);
|
from_branch.push(current_from);
|
||||||
|
is_from_route_finalized = is_from_route_finalized || from_details.is_finalized;
|
||||||
current_from = from_details.parent.clone();
|
current_from = from_details.parent.clone();
|
||||||
from_details = self.block_details(&from_details.parent)?;
|
from_details = self.block_details(&from_details.parent)?;
|
||||||
is_from_route_finalized = is_from_route_finalized || from_details.is_finalized;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
while to_details.number > from_details.number {
|
while to_details.number > from_details.number {
|
||||||
@ -757,9 +743,9 @@ impl BlockChain {
|
|||||||
// move to shared parent
|
// move to shared parent
|
||||||
while current_from != current_to {
|
while current_from != current_to {
|
||||||
from_branch.push(current_from);
|
from_branch.push(current_from);
|
||||||
|
is_from_route_finalized = is_from_route_finalized || from_details.is_finalized;
|
||||||
current_from = from_details.parent.clone();
|
current_from = from_details.parent.clone();
|
||||||
from_details = self.block_details(&from_details.parent)?;
|
from_details = self.block_details(&from_details.parent)?;
|
||||||
is_from_route_finalized = is_from_route_finalized || from_details.is_finalized;
|
|
||||||
|
|
||||||
to_branch.push(current_to);
|
to_branch.push(current_to);
|
||||||
current_to = to_details.parent.clone();
|
current_to = to_details.parent.clone();
|
||||||
@ -773,8 +759,8 @@ impl BlockChain {
|
|||||||
Some(TreeRoute {
|
Some(TreeRoute {
|
||||||
blocks: from_branch,
|
blocks: from_branch,
|
||||||
ancestor: current_from,
|
ancestor: current_from,
|
||||||
index: index,
|
index,
|
||||||
is_from_route_finalized: is_from_route_finalized,
|
is_from_route_finalized,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -803,8 +789,8 @@ impl BlockChain {
|
|||||||
let compressed_body = compress(&Self::block_to_body(block.raw()), blocks_swapper());
|
let compressed_body = compress(&Self::block_to_body(block.raw()), blocks_swapper());
|
||||||
|
|
||||||
// store block in db
|
// store block in db
|
||||||
batch.put(db::COL_HEADERS, &hash, &compressed_header);
|
batch.put(db::COL_HEADERS, hash.as_bytes(), &compressed_header);
|
||||||
batch.put(db::COL_BODIES, &hash, &compressed_body);
|
batch.put(db::COL_BODIES, hash.as_bytes(), &compressed_body);
|
||||||
|
|
||||||
let maybe_parent = self.block_details(&block_parent_hash);
|
let maybe_parent = self.block_details(&block_parent_hash);
|
||||||
|
|
||||||
@ -869,6 +855,14 @@ impl BlockChain {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// clears all caches for testing purposes
|
||||||
|
pub fn clear_cache(&self) {
|
||||||
|
self.block_bodies.write().clear();
|
||||||
|
self.block_details.write().clear();
|
||||||
|
self.block_hashes.write().clear();
|
||||||
|
self.block_headers.write().clear();
|
||||||
|
}
|
||||||
|
|
||||||
/// Update the best ancient block to the given hash, after checking that
|
/// Update the best ancient block to the given hash, after checking that
|
||||||
/// it's directly linked to the currently known best ancient block
|
/// it's directly linked to the currently known best ancient block
|
||||||
pub fn update_best_ancient_block(&self, hash: &H256) {
|
pub fn update_best_ancient_block(&self, hash: &H256) {
|
||||||
@ -939,7 +933,7 @@ impl BlockChain {
|
|||||||
*pending_best_ancient_block = Some(None);
|
*pending_best_ancient_block = Some(None);
|
||||||
} else if block_number > ancient_number {
|
} else if block_number > ancient_number {
|
||||||
trace!(target: "blockchain", "Updating the best ancient block to {}.", block_number);
|
trace!(target: "blockchain", "Updating the best ancient block to {}.", block_number);
|
||||||
batch.put(db::COL_EXTRA, b"ancient", &block_hash);
|
batch.put(db::COL_EXTRA, b"ancient", block_hash.as_bytes());
|
||||||
*pending_best_ancient_block = Some(Some(BestAncientBlock {
|
*pending_best_ancient_block = Some(Some(BestAncientBlock {
|
||||||
hash: *block_hash,
|
hash: *block_hash,
|
||||||
number: block_number,
|
number: block_number,
|
||||||
@ -970,6 +964,7 @@ impl BlockChain {
|
|||||||
/// Iterate over all epoch transitions.
|
/// Iterate over all epoch transitions.
|
||||||
/// This will only return transitions within the canonical chain.
|
/// This will only return transitions within the canonical chain.
|
||||||
pub fn epoch_transitions(&self) -> EpochTransitionIter {
|
pub fn epoch_transitions(&self) -> EpochTransitionIter {
|
||||||
|
debug!(target: "blockchain", "Iterating over all epoch transitions");
|
||||||
let iter = self.db.key_value().iter_from_prefix(db::COL_EXTRA, &EPOCH_KEY_PREFIX[..]);
|
let iter = self.db.key_value().iter_from_prefix(db::COL_EXTRA, &EPOCH_KEY_PREFIX[..]);
|
||||||
EpochTransitionIter {
|
EpochTransitionIter {
|
||||||
chain: self,
|
chain: self,
|
||||||
@ -995,7 +990,9 @@ impl BlockChain {
|
|||||||
pub fn epoch_transition_for(&self, parent_hash: H256) -> Option<EpochTransition> {
|
pub fn epoch_transition_for(&self, parent_hash: H256) -> Option<EpochTransition> {
|
||||||
// slow path: loop back block by block
|
// slow path: loop back block by block
|
||||||
for hash in self.ancestry_iter(parent_hash)? {
|
for hash in self.ancestry_iter(parent_hash)? {
|
||||||
|
trace!(target: "blockchain", "Got parent hash {} from ancestry_iter", hash);
|
||||||
let details = self.block_details(&hash)?;
|
let details = self.block_details(&hash)?;
|
||||||
|
trace!(target: "blockchain", "Block #{}: Got block details for parent hash {}", details.number, hash);
|
||||||
|
|
||||||
// look for transition in database.
|
// look for transition in database.
|
||||||
if let Some(transition) = self.epoch_transition(details.number, hash) {
|
if let Some(transition) = self.epoch_transition(details.number, hash) {
|
||||||
@ -1007,11 +1004,22 @@ impl BlockChain {
|
|||||||
//
|
//
|
||||||
// if `block_hash` is canonical it will only return transitions up to
|
// if `block_hash` is canonical it will only return transitions up to
|
||||||
// the parent.
|
// the parent.
|
||||||
if self.block_hash(details.number)? == hash {
|
match self.block_hash(details.number) {
|
||||||
|
Some(h) if h == hash => {
|
||||||
return self.epoch_transitions()
|
return self.epoch_transitions()
|
||||||
.map(|(_, t)| t)
|
.map(|(_, t)| t)
|
||||||
.take_while(|t| t.block_number <= details.number)
|
.take_while(|t| t.block_number <= details.number)
|
||||||
.last()
|
.last()
|
||||||
|
},
|
||||||
|
Some(h) => {
|
||||||
|
warn!(target: "blockchain", "Block #{}: Found non-canonical block hash {} (expected {})", details.number, h, hash);
|
||||||
|
|
||||||
|
trace!(target: "blockchain", "Block #{} Mismatched hashes. Ancestor {} != Own {}", details.number, hash, h);
|
||||||
|
trace!(target: "blockchain", " Ancestor {}: #{:#?}", hash, details);
|
||||||
|
trace!(target: "blockchain", " Own {}: #{:#?}", h, self.block_details(&h));
|
||||||
|
|
||||||
|
},
|
||||||
|
None => trace!(target: "blockchain", "Block #{}: hash {} not found in cache or DB", details.number, hash),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1079,8 +1087,8 @@ impl BlockChain {
|
|||||||
let compressed_body = compress(&Self::block_to_body(block.raw()), blocks_swapper());
|
let compressed_body = compress(&Self::block_to_body(block.raw()), blocks_swapper());
|
||||||
|
|
||||||
// store block in db
|
// store block in db
|
||||||
batch.put(db::COL_HEADERS, &hash, &compressed_header);
|
batch.put(db::COL_HEADERS, hash.as_bytes(), &compressed_header);
|
||||||
batch.put(db::COL_BODIES, &hash, &compressed_body);
|
batch.put(db::COL_BODIES, hash.as_bytes(), &compressed_body);
|
||||||
|
|
||||||
let info = self.block_info(&block.header_view(), route, &extras);
|
let info = self.block_info(&block.header_view(), route, &extras);
|
||||||
|
|
||||||
@ -1179,7 +1187,7 @@ impl BlockChain {
|
|||||||
{
|
{
|
||||||
let mut best_block = self.pending_best_block.write();
|
let mut best_block = self.pending_best_block.write();
|
||||||
if is_best && update.info.location != BlockLocation::Branch {
|
if is_best && update.info.location != BlockLocation::Branch {
|
||||||
batch.put(db::COL_EXTRA, b"best", &update.info.hash);
|
batch.put(db::COL_EXTRA, b"best", update.info.hash.as_bytes());
|
||||||
*best_block = Some(BestBlock {
|
*best_block = Some(BestBlock {
|
||||||
total_difficulty: update.info.total_difficulty,
|
total_difficulty: update.info.total_difficulty,
|
||||||
header: update.block.decode_header(),
|
header: update.block.decode_header(),
|
||||||
@ -1266,7 +1274,7 @@ impl BlockChain {
|
|||||||
current: if self.is_known(&first) {
|
current: if self.is_known(&first) {
|
||||||
first
|
first
|
||||||
} else {
|
} else {
|
||||||
H256::default() // zero hash
|
H256::zero() // zero hash
|
||||||
},
|
},
|
||||||
chain: self
|
chain: self
|
||||||
}
|
}
|
||||||
@ -1482,11 +1490,12 @@ impl BlockChain {
|
|||||||
|
|
||||||
/// Get current cache size.
|
/// Get current cache size.
|
||||||
pub fn cache_size(&self) -> CacheSize {
|
pub fn cache_size(&self) -> CacheSize {
|
||||||
|
let mut ops = new_malloc_size_ops();
|
||||||
CacheSize {
|
CacheSize {
|
||||||
blocks: self.block_headers.read().heap_size_of_children() + self.block_bodies.read().heap_size_of_children(),
|
blocks: self.block_headers.size_of(&mut ops) + self.block_bodies.size_of(&mut ops),
|
||||||
block_details: self.block_details.read().heap_size_of_children(),
|
block_details: self.block_details.size_of(&mut ops),
|
||||||
transaction_addresses: self.transaction_addresses.read().heap_size_of_children(),
|
transaction_addresses: self.transaction_addresses.size_of(&mut ops),
|
||||||
block_receipts: self.block_receipts.read().heap_size_of_children(),
|
block_receipts: self.block_receipts.size_of(&mut ops),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1521,12 +1530,13 @@ impl BlockChain {
|
|||||||
transaction_addresses.shrink_to_fit();
|
transaction_addresses.shrink_to_fit();
|
||||||
block_receipts.shrink_to_fit();
|
block_receipts.shrink_to_fit();
|
||||||
|
|
||||||
block_headers.heap_size_of_children() +
|
let mut ops = new_malloc_size_ops();
|
||||||
block_bodies.heap_size_of_children() +
|
block_headers.size_of(&mut ops) +
|
||||||
block_details.heap_size_of_children() +
|
block_bodies.size_of(&mut ops) +
|
||||||
block_hashes.heap_size_of_children() +
|
block_details.size_of(&mut ops) +
|
||||||
transaction_addresses.heap_size_of_children() +
|
block_hashes.size_of(&mut ops) +
|
||||||
block_receipts.heap_size_of_children()
|
transaction_addresses.size_of(&mut ops) +
|
||||||
|
block_receipts.size_of(&mut ops)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1578,17 +1588,18 @@ mod tests {
|
|||||||
use keccak_hash::keccak;
|
use keccak_hash::keccak;
|
||||||
use rustc_hex::FromHex;
|
use rustc_hex::FromHex;
|
||||||
use tempdir::TempDir;
|
use tempdir::TempDir;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
struct TestBlockChainDB {
|
struct TestBlockChainDB {
|
||||||
_blooms_dir: TempDir,
|
_blooms_dir: TempDir,
|
||||||
_trace_blooms_dir: TempDir,
|
_trace_blooms_dir: TempDir,
|
||||||
blooms: blooms_db::Database,
|
blooms: blooms_db::Database,
|
||||||
trace_blooms: blooms_db::Database,
|
trace_blooms: blooms_db::Database,
|
||||||
key_value: Arc<KeyValueDB>,
|
key_value: Arc<dyn KeyValueDB>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BlockChainDB for TestBlockChainDB {
|
impl BlockChainDB for TestBlockChainDB {
|
||||||
fn key_value(&self) -> &Arc<KeyValueDB> {
|
fn key_value(&self) -> &Arc<dyn KeyValueDB> {
|
||||||
&self.key_value
|
&self.key_value
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1602,7 +1613,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Creates new test instance of `BlockChainDB`
|
/// Creates new test instance of `BlockChainDB`
|
||||||
pub fn new_db() -> Arc<BlockChainDB> {
|
pub fn new_db() -> Arc<dyn BlockChainDB> {
|
||||||
let blooms_dir = TempDir::new("").unwrap();
|
let blooms_dir = TempDir::new("").unwrap();
|
||||||
let trace_blooms_dir = TempDir::new("").unwrap();
|
let trace_blooms_dir = TempDir::new("").unwrap();
|
||||||
|
|
||||||
@ -1617,15 +1628,15 @@ mod tests {
|
|||||||
Arc::new(db)
|
Arc::new(db)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new_chain(genesis: encoded::Block, db: Arc<BlockChainDB>) -> BlockChain {
|
fn new_chain(genesis: encoded::Block, db: Arc<dyn BlockChainDB>) -> BlockChain {
|
||||||
BlockChain::new(Config::default(), genesis.raw(), db)
|
BlockChain::new(Config::default(), genesis.raw(), db)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_block(db: &Arc<BlockChainDB>, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>) -> ImportRoute {
|
fn insert_block(db: &Arc<dyn BlockChainDB>, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>) -> ImportRoute {
|
||||||
insert_block_commit(db, bc, block, receipts, true)
|
insert_block_commit(db, bc, block, receipts, true)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_block_commit(db: &Arc<BlockChainDB>, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>, commit: bool) -> ImportRoute {
|
fn insert_block_commit(db: &Arc<dyn BlockChainDB>, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>, commit: bool) -> ImportRoute {
|
||||||
let mut batch = db.key_value().transaction();
|
let mut batch = db.key_value().transaction();
|
||||||
let res = insert_block_batch(&mut batch, bc, block, receipts);
|
let res = insert_block_batch(&mut batch, bc, block, receipts);
|
||||||
db.key_value().write(batch).unwrap();
|
db.key_value().write(batch).unwrap();
|
||||||
@ -1636,8 +1647,6 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn insert_block_batch(batch: &mut DBTransaction, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>) -> ImportRoute {
|
fn insert_block_batch(batch: &mut DBTransaction, bc: &BlockChain, block: encoded::Block, receipts: Vec<Receipt>) -> ImportRoute {
|
||||||
use crate::ExtrasInsert;
|
|
||||||
|
|
||||||
let fork_choice = {
|
let fork_choice = {
|
||||||
let header = block.header_view();
|
let header = block.header_view();
|
||||||
let parent_hash = header.parent_hash();
|
let parent_hash = header.parent_hash();
|
||||||
@ -2064,7 +2073,7 @@ mod tests {
|
|||||||
fn find_transaction_by_hash() {
|
fn find_transaction_by_hash() {
|
||||||
let genesis = "f901fcf901f7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0af81e09f8c46ca322193edfda764fa7e88e81923f802f1d325ec0b0308ac2cd0a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008083023e38808454c98c8142a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421880102030405060708c0c0".from_hex().unwrap();
|
let genesis = "f901fcf901f7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0af81e09f8c46ca322193edfda764fa7e88e81923f802f1d325ec0b0308ac2cd0a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008083023e38808454c98c8142a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421880102030405060708c0c0".from_hex().unwrap();
|
||||||
let b1 = "f904a8f901faa0ce1f26f798dd03c8782d63b3e42e79a64eaea5694ea686ac5d7ce3df5171d1aea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0a65c2364cd0f1542d761823dc0109c6b072f14c20459598c5455c274601438f4a070616ebd7ad2ed6fb7860cf7e9df00163842351c38a87cac2c1cb193895035a2a05c5b4fc43c2d45787f54e1ae7d27afdb4ad16dfc567c5692070d5c4556e0b1d7b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000183023ec683021536845685109780a029f07836e4e59229b3a065913afc27702642c683bba689910b2b2fd45db310d3888957e6d004a31802f902a7f85f800a8255f094aaaf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca0575da4e21b66fa764be5f74da9389e67693d066fb0d1312e19e17e501da00ecda06baf5a5327595f6619dfc2fcb3f2e6fb410b5810af3cb52d0e7508038e91a188f85f010a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba04fa966bf34b93abc1bcd665554b7f316b50f928477b50be0f3285ead29d18c5ba017bba0eeec1625ab433746955e125d46d80b7fdc97386c51266f842d8e02192ef85f020a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca004377418ae981cc32b1312b4a427a1d69a821b28db8584f5f2bd8c6d42458adaa053a1dba1af177fac92f3b6af0a9fa46a22adf56e686c93794b6a012bf254abf5f85f030a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca04fe13febd28a05f4fcb2f451d7ddc2dda56486d9f8c79a62b0ba4da775122615a0651b2382dd402df9ebc27f8cb4b2e0f3cea68dda2dca0ee9603608f0b6f51668f85f040a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba078e6a0ba086a08f8450e208a399bb2f2d2a0d984acd2517c7c7df66ccfab567da013254002cd45a97fac049ae00afbc43ed0d9961d0c56a3b2382c80
ce41c198ddf85f050a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba0a7174d8f43ea71c8e3ca9477691add8d80ac8e0ed89d8d8b572041eef81f4a54a0534ea2e28ec4da3b5b944b18c51ec84a5cf35f5b3343c5fb86521fd2d388f506f85f060a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba034bd04065833536a10c77ee2a43a5371bc6d34837088b861dd9d4b7f44074b59a078807715786a13876d3455716a6b9cb2186b7a4887a5c31160fc877454958616c0".from_hex().unwrap();
|
let b1 = "f904a8f901faa0ce1f26f798dd03c8782d63b3e42e79a64eaea5694ea686ac5d7ce3df5171d1aea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0a65c2364cd0f1542d761823dc0109c6b072f14c20459598c5455c274601438f4a070616ebd7ad2ed6fb7860cf7e9df00163842351c38a87cac2c1cb193895035a2a05c5b4fc43c2d45787f54e1ae7d27afdb4ad16dfc567c5692070d5c4556e0b1d7b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000183023ec683021536845685109780a029f07836e4e59229b3a065913afc27702642c683bba689910b2b2fd45db310d3888957e6d004a31802f902a7f85f800a8255f094aaaf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca0575da4e21b66fa764be5f74da9389e67693d066fb0d1312e19e17e501da00ecda06baf5a5327595f6619dfc2fcb3f2e6fb410b5810af3cb52d0e7508038e91a188f85f010a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba04fa966bf34b93abc1bcd665554b7f316b50f928477b50be0f3285ead29d18c5ba017bba0eeec1625ab433746955e125d46d80b7fdc97386c51266f842d8e02192ef85f020a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca004377418ae981cc32b1312b4a427a1d69a821b28db8584f5f2bd8c6d42458adaa053a1dba1af177fac92f3b6af0a9fa46a22adf56e686c93794b6a012bf254abf5f85f030a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca04fe13febd28a05f4fcb2f451d7ddc2dda56486d9f8c79a62b0ba4da775122615a0651b2382dd402df9ebc27f8cb4b2e0f3cea68dda2dca0ee9603608f0b6f51668f85f040a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba078e6a0ba086a08f8450e208a399bb2f2d2a0d984acd2517c7c7df66ccfab567da013254002cd45a97fac049ae00afbc43ed0d9961d0c56a3b2382c80
ce41c198ddf85f050a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba0a7174d8f43ea71c8e3ca9477691add8d80ac8e0ed89d8d8b572041eef81f4a54a0534ea2e28ec4da3b5b944b18c51ec84a5cf35f5b3343c5fb86521fd2d388f506f85f060a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba034bd04065833536a10c77ee2a43a5371bc6d34837088b861dd9d4b7f44074b59a078807715786a13876d3455716a6b9cb2186b7a4887a5c31160fc877454958616c0".from_hex().unwrap();
|
||||||
let b1_hash: H256 = "f53f268d23a71e85c7d6d83a9504298712b84c1a2ba220441c86eeda0bf0b6e3".into();
|
let b1_hash = H256::from_str("f53f268d23a71e85c7d6d83a9504298712b84c1a2ba220441c86eeda0bf0b6e3").unwrap();
|
||||||
|
|
||||||
let db = new_db();
|
let db = new_db();
|
||||||
let bc = new_chain(encoded::Block::new(genesis), db.clone());
|
let bc = new_chain(encoded::Block::new(genesis), db.clone());
|
||||||
@ -2137,7 +2146,7 @@ mod tests {
|
|||||||
let db = new_db();
|
let db = new_db();
|
||||||
let bc = new_chain(genesis.last().encoded(), db.clone());
|
let bc = new_chain(genesis.last().encoded(), db.clone());
|
||||||
insert_block(&db, &bc, b1.last().encoded(), vec![Receipt {
|
insert_block(&db, &bc, b1.last().encoded(), vec![Receipt {
|
||||||
outcome: TransactionOutcome::StateRoot(H256::default()),
|
outcome: TransactionOutcome::StateRoot(H256::zero()),
|
||||||
gas_used: 10_000.into(),
|
gas_used: 10_000.into(),
|
||||||
log_bloom: Default::default(),
|
log_bloom: Default::default(),
|
||||||
logs: vec![
|
logs: vec![
|
||||||
@ -2146,7 +2155,7 @@ mod tests {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
Receipt {
|
Receipt {
|
||||||
outcome: TransactionOutcome::StateRoot(H256::default()),
|
outcome: TransactionOutcome::StateRoot(H256::zero()),
|
||||||
gas_used: 10_000.into(),
|
gas_used: 10_000.into(),
|
||||||
log_bloom: Default::default(),
|
log_bloom: Default::default(),
|
||||||
logs: vec![
|
logs: vec![
|
||||||
@ -2155,7 +2164,7 @@ mod tests {
|
|||||||
}]);
|
}]);
|
||||||
insert_block(&db, &bc, b2.last().encoded(), vec![
|
insert_block(&db, &bc, b2.last().encoded(), vec![
|
||||||
Receipt {
|
Receipt {
|
||||||
outcome: TransactionOutcome::StateRoot(H256::default()),
|
outcome: TransactionOutcome::StateRoot(H256::zero()),
|
||||||
gas_used: 10_000.into(),
|
gas_used: 10_000.into(),
|
||||||
log_bloom: Default::default(),
|
log_bloom: Default::default(),
|
||||||
logs: vec![
|
logs: vec![
|
||||||
@ -2165,7 +2174,7 @@ mod tests {
|
|||||||
]);
|
]);
|
||||||
insert_block(&db, &bc, b3.last().encoded(), vec![
|
insert_block(&db, &bc, b3.last().encoded(), vec![
|
||||||
Receipt {
|
Receipt {
|
||||||
outcome: TransactionOutcome::StateRoot(H256::default()),
|
outcome: TransactionOutcome::StateRoot(H256::zero()),
|
||||||
gas_used: 10_000.into(),
|
gas_used: 10_000.into(),
|
||||||
log_bloom: Default::default(),
|
log_bloom: Default::default(),
|
||||||
logs: vec![
|
logs: vec![
|
||||||
@ -2244,11 +2253,11 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_bloom_filter_simple() {
|
fn test_bloom_filter_simple() {
|
||||||
let bloom_b1: Bloom = "00000020000000000000000000000000000000000000000002000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000400000000000000000000002000".into();
|
let bloom_b1 = Bloom::from_str("00000020000000000000000000000000000000000000000002000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000400000000000000000000002000").unwrap();
|
||||||
|
|
||||||
let bloom_b2: Bloom = "00000000000000000000000000000000000000000000020000001000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000".into();
|
let bloom_b2 = Bloom::from_str("00000000000000000000000000000000000000000000020000001000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
|
||||||
|
|
||||||
let bloom_ba: Bloom = "00000000000000000000000000000000000000000000020000000800000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000".into();
|
let bloom_ba = Bloom::from_str("00000000000000000000000000000000000000000000020000000800000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
|
||||||
|
|
||||||
let genesis = BlockBuilder::genesis();
|
let genesis = BlockBuilder::genesis();
|
||||||
let b1 = genesis.add_block_with(|| BlockOptions {
|
let b1 = genesis.add_block_with(|| BlockOptions {
|
||||||
@ -2312,11 +2321,11 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_insert_unordered() {
|
fn test_insert_unordered() {
|
||||||
let bloom_b1: Bloom = "00000020000000000000000000000000000000000000000002000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000400000000000000000000002000".into();
|
let bloom_b1 = Bloom::from_str("00000020000000000000000000000000000000000000000002000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000400000000000000000000002000").unwrap();
|
||||||
|
|
||||||
let bloom_b2: Bloom = "00000000000000000000000000000000000000000000020000001000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000".into();
|
let bloom_b2 = Bloom::from_str("00000000000000000000000000000000000000000000020000001000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
|
||||||
|
|
||||||
let bloom_b3: Bloom = "00000000000000000000000000000000000000000000020000000800000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000".into();
|
let bloom_b3 = Bloom::from_str("00000000000000000000000000000000000000000000020000000800000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000008000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
|
||||||
|
|
||||||
let genesis = BlockBuilder::genesis();
|
let genesis = BlockBuilder::genesis();
|
||||||
let b1 = genesis.add_block_with_bloom(bloom_b1);
|
let b1 = genesis.add_block_with_bloom(bloom_b1);
|
||||||
@ -2498,4 +2507,74 @@ mod tests {
|
|||||||
assert_eq!(bc.epoch_transition_for(fork_hash).unwrap().block_number, 0);
|
assert_eq!(bc.epoch_transition_for(fork_hash).unwrap().block_number, 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn tree_rout_with_finalization() {
|
||||||
|
let genesis = BlockBuilder::genesis();
|
||||||
|
let a = genesis.add_block();
|
||||||
|
// First branch
|
||||||
|
let a1 = a.add_block_with_random_transactions();
|
||||||
|
let a2 = a1.add_block_with_random_transactions();
|
||||||
|
let a3 = a2.add_block_with_random_transactions();
|
||||||
|
// Second branch
|
||||||
|
let b1 = a.add_block_with_random_transactions();
|
||||||
|
let b2 = b1.add_block_with_random_transactions();
|
||||||
|
|
||||||
|
let a_hash = a.last().hash();
|
||||||
|
let a1_hash = a1.last().hash();
|
||||||
|
let a2_hash = a2.last().hash();
|
||||||
|
let a3_hash = a3.last().hash();
|
||||||
|
let b2_hash = b2.last().hash();
|
||||||
|
|
||||||
|
let bootstrap_chain = |blocks: Vec<&BlockBuilder>| {
|
||||||
|
let db = new_db();
|
||||||
|
let bc = new_chain(genesis.last().encoded(), db.clone());
|
||||||
|
let mut batch = db.key_value().transaction();
|
||||||
|
for block in blocks {
|
||||||
|
insert_block_batch(&mut batch, &bc, block.last().encoded(), vec![]);
|
||||||
|
bc.commit();
|
||||||
|
}
|
||||||
|
db.key_value().write(batch).unwrap();
|
||||||
|
(db, bc)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mark_finalized = |block_hash: H256, db: &Arc<dyn BlockChainDB>, bc: &BlockChain| {
|
||||||
|
let mut batch = db.key_value().transaction();
|
||||||
|
bc.mark_finalized(&mut batch, block_hash).unwrap();
|
||||||
|
bc.commit();
|
||||||
|
db.key_value().write(batch).unwrap();
|
||||||
|
};
|
||||||
|
|
||||||
|
// Case 1: fork, with finalized common ancestor
|
||||||
|
{
|
||||||
|
let (db, bc) = bootstrap_chain(vec![&a, &a1, &a2, &a3, &b1, &b2]);
|
||||||
|
assert_eq!(bc.best_block_hash(), a3_hash);
|
||||||
|
assert_eq!(bc.block_hash(2).unwrap(), a1_hash);
|
||||||
|
|
||||||
|
mark_finalized(a_hash, &db, &bc);
|
||||||
|
assert!(!bc.tree_route(a3_hash, b2_hash).unwrap().is_from_route_finalized);
|
||||||
|
assert!(!bc.tree_route(b2_hash, a3_hash).unwrap().is_from_route_finalized);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Case 2: fork with a finalized block on a branch
|
||||||
|
{
|
||||||
|
let (db, bc) = bootstrap_chain(vec![&a, &a1, &a2, &a3, &b1, &b2]);
|
||||||
|
assert_eq!(bc.best_block_hash(), a3_hash);
|
||||||
|
assert_eq!(bc.block_hash(2).unwrap(), a1_hash);
|
||||||
|
|
||||||
|
mark_finalized(a2_hash, &db, &bc);
|
||||||
|
assert!(bc.tree_route(a3_hash, b2_hash).unwrap().is_from_route_finalized);
|
||||||
|
assert!(!bc.tree_route(b2_hash, a3_hash).unwrap().is_from_route_finalized);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Case 3: no-fork, with a finalized block
|
||||||
|
{
|
||||||
|
let (db, bc) = bootstrap_chain(vec![&a, &a1, &a2]);
|
||||||
|
assert_eq!(bc.best_block_hash(), a2_hash);
|
||||||
|
|
||||||
|
mark_finalized(a1_hash, &db, &bc);
|
||||||
|
assert!(!bc.tree_route(a1_hash, a2_hash).unwrap().is_from_route_finalized);
|
||||||
|
assert!(!bc.tree_route(a2_hash, a1_hash).unwrap().is_from_route_finalized);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -21,11 +21,13 @@ use ethereum_types::{U256, H256, Bloom};
|
|||||||
|
|
||||||
use common_types::encoded;
|
use common_types::encoded;
|
||||||
use common_types::header::Header;
|
use common_types::header::Header;
|
||||||
use common_types::transaction::SignedTransaction;
|
use common_types::transaction::{SignedTransaction, Transaction, Action};
|
||||||
use common_types::view;
|
use common_types::view;
|
||||||
use common_types::views::BlockView;
|
use common_types::views::BlockView;
|
||||||
|
use keccak_hash::keccak;
|
||||||
use rlp::encode;
|
use rlp::encode;
|
||||||
use rlp_derive::RlpEncodable;
|
use rlp_derive::RlpEncodable;
|
||||||
|
use triehash_ethereum::ordered_trie_root;
|
||||||
|
|
||||||
/// Helper structure, used for encoding blocks.
|
/// Helper structure, used for encoding blocks.
|
||||||
#[derive(Default, Clone, RlpEncodable)]
|
#[derive(Default, Clone, RlpEncodable)]
|
||||||
@ -136,6 +138,29 @@ impl BlockBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Add a block with randomly generated transactions.
|
||||||
|
#[inline]
|
||||||
|
pub fn add_block_with_random_transactions(&self) -> Self {
|
||||||
|
// Maximum of ~50 transactions
|
||||||
|
let count = rand::random::<u8>() as usize / 5;
|
||||||
|
let transactions = std::iter::repeat_with(|| {
|
||||||
|
let data_len = rand::random::<u8>();
|
||||||
|
let data = std::iter::repeat_with(|| rand::random::<u8>())
|
||||||
|
.take(data_len as usize)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
Transaction {
|
||||||
|
nonce: 0.into(),
|
||||||
|
gas_price: 0.into(),
|
||||||
|
gas: 100_000.into(),
|
||||||
|
action: Action::Create,
|
||||||
|
value: 100.into(),
|
||||||
|
data,
|
||||||
|
}.sign(&keccak("").into(), None)
|
||||||
|
}).take(count);
|
||||||
|
|
||||||
|
self.add_block_with_transactions(transactions)
|
||||||
|
}
|
||||||
|
|
||||||
/// Add a block with given transactions.
|
/// Add a block with given transactions.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn add_block_with_transactions<T>(&self, transactions: T) -> Self
|
pub fn add_block_with_transactions<T>(&self, transactions: T) -> Self
|
||||||
@ -166,11 +191,15 @@ impl BlockBuilder {
|
|||||||
let mut block = Block::default();
|
let mut block = Block::default();
|
||||||
let metadata = get_metadata();
|
let metadata = get_metadata();
|
||||||
let block_number = parent_number + 1;
|
let block_number = parent_number + 1;
|
||||||
|
let transactions = metadata.transactions;
|
||||||
|
let transactions_root = ordered_trie_root(transactions.iter().map(rlp::encode));
|
||||||
|
|
||||||
block.header.set_parent_hash(parent_hash);
|
block.header.set_parent_hash(parent_hash);
|
||||||
block.header.set_number(block_number);
|
block.header.set_number(block_number);
|
||||||
block.header.set_log_bloom(metadata.bloom);
|
block.header.set_log_bloom(metadata.bloom);
|
||||||
block.header.set_difficulty(metadata.difficulty);
|
block.header.set_difficulty(metadata.difficulty);
|
||||||
block.transactions = metadata.transactions;
|
block.header.set_transactions_root(transactions_root);
|
||||||
|
block.transactions = transactions;
|
||||||
|
|
||||||
parent_hash = block.hash();
|
parent_hash = block.hash();
|
||||||
parent_number = block_number;
|
parent_number = block_number;
|
||||||
|
@ -68,7 +68,7 @@ impl From<BlockInfo> for ImportRoute {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use ethereum_types::{H256, U256};
|
use ethereum_types::{U256, BigEndianHash};
|
||||||
use crate::block_info::{BlockInfo, BlockLocation, BranchBecomingCanonChainData};
|
use crate::block_info::{BlockInfo, BlockLocation, BranchBecomingCanonChainData};
|
||||||
use super::ImportRoute;
|
use super::ImportRoute;
|
||||||
|
|
||||||
@ -84,7 +84,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn import_route_branch() {
|
fn import_route_branch() {
|
||||||
let info = BlockInfo {
|
let info = BlockInfo {
|
||||||
hash: H256::from(U256::from(1)),
|
hash: BigEndianHash::from_uint(&U256::from(1)),
|
||||||
number: 0,
|
number: 0,
|
||||||
total_difficulty: U256::from(0),
|
total_difficulty: U256::from(0),
|
||||||
location: BlockLocation::Branch,
|
location: BlockLocation::Branch,
|
||||||
@ -93,14 +93,14 @@ mod tests {
|
|||||||
assert_eq!(ImportRoute::from(info), ImportRoute {
|
assert_eq!(ImportRoute::from(info), ImportRoute {
|
||||||
retracted: vec![],
|
retracted: vec![],
|
||||||
enacted: vec![],
|
enacted: vec![],
|
||||||
omitted: vec![H256::from(U256::from(1))],
|
omitted: vec![BigEndianHash::from_uint(&U256::from(1))],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn import_route_canon_chain() {
|
fn import_route_canon_chain() {
|
||||||
let info = BlockInfo {
|
let info = BlockInfo {
|
||||||
hash: H256::from(U256::from(1)),
|
hash: BigEndianHash::from_uint(&U256::from(1)),
|
||||||
number: 0,
|
number: 0,
|
||||||
total_difficulty: U256::from(0),
|
total_difficulty: U256::from(0),
|
||||||
location: BlockLocation::CanonChain,
|
location: BlockLocation::CanonChain,
|
||||||
@ -108,7 +108,7 @@ mod tests {
|
|||||||
|
|
||||||
assert_eq!(ImportRoute::from(info), ImportRoute {
|
assert_eq!(ImportRoute::from(info), ImportRoute {
|
||||||
retracted: vec![],
|
retracted: vec![],
|
||||||
enacted: vec![H256::from(U256::from(1))],
|
enacted: vec![BigEndianHash::from_uint(&U256::from(1))],
|
||||||
omitted: vec![],
|
omitted: vec![],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -116,19 +116,19 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn import_route_branch_becoming_canon_chain() {
|
fn import_route_branch_becoming_canon_chain() {
|
||||||
let info = BlockInfo {
|
let info = BlockInfo {
|
||||||
hash: H256::from(U256::from(2)),
|
hash: BigEndianHash::from_uint(&U256::from(2)),
|
||||||
number: 0,
|
number: 0,
|
||||||
total_difficulty: U256::from(0),
|
total_difficulty: U256::from(0),
|
||||||
location: BlockLocation::BranchBecomingCanonChain(BranchBecomingCanonChainData {
|
location: BlockLocation::BranchBecomingCanonChain(BranchBecomingCanonChainData {
|
||||||
ancestor: H256::from(U256::from(0)),
|
ancestor: BigEndianHash::from_uint(&U256::from(0)),
|
||||||
enacted: vec![H256::from(U256::from(1))],
|
enacted: vec![BigEndianHash::from_uint(&U256::from(1))],
|
||||||
retracted: vec![H256::from(U256::from(3)), H256::from(U256::from(4))],
|
retracted: vec![BigEndianHash::from_uint(&U256::from(3)), BigEndianHash::from_uint(&U256::from(4))],
|
||||||
})
|
})
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(ImportRoute::from(info), ImportRoute {
|
assert_eq!(ImportRoute::from(info), ImportRoute {
|
||||||
retracted: vec![H256::from(U256::from(3)), H256::from(U256::from(4))],
|
retracted: vec![BigEndianHash::from_uint(&U256::from(3)), BigEndianHash::from_uint(&U256::from(4))],
|
||||||
enacted: vec![H256::from(U256::from(1)), H256::from(U256::from(2))],
|
enacted: vec![BigEndianHash::from_uint(&U256::from(1)), BigEndianHash::from_uint(&U256::from(2))],
|
||||||
omitted: vec![],
|
omitted: vec![],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -18,6 +18,9 @@
|
|||||||
|
|
||||||
#![warn(missing_docs)]
|
#![warn(missing_docs)]
|
||||||
|
|
||||||
|
extern crate parity_util_mem as util_mem;
|
||||||
|
extern crate parity_util_mem as malloc_size_of;
|
||||||
|
|
||||||
mod best_block;
|
mod best_block;
|
||||||
mod block_info;
|
mod block_info;
|
||||||
mod blockchain;
|
mod blockchain;
|
||||||
|
22
ethcore/builtin/Cargo.toml
Normal file
22
ethcore/builtin/Cargo.toml
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
[package]
|
||||||
|
description = "ethereum vm builtin"
|
||||||
|
name = "ethcore-builtin"
|
||||||
|
version = "0.1.0"
|
||||||
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
|
edition = "2018"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
bn = { git = "https://github.com/paritytech/bn", default-features = false }
|
||||||
|
ethereum-types = "0.6.0"
|
||||||
|
ethjson = { path = "../../json" }
|
||||||
|
ethkey = { path = "../../accounts/ethkey" }
|
||||||
|
keccak-hash = "0.2.0"
|
||||||
|
log = "0.4"
|
||||||
|
num = { version = "0.1", default-features = false, features = ["bigint"] }
|
||||||
|
parity-bytes = "0.1"
|
||||||
|
eip-152 = { path = "../../util/EIP-152" }
|
||||||
|
parity-crypto = "0.4.0"
|
||||||
|
byteorder = "1.3.2"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
hex-literal = "0.2.1"
|
File diff suppressed because it is too large
Load Diff
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum (EthCore) Contract Calls and Blockchain Service & Registry Information"
|
||||||
name = "ethcore-call-contract"
|
name = "ethcore-call-contract"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
@ -7,5 +8,5 @@ edition = "2018"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
types = { path = "../types", package = "common-types" }
|
types = { path = "../types", package = "common-types" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
bytes = { version = "0.1", package = "parity-bytes" }
|
bytes = { version = "0.1", package = "parity-bytes" }
|
||||||
|
@ -9,9 +9,9 @@ edition = "2018"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
common-types = { path = "../types" }
|
common-types = { path = "../types" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
heapsize = "0.4"
|
|
||||||
kvdb = "0.1"
|
kvdb = "0.1"
|
||||||
|
parity-util-mem = "0.1"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
rlp = { version = "0.3.0", features = ["ethereum"] }
|
rlp = "0.4.0"
|
||||||
rlp_derive = { path = "../../util/rlp-derive" }
|
rlp_derive = { path = "../../util/rlp-derive" }
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
//! Database utilities and definitions.
|
//! Database utilities and definitions.
|
||||||
|
|
||||||
use std::ops::Deref;
|
use std::convert::AsRef;
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
@ -82,7 +82,7 @@ impl<K, V> Cache<K, V> for HashMap<K, V> where K: Hash + Eq {
|
|||||||
/// Should be used to get database key associated with given value.
|
/// Should be used to get database key associated with given value.
|
||||||
pub trait Key<T> {
|
pub trait Key<T> {
|
||||||
/// The db key associated with this value.
|
/// The db key associated with this value.
|
||||||
type Target: Deref<Target = [u8]>;
|
type Target: AsRef<[u8]>;
|
||||||
|
|
||||||
/// Returns db key.
|
/// Returns db key.
|
||||||
fn key(&self) -> Self::Target;
|
fn key(&self) -> Self::Target;
|
||||||
@ -91,16 +91,16 @@ pub trait Key<T> {
|
|||||||
/// Should be used to write value into database.
|
/// Should be used to write value into database.
|
||||||
pub trait Writable {
|
pub trait Writable {
|
||||||
/// Writes the value into the database.
|
/// Writes the value into the database.
|
||||||
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]>;
|
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: AsRef<[u8]>;
|
||||||
|
|
||||||
/// Deletes key from the databse.
|
/// Deletes key from the databse.
|
||||||
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: Deref<Target = [u8]>;
|
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: AsRef<[u8]>;
|
||||||
|
|
||||||
/// Writes the value into the database and updates the cache.
|
/// Writes the value into the database and updates the cache.
|
||||||
fn write_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
|
fn write_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
|
||||||
K: Key<T, Target = R> + Hash + Eq,
|
K: Key<T, Target = R> + Hash + Eq,
|
||||||
T: rlp::Encodable,
|
T: rlp::Encodable,
|
||||||
R: Deref<Target = [u8]> {
|
R: AsRef<[u8]> {
|
||||||
self.write(col, &key, &value);
|
self.write(col, &key, &value);
|
||||||
match policy {
|
match policy {
|
||||||
CacheUpdatePolicy::Overwrite => {
|
CacheUpdatePolicy::Overwrite => {
|
||||||
@ -116,7 +116,7 @@ pub trait Writable {
|
|||||||
fn extend_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, values: HashMap<K, T>, policy: CacheUpdatePolicy) where
|
fn extend_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, values: HashMap<K, T>, policy: CacheUpdatePolicy) where
|
||||||
K: Key<T, Target = R> + Hash + Eq,
|
K: Key<T, Target = R> + Hash + Eq,
|
||||||
T: rlp::Encodable,
|
T: rlp::Encodable,
|
||||||
R: Deref<Target = [u8]> {
|
R: AsRef<[u8]> {
|
||||||
match policy {
|
match policy {
|
||||||
CacheUpdatePolicy::Overwrite => {
|
CacheUpdatePolicy::Overwrite => {
|
||||||
for (key, value) in values {
|
for (key, value) in values {
|
||||||
@ -137,7 +137,7 @@ pub trait Writable {
|
|||||||
fn extend_with_option_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, Option<T>>, values: HashMap<K, Option<T>>, policy: CacheUpdatePolicy) where
|
fn extend_with_option_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, Option<T>>, values: HashMap<K, Option<T>>, policy: CacheUpdatePolicy) where
|
||||||
K: Key<T, Target = R> + Hash + Eq,
|
K: Key<T, Target = R> + Hash + Eq,
|
||||||
T: rlp::Encodable,
|
T: rlp::Encodable,
|
||||||
R: Deref<Target = [u8]> {
|
R: AsRef<[u8]> {
|
||||||
match policy {
|
match policy {
|
||||||
CacheUpdatePolicy::Overwrite => {
|
CacheUpdatePolicy::Overwrite => {
|
||||||
for (key, value) in values {
|
for (key, value) in values {
|
||||||
@ -167,7 +167,7 @@ pub trait Readable {
|
|||||||
/// Returns value for given key.
|
/// Returns value for given key.
|
||||||
fn read<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> Option<T> where
|
fn read<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> Option<T> where
|
||||||
T: rlp::Decodable,
|
T: rlp::Decodable,
|
||||||
R: Deref<Target = [u8]>;
|
R: AsRef<[u8]>;
|
||||||
|
|
||||||
/// Returns value for given key either in cache or in database.
|
/// Returns value for given key either in cache or in database.
|
||||||
fn read_with_cache<K, T, C>(&self, col: Option<u32>, cache: &RwLock<C>, key: &K) -> Option<T> where
|
fn read_with_cache<K, T, C>(&self, col: Option<u32>, cache: &RwLock<C>, key: &K) -> Option<T> where
|
||||||
@ -189,12 +189,12 @@ pub trait Readable {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns true if given value exists.
|
/// Returns true if given value exists.
|
||||||
fn exists<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> bool where R: Deref<Target= [u8]>;
|
fn exists<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> bool where R: AsRef<[u8]>;
|
||||||
|
|
||||||
/// Returns true if given value exists either in cache or in database.
|
/// Returns true if given value exists either in cache or in database.
|
||||||
fn exists_with_cache<K, T, R, C>(&self, col: Option<u32>, cache: &RwLock<C>, key: &K) -> bool where
|
fn exists_with_cache<K, T, R, C>(&self, col: Option<u32>, cache: &RwLock<C>, key: &K) -> bool where
|
||||||
K: Eq + Hash + Key<T, Target = R>,
|
K: Eq + Hash + Key<T, Target = R>,
|
||||||
R: Deref<Target = [u8]>,
|
R: AsRef<[u8]>,
|
||||||
C: Cache<K, T> {
|
C: Cache<K, T> {
|
||||||
{
|
{
|
||||||
let read = cache.read();
|
let read = cache.read();
|
||||||
@ -208,31 +208,31 @@ pub trait Readable {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Writable for DBTransaction {
|
impl Writable for DBTransaction {
|
||||||
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]> {
|
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: AsRef<[u8]> {
|
||||||
self.put(col, &key.key(), &rlp::encode(value));
|
self.put(col, key.key().as_ref(), &rlp::encode(value));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: Deref<Target = [u8]> {
|
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: AsRef<[u8]> {
|
||||||
self.delete(col, &key.key());
|
self.delete(col, key.key().as_ref());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<KVDB: KeyValueDB + ?Sized> Readable for KVDB {
|
impl<KVDB: KeyValueDB + ?Sized> Readable for KVDB {
|
||||||
fn read<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> Option<T>
|
fn read<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> Option<T>
|
||||||
where T: rlp::Decodable, R: Deref<Target = [u8]> {
|
where T: rlp::Decodable, R: AsRef<[u8]> {
|
||||||
self.get(col, &key.key())
|
self.get(col, key.key().as_ref())
|
||||||
.expect(&format!("db get failed, key: {:?}", &key.key() as &[u8]))
|
.expect(&format!("db get failed, key: {:?}", key.key().as_ref()))
|
||||||
.map(|v| rlp::decode(&v).expect("decode db value failed") )
|
.map(|v| rlp::decode(&v).expect("decode db value failed") )
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn exists<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> bool where R: Deref<Target = [u8]> {
|
fn exists<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>) -> bool where R: AsRef<[u8]> {
|
||||||
let result = self.get(col, &key.key());
|
let result = self.get(col, key.key().as_ref());
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
Ok(v) => v.is_some(),
|
Ok(v) => v.is_some(),
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
panic!("db get failed, key: {:?}, err: {:?}", &key.key() as &[u8], err);
|
panic!("db get failed, key: {:?}, err: {:?}", key.key().as_ref(), err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -17,13 +17,13 @@
|
|||||||
//! Blockchain DB extras.
|
//! Blockchain DB extras.
|
||||||
|
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use std::ops;
|
use std::convert::AsRef;
|
||||||
|
|
||||||
use common_types::BlockNumber;
|
use common_types::BlockNumber;
|
||||||
use common_types::engines::epoch::Transition as EpochTransition;
|
use common_types::engines::epoch::Transition as EpochTransition;
|
||||||
use common_types::receipt::Receipt;
|
use common_types::receipt::Receipt;
|
||||||
use ethereum_types::{H256, H264, U256};
|
use ethereum_types::{H256, H264, U256};
|
||||||
use heapsize::HeapSizeOf;
|
use parity_util_mem::MallocSizeOf;
|
||||||
use kvdb::PREFIX_LEN as DB_PREFIX_LEN;
|
use kvdb::PREFIX_LEN as DB_PREFIX_LEN;
|
||||||
use rlp;
|
use rlp;
|
||||||
use rlp_derive::{RlpEncodableWrapper, RlpDecodableWrapper, RlpEncodable, RlpDecodable};
|
use rlp_derive::{RlpEncodableWrapper, RlpDecodableWrapper, RlpEncodable, RlpDecodable};
|
||||||
@ -49,19 +49,17 @@ pub enum ExtrasIndex {
|
|||||||
|
|
||||||
fn with_index(hash: &H256, i: ExtrasIndex) -> H264 {
|
fn with_index(hash: &H256, i: ExtrasIndex) -> H264 {
|
||||||
let mut result = H264::default();
|
let mut result = H264::default();
|
||||||
result[0] = i as u8;
|
result.as_bytes_mut()[0] = i as u8;
|
||||||
(*result)[1..].clone_from_slice(hash);
|
result.as_bytes_mut()[1..].clone_from_slice(hash.as_bytes());
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Wrapper for block number used as a DB key.
|
/// Wrapper for block number used as a DB key.
|
||||||
pub struct BlockNumberKey([u8; 5]);
|
pub struct BlockNumberKey([u8; 5]);
|
||||||
|
|
||||||
impl ops::Deref for BlockNumberKey {
|
impl AsRef<[u8]> for BlockNumberKey {
|
||||||
type Target = [u8];
|
fn as_ref(&self) -> &[u8] {
|
||||||
|
&self.0[..]
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -123,10 +121,8 @@ pub const EPOCH_KEY_PREFIX: &'static [u8; DB_PREFIX_LEN] = &[
|
|||||||
/// Epoch transitions key
|
/// Epoch transitions key
|
||||||
pub struct EpochTransitionsKey([u8; EPOCH_KEY_LEN]);
|
pub struct EpochTransitionsKey([u8; EPOCH_KEY_LEN]);
|
||||||
|
|
||||||
impl ops::Deref for EpochTransitionsKey {
|
impl AsRef<[u8]> for EpochTransitionsKey {
|
||||||
type Target = [u8];
|
fn as_ref(&self) -> &[u8] { &self.0[..] }
|
||||||
|
|
||||||
fn deref(&self) -> &[u8] { &self.0[..] }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Key<EpochTransitions> for u64 {
|
impl Key<EpochTransitions> for u64 {
|
||||||
@ -144,7 +140,7 @@ impl Key<EpochTransitions> for u64 {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Familial details concerning a block
|
/// Familial details concerning a block
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone, MallocSizeOf)]
|
||||||
pub struct BlockDetails {
|
pub struct BlockDetails {
|
||||||
/// Block number
|
/// Block number
|
||||||
pub number: BlockNumber,
|
pub number: BlockNumber,
|
||||||
@ -199,14 +195,8 @@ impl rlp::Decodable for BlockDetails {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HeapSizeOf for BlockDetails {
|
|
||||||
fn heap_size_of_children(&self) -> usize {
|
|
||||||
self.children.heap_size_of_children()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Represents address of certain transaction within block
|
/// Represents address of certain transaction within block
|
||||||
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable)]
|
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable, MallocSizeOf)]
|
||||||
pub struct TransactionAddress {
|
pub struct TransactionAddress {
|
||||||
/// Block hash
|
/// Block hash
|
||||||
pub block_hash: H256,
|
pub block_hash: H256,
|
||||||
@ -214,12 +204,8 @@ pub struct TransactionAddress {
|
|||||||
pub index: usize
|
pub index: usize
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HeapSizeOf for TransactionAddress {
|
|
||||||
fn heap_size_of_children(&self) -> usize { 0 }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Contains all block receipts.
|
/// Contains all block receipts.
|
||||||
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
|
#[derive(Debug, Clone, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
|
||||||
pub struct BlockReceipts {
|
pub struct BlockReceipts {
|
||||||
/// Block receipts
|
/// Block receipts
|
||||||
pub receipts: Vec<Receipt>,
|
pub receipts: Vec<Receipt>,
|
||||||
@ -228,15 +214,7 @@ pub struct BlockReceipts {
|
|||||||
impl BlockReceipts {
|
impl BlockReceipts {
|
||||||
/// Create new block receipts wrapper.
|
/// Create new block receipts wrapper.
|
||||||
pub fn new(receipts: Vec<Receipt>) -> Self {
|
pub fn new(receipts: Vec<Receipt>) -> Self {
|
||||||
BlockReceipts {
|
BlockReceipts { receipts }
|
||||||
receipts: receipts
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HeapSizeOf for BlockReceipts {
|
|
||||||
fn heap_size_of_children(&self) -> usize {
|
|
||||||
self.receipts.heap_size_of_children()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -18,6 +18,9 @@
|
|||||||
|
|
||||||
#![warn(missing_docs)]
|
#![warn(missing_docs)]
|
||||||
|
|
||||||
|
extern crate parity_util_mem as mem;
|
||||||
|
extern crate parity_util_mem as malloc_size_of;
|
||||||
|
|
||||||
mod db;
|
mod db;
|
||||||
|
|
||||||
pub mod keys;
|
pub mod keys;
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
|
description = "Parity Ethereum Virtual Machine (EVM) Rust Implementation"
|
||||||
name = "evm"
|
name = "evm"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
authors = ["Parity Technologies <admin@parity.io>"]
|
authors = ["Parity Technologies <admin@parity.io>"]
|
||||||
@ -6,18 +7,19 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
bit-set = "0.4"
|
bit-set = "0.4"
|
||||||
parity-bytes = "0.1"
|
parity-bytes = "0.1"
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
heapsize = "0.4"
|
parity-util-mem = "0.1"
|
||||||
lazy_static = "1.0"
|
lazy_static = "1.0"
|
||||||
log = "0.4"
|
log = "0.4"
|
||||||
vm = { path = "../vm" }
|
vm = { path = "../vm" }
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
memory-cache = { path = "../../util/memory-cache" }
|
memory-cache = { path = "../../util/memory-cache" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rustc-hex = "1.0"
|
rustc-hex = "1.0"
|
||||||
criterion = "0.2"
|
criterion = "0.2"
|
||||||
|
hex-literal = "0.2.0"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
evm-debug = []
|
evm-debug = []
|
||||||
|
@ -21,7 +21,7 @@ extern crate criterion;
|
|||||||
extern crate bit_set;
|
extern crate bit_set;
|
||||||
extern crate ethereum_types;
|
extern crate ethereum_types;
|
||||||
extern crate parking_lot;
|
extern crate parking_lot;
|
||||||
extern crate heapsize;
|
extern crate parity_util_mem as mem;
|
||||||
extern crate vm;
|
extern crate vm;
|
||||||
extern crate evm;
|
extern crate evm;
|
||||||
extern crate keccak_hash as hash;
|
extern crate keccak_hash as hash;
|
||||||
@ -45,7 +45,9 @@ criterion_group!(
|
|||||||
mem_gas_calculation_same_usize,
|
mem_gas_calculation_same_usize,
|
||||||
mem_gas_calculation_same_u256,
|
mem_gas_calculation_same_u256,
|
||||||
mem_gas_calculation_increasing_usize,
|
mem_gas_calculation_increasing_usize,
|
||||||
mem_gas_calculation_increasing_u256
|
mem_gas_calculation_increasing_u256,
|
||||||
|
blockhash_mulmod_small,
|
||||||
|
blockhash_mulmod_large,
|
||||||
);
|
);
|
||||||
criterion_main!(basic);
|
criterion_main!(basic);
|
||||||
|
|
||||||
@ -150,6 +152,54 @@ fn mem_gas_calculation_increasing(gas: U256, b: &mut Bencher) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn blockhash_mulmod_small(b: &mut Criterion) {
|
||||||
|
b.bench_function("blockhash_mulmod_small", |b| {
|
||||||
|
let factory = Factory::default();
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
let code = black_box(
|
||||||
|
"6080604052348015600f57600080fd5b5060005a90505b60c881111560de5760017effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80095060017effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80095060017effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80095060017effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80095060017effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8009505a90506016565b506035806100ed6000396000f3fe6080604052600080fdfea165627a7a72305820bde4a0ac6d0fac28fc879244baf8a6a0eda514bc95fb7ecbcaaebf2556e2687c0029".from_hex().unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.address = address.clone();
|
||||||
|
params.gas = U256::from(4_000u64);
|
||||||
|
params.code = Some(Arc::new(code.clone()));
|
||||||
|
|
||||||
|
let vm = factory.create(params, ext.schedule(), 0);
|
||||||
|
|
||||||
|
result(vm.exec(&mut ext).ok().unwrap())
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn blockhash_mulmod_large(b: &mut Criterion) {
|
||||||
|
b.bench_function("blockhash_mulmod_large", |b| {
|
||||||
|
let factory = Factory::default();
|
||||||
|
let mut ext = FakeExt::new();
|
||||||
|
|
||||||
|
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
let code = black_box(
|
||||||
|
"608060405234801561001057600080fd5b5060005a90505b60c8811115610177577efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff17efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff08009507efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff17efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff08009507efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff17efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff08009507efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff17efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff08009507efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff17efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff08009505a9050610017565b506035806101866000396000f3fe6080604052600080fdfea165627a7a72305820dcaec306f67bb96f3044fff25c9af2ec66f01d0954d0656964f046f42f2780670029".from_hex().unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.address = address.clone();
|
||||||
|
params.gas = U256::from(4_000u64);
|
||||||
|
params.code = Some(Arc::new(code.clone()));
|
||||||
|
|
||||||
|
let vm = factory.create(params, ext.schedule(), 0);
|
||||||
|
|
||||||
|
result(vm.exec(&mut ext).ok().unwrap())
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
fn result(r: Result<evm::GasLeft>) -> U256 {
|
fn result(r: Result<evm::GasLeft>) -> U256 {
|
||||||
match r {
|
match r {
|
||||||
Ok(GasLeft::Known(gas_left)) => gas_left,
|
Ok(GasLeft::Known(gas_left)) => gas_left,
|
||||||
|
@ -44,12 +44,18 @@ pub trait Finalize {
|
|||||||
impl Finalize for Result<GasLeft> {
|
impl Finalize for Result<GasLeft> {
|
||||||
fn finalize<E: Ext>(self, ext: E) -> Result<FinalizationResult> {
|
fn finalize<E: Ext>(self, ext: E) -> Result<FinalizationResult> {
|
||||||
match self {
|
match self {
|
||||||
Ok(GasLeft::Known(gas_left)) => Ok(FinalizationResult { gas_left: gas_left, apply_state: true, return_data: ReturnData::empty() }),
|
Ok(GasLeft::Known(gas_left)) => {
|
||||||
Ok(GasLeft::NeedsReturn { gas_left, data, apply_state }) => ext.ret(&gas_left, &data, apply_state).map(|gas_left| FinalizationResult {
|
Ok(FinalizationResult {
|
||||||
gas_left: gas_left,
|
gas_left,
|
||||||
apply_state: apply_state,
|
apply_state: true,
|
||||||
return_data: data,
|
return_data: ReturnData::empty()
|
||||||
}),
|
})
|
||||||
|
},
|
||||||
|
Ok(GasLeft::NeedsReturn { gas_left, data, apply_state }) => {
|
||||||
|
ext.ret(&gas_left, &data, apply_state).map(|gas_left|
|
||||||
|
FinalizationResult { gas_left, apply_state, return_data: data }
|
||||||
|
)
|
||||||
|
},
|
||||||
Err(err) => Err(err),
|
Err(err) => Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -47,7 +47,7 @@ impl Factory {
|
|||||||
/// for caching jump destinations.
|
/// for caching jump destinations.
|
||||||
pub fn new(evm: VMType, cache_size: usize) -> Self {
|
pub fn new(evm: VMType, cache_size: usize) -> Self {
|
||||||
Factory {
|
Factory {
|
||||||
evm: evm,
|
evm,
|
||||||
evm_cache: Arc::new(SharedCache::new(cache_size)),
|
evm_cache: Arc::new(SharedCache::new(cache_size)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -149,6 +149,10 @@ enum_with_from_u8! {
|
|||||||
DIFFICULTY = 0x44,
|
DIFFICULTY = 0x44,
|
||||||
#[doc = "get the block's gas limit"]
|
#[doc = "get the block's gas limit"]
|
||||||
GASLIMIT = 0x45,
|
GASLIMIT = 0x45,
|
||||||
|
#[doc = "get chain ID"]
|
||||||
|
CHAINID = 0x46,
|
||||||
|
#[doc = "get balance of own account"]
|
||||||
|
SELFBALANCE = 0x47,
|
||||||
|
|
||||||
#[doc = "remove item from stack"]
|
#[doc = "remove item from stack"]
|
||||||
POP = 0x50,
|
POP = 0x50,
|
||||||
@ -442,12 +446,7 @@ pub struct InstructionInfo {
|
|||||||
impl InstructionInfo {
|
impl InstructionInfo {
|
||||||
/// Create new instruction info.
|
/// Create new instruction info.
|
||||||
pub fn new(name: &'static str, args: usize, ret: usize, tier: GasPriceTier) -> Self {
|
pub fn new(name: &'static str, args: usize, ret: usize, tier: GasPriceTier) -> Self {
|
||||||
InstructionInfo {
|
InstructionInfo { name, args, ret, tier }
|
||||||
name: name,
|
|
||||||
args: args,
|
|
||||||
ret: ret,
|
|
||||||
tier: tier
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -504,6 +503,8 @@ lazy_static! {
|
|||||||
arr[NUMBER as usize] = Some(InstructionInfo::new("NUMBER", 0, 1, GasPriceTier::Base));
|
arr[NUMBER as usize] = Some(InstructionInfo::new("NUMBER", 0, 1, GasPriceTier::Base));
|
||||||
arr[DIFFICULTY as usize] = Some(InstructionInfo::new("DIFFICULTY", 0, 1, GasPriceTier::Base));
|
arr[DIFFICULTY as usize] = Some(InstructionInfo::new("DIFFICULTY", 0, 1, GasPriceTier::Base));
|
||||||
arr[GASLIMIT as usize] = Some(InstructionInfo::new("GASLIMIT", 0, 1, GasPriceTier::Base));
|
arr[GASLIMIT as usize] = Some(InstructionInfo::new("GASLIMIT", 0, 1, GasPriceTier::Base));
|
||||||
|
arr[CHAINID as usize] = Some(InstructionInfo::new("CHAINID", 0, 1, GasPriceTier::Base));
|
||||||
|
arr[SELFBALANCE as usize] = Some(InstructionInfo::new("SELFBALANCE", 0, 1, GasPriceTier::Low));
|
||||||
arr[POP as usize] = Some(InstructionInfo::new("POP", 1, 0, GasPriceTier::Base));
|
arr[POP as usize] = Some(InstructionInfo::new("POP", 1, 0, GasPriceTier::Base));
|
||||||
arr[MLOAD as usize] = Some(InstructionInfo::new("MLOAD", 1, 1, GasPriceTier::VeryLow));
|
arr[MLOAD as usize] = Some(InstructionInfo::new("MLOAD", 1, 1, GasPriceTier::VeryLow));
|
||||||
arr[MSTORE as usize] = Some(InstructionInfo::new("MSTORE", 2, 0, GasPriceTier::VeryLow));
|
arr[MSTORE as usize] = Some(InstructionInfo::new("MSTORE", 2, 0, GasPriceTier::VeryLow));
|
||||||
|
@ -15,7 +15,7 @@
|
|||||||
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::cmp;
|
use std::cmp;
|
||||||
use ethereum_types::{U256, H256};
|
use ethereum_types::{BigEndianHash, U256};
|
||||||
use super::u256_to_address;
|
use super::u256_to_address;
|
||||||
|
|
||||||
use {evm, vm};
|
use {evm, vm};
|
||||||
@ -121,12 +121,16 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
|||||||
Request::Gas(Gas::from(1))
|
Request::Gas(Gas::from(1))
|
||||||
},
|
},
|
||||||
instructions::SSTORE => {
|
instructions::SSTORE => {
|
||||||
let address = H256::from(stack.peek(0));
|
if schedule.eip1706 && self.current_gas <= Gas::from(schedule.call_stipend) {
|
||||||
|
return Err(vm::Error::OutOfGas);
|
||||||
|
}
|
||||||
|
|
||||||
|
let address = BigEndianHash::from_uint(stack.peek(0));
|
||||||
let newval = stack.peek(1);
|
let newval = stack.peek(1);
|
||||||
let val = U256::from(&*ext.storage_at(&address)?);
|
let val = ext.storage_at(&address)?.into_uint();
|
||||||
|
|
||||||
let gas = if schedule.eip1283 {
|
let gas = if schedule.eip1283 {
|
||||||
let orig = U256::from(&*ext.initial_storage_at(&address)?);
|
let orig = ext.initial_storage_at(&address)?.into_uint();
|
||||||
calculate_eip1283_sstore_gas(schedule, &orig, &val, &newval)
|
calculate_eip1283_sstore_gas(schedule, &orig, &val, &newval)
|
||||||
} else {
|
} else {
|
||||||
if val.is_zero() && !newval.is_zero() {
|
if val.is_zero() && !newval.is_zero() {
|
||||||
|
@ -26,9 +26,11 @@ mod shared_cache;
|
|||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::{cmp, mem};
|
use std::{cmp, mem};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
use std::convert::TryFrom;
|
||||||
use hash::keccak;
|
use hash::keccak;
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use ethereum_types::{U256, U512, H256, Address};
|
use ethereum_types::{U256, U512, H256, Address, BigEndianHash};
|
||||||
|
|
||||||
|
|
||||||
use vm::{
|
use vm::{
|
||||||
self, ActionParams, ParamsType, ActionValue, CallType, MessageCallResult,
|
self, ActionParams, ParamsType, ActionValue, CallType, MessageCallResult,
|
||||||
@ -107,8 +109,6 @@ enum InstructionResult<Gas> {
|
|||||||
Trap(TrapKind),
|
Trap(TrapKind),
|
||||||
}
|
}
|
||||||
|
|
||||||
enum Never {}
|
|
||||||
|
|
||||||
/// ActionParams without code, so that it can be feed into CodeReader.
|
/// ActionParams without code, so that it can be feed into CodeReader.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
struct InterpreterParams {
|
struct InterpreterParams {
|
||||||
@ -166,12 +166,6 @@ pub enum InterpreterResult {
|
|||||||
Trap(TrapKind),
|
Trap(TrapKind),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<vm::Error> for InterpreterResult {
|
|
||||||
fn from(error: vm::Error) -> InterpreterResult {
|
|
||||||
InterpreterResult::Done(Err(error))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Intepreter EVM implementation
|
/// Intepreter EVM implementation
|
||||||
pub struct Interpreter<Cost: CostType> {
|
pub struct Interpreter<Cost: CostType> {
|
||||||
mem: Vec<u8>,
|
mem: Vec<u8>,
|
||||||
@ -282,6 +276,8 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
cache, params, reader, informant,
|
cache, params, reader, informant,
|
||||||
valid_jump_destinations, gasometer, stack,
|
valid_jump_destinations, gasometer, stack,
|
||||||
done: false,
|
done: false,
|
||||||
|
// Overridden in `step_inner` based on
|
||||||
|
// the result of `ext.trace_next_instruction`.
|
||||||
do_trace: true,
|
do_trace: true,
|
||||||
mem: Vec::new(),
|
mem: Vec::new(),
|
||||||
return_data: ReturnData::empty(),
|
return_data: ReturnData::empty(),
|
||||||
@ -302,21 +298,26 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let result = if self.gasometer.is_none() {
|
let result = if self.gasometer.is_none() {
|
||||||
InterpreterResult::Done(Err(vm::Error::OutOfGas))
|
InterpreterResult::Done(Err(vm::Error::OutOfGas))
|
||||||
} else if self.reader.len() == 0 {
|
} else if self.reader.len() == 0 {
|
||||||
InterpreterResult::Done(Ok(GasLeft::Known(self.gasometer.as_ref().expect("Gasometer None case is checked above; qed").current_gas.as_u256())))
|
let current_gas = self.gasometer
|
||||||
|
.as_ref()
|
||||||
|
.expect("Gasometer None case is checked above; qed")
|
||||||
|
.current_gas
|
||||||
|
.as_u256();
|
||||||
|
InterpreterResult::Done(Ok(GasLeft::Known(current_gas)))
|
||||||
} else {
|
} else {
|
||||||
self.step_inner(ext).err().expect("step_inner never returns Ok(()); qed")
|
self.step_inner(ext)
|
||||||
};
|
};
|
||||||
|
|
||||||
if let &InterpreterResult::Done(_) = &result {
|
if let &InterpreterResult::Done(_) = &result {
|
||||||
self.done = true;
|
self.done = true;
|
||||||
self.informant.done();
|
self.informant.done();
|
||||||
}
|
}
|
||||||
return result;
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Inner helper function for step.
|
/// Inner helper function for step.
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn step_inner(&mut self, ext: &mut vm::Ext) -> Result<Never, InterpreterResult> {
|
fn step_inner(&mut self, ext: &mut dyn vm::Ext) -> InterpreterResult {
|
||||||
let result = match self.resume_result.take() {
|
let result = match self.resume_result.take() {
|
||||||
Some(result) => result,
|
Some(result) => result,
|
||||||
None => {
|
None => {
|
||||||
@ -331,22 +332,31 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
|
|
||||||
let instruction = match instruction {
|
let instruction = match instruction {
|
||||||
Some(i) => i,
|
Some(i) => i,
|
||||||
None => return Err(InterpreterResult::Done(Err(vm::Error::BadInstruction {
|
None => return InterpreterResult::Done(Err(vm::Error::BadInstruction {
|
||||||
instruction: opcode
|
instruction: opcode
|
||||||
}))),
|
})),
|
||||||
};
|
};
|
||||||
|
|
||||||
let info = instruction.info();
|
let info = instruction.info();
|
||||||
self.last_stack_ret_len = info.ret;
|
self.last_stack_ret_len = info.ret;
|
||||||
self.verify_instruction(ext, instruction, info)?;
|
if let Err(e) = self.verify_instruction(ext, instruction, info) {
|
||||||
|
return InterpreterResult::Done(Err(e));
|
||||||
|
};
|
||||||
|
|
||||||
// Calculate gas cost
|
// Calculate gas cost
|
||||||
let requirements = self.gasometer.as_mut().expect(GASOMETER_PROOF).requirements(ext, instruction, info, &self.stack, self.mem.size())?;
|
let requirements = match self.gasometer.as_mut().expect(GASOMETER_PROOF).requirements(ext, instruction, info, &self.stack, self.mem.size()) {
|
||||||
|
Ok(t) => t,
|
||||||
|
Err(e) => return InterpreterResult::Done(Err(e)),
|
||||||
|
};
|
||||||
if self.do_trace {
|
if self.do_trace {
|
||||||
ext.trace_prepare_execute(self.reader.position - 1, opcode, requirements.gas_cost.as_u256(), Self::mem_written(instruction, &self.stack), Self::store_written(instruction, &self.stack));
|
ext.trace_prepare_execute(self.reader.position - 1, opcode, requirements.gas_cost.as_u256(), Self::mem_written(instruction, &self.stack), Self::store_written(instruction, &self.stack));
|
||||||
}
|
}
|
||||||
|
if let Err(e) = self.gasometer.as_mut().expect(GASOMETER_PROOF).verify_gas(&requirements.gas_cost) {
|
||||||
self.gasometer.as_mut().expect(GASOMETER_PROOF).verify_gas(&requirements.gas_cost)?;
|
if self.do_trace {
|
||||||
|
ext.trace_failed();
|
||||||
|
}
|
||||||
|
return InterpreterResult::Done(Err(e));
|
||||||
|
}
|
||||||
self.mem.expand(requirements.memory_required_size);
|
self.mem.expand(requirements.memory_required_size);
|
||||||
self.gasometer.as_mut().expect(GASOMETER_PROOF).current_mem_gas = requirements.memory_total_gas;
|
self.gasometer.as_mut().expect(GASOMETER_PROOF).current_mem_gas = requirements.memory_total_gas;
|
||||||
self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas = self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas - requirements.gas_cost;
|
self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas = self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas - requirements.gas_cost;
|
||||||
@ -355,18 +365,24 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
|
|
||||||
// Execute instruction
|
// Execute instruction
|
||||||
let current_gas = self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas;
|
let current_gas = self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas;
|
||||||
let result = self.exec_instruction(
|
let result = match self.exec_instruction(
|
||||||
current_gas, ext, instruction, requirements.provide_gas
|
current_gas, ext, instruction, requirements.provide_gas
|
||||||
)?;
|
) {
|
||||||
|
Err(x) => {
|
||||||
|
if self.do_trace {
|
||||||
|
ext.trace_failed();
|
||||||
|
}
|
||||||
|
return InterpreterResult::Done(Err(x));
|
||||||
|
},
|
||||||
|
Ok(x) => x,
|
||||||
|
};
|
||||||
evm_debug!({ self.informant.after_instruction(instruction) });
|
evm_debug!({ self.informant.after_instruction(instruction) });
|
||||||
|
|
||||||
result
|
result
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
if let InstructionResult::Trap(trap) = result {
|
if let InstructionResult::Trap(trap) = result {
|
||||||
return Err(InterpreterResult::Trap(trap));
|
return InterpreterResult::Trap(trap);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let InstructionResult::UnusedGas(ref gas) = result {
|
if let InstructionResult::UnusedGas(ref gas) = result {
|
||||||
@ -388,28 +404,31 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
self.valid_jump_destinations = Some(self.cache.jump_destinations(&self.params.code_hash, &self.reader.code));
|
self.valid_jump_destinations = Some(self.cache.jump_destinations(&self.params.code_hash, &self.reader.code));
|
||||||
}
|
}
|
||||||
let jump_destinations = self.valid_jump_destinations.as_ref().expect("jump_destinations are initialized on first jump; qed");
|
let jump_destinations = self.valid_jump_destinations.as_ref().expect("jump_destinations are initialized on first jump; qed");
|
||||||
let pos = self.verify_jump(position, jump_destinations)?;
|
let pos = match self.verify_jump(position, jump_destinations) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(e) => return InterpreterResult::Done(Err(e))
|
||||||
|
};
|
||||||
self.reader.position = pos;
|
self.reader.position = pos;
|
||||||
},
|
},
|
||||||
InstructionResult::StopExecutionNeedsReturn {gas, init_off, init_size, apply} => {
|
InstructionResult::StopExecutionNeedsReturn {gas, init_off, init_size, apply} => {
|
||||||
let mem = mem::replace(&mut self.mem, Vec::new());
|
let mem = mem::replace(&mut self.mem, Vec::new());
|
||||||
return Err(InterpreterResult::Done(Ok(GasLeft::NeedsReturn {
|
return InterpreterResult::Done(Ok(GasLeft::NeedsReturn {
|
||||||
gas_left: gas.as_u256(),
|
gas_left: gas.as_u256(),
|
||||||
data: mem.into_return_data(init_off, init_size),
|
data: mem.into_return_data(init_off, init_size),
|
||||||
apply_state: apply
|
apply_state: apply
|
||||||
})));
|
}));
|
||||||
},
|
},
|
||||||
InstructionResult::StopExecution => {
|
InstructionResult::StopExecution => {
|
||||||
return Err(InterpreterResult::Done(Ok(GasLeft::Known(self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas.as_u256()))));
|
return InterpreterResult::Done(Ok(GasLeft::Known(self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas.as_u256())));
|
||||||
},
|
},
|
||||||
_ => {},
|
_ => {},
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.reader.position >= self.reader.len() {
|
if self.reader.position >= self.reader.len() {
|
||||||
return Err(InterpreterResult::Done(Ok(GasLeft::Known(self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas.as_u256()))));
|
return InterpreterResult::Done(Ok(GasLeft::Known(self.gasometer.as_mut().expect(GASOMETER_PROOF).current_gas.as_u256())));
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(InterpreterResult::Continue)
|
InterpreterResult::Continue
|
||||||
}
|
}
|
||||||
|
|
||||||
fn verify_instruction(&self, ext: &vm::Ext, instruction: Instruction, info: &InstructionInfo) -> vm::Result<()> {
|
fn verify_instruction(&self, ext: &vm::Ext, instruction: Instruction, info: &InstructionInfo) -> vm::Result<()> {
|
||||||
@ -421,7 +440,9 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
((instruction == instructions::RETURNDATACOPY || instruction == instructions::RETURNDATASIZE) && !schedule.have_return_data) ||
|
((instruction == instructions::RETURNDATACOPY || instruction == instructions::RETURNDATASIZE) && !schedule.have_return_data) ||
|
||||||
(instruction == instructions::REVERT && !schedule.have_revert) ||
|
(instruction == instructions::REVERT && !schedule.have_revert) ||
|
||||||
((instruction == instructions::SHL || instruction == instructions::SHR || instruction == instructions::SAR) && !schedule.have_bitwise_shifting) ||
|
((instruction == instructions::SHL || instruction == instructions::SHR || instruction == instructions::SAR) && !schedule.have_bitwise_shifting) ||
|
||||||
(instruction == instructions::EXTCODEHASH && !schedule.have_extcodehash)
|
(instruction == instructions::EXTCODEHASH && !schedule.have_extcodehash) ||
|
||||||
|
(instruction == instructions::CHAINID && !schedule.have_chain_id) ||
|
||||||
|
(instruction == instructions::SELFBALANCE && !schedule.have_selfbalance)
|
||||||
{
|
{
|
||||||
return Err(vm::Error::BadInstruction {
|
return Err(vm::Error::BadInstruction {
|
||||||
instruction: instruction as u8
|
instruction: instruction as u8
|
||||||
@ -508,7 +529,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let init_size = self.stack.pop_back();
|
let init_size = self.stack.pop_back();
|
||||||
let address_scheme = match instruction {
|
let address_scheme = match instruction {
|
||||||
instructions::CREATE => CreateContractAddress::FromSenderAndNonce,
|
instructions::CREATE => CreateContractAddress::FromSenderAndNonce,
|
||||||
instructions::CREATE2 => CreateContractAddress::FromSenderSaltAndCodeHash(self.stack.pop_back().into()),
|
instructions::CREATE2 => CreateContractAddress::FromSenderSaltAndCodeHash(BigEndianHash::from_uint(&self.stack.pop_back())),
|
||||||
_ => unreachable!("instruction can only be CREATE/CREATE2 checked above; qed"),
|
_ => unreachable!("instruction can only be CREATE/CREATE2 checked above; qed"),
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -665,7 +686,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let size = self.stack.pop_back();
|
let size = self.stack.pop_back();
|
||||||
let topics = self.stack.pop_n(no_of_topics)
|
let topics = self.stack.pop_n(no_of_topics)
|
||||||
.iter()
|
.iter()
|
||||||
.map(H256::from)
|
.map(BigEndianHash::from_uint)
|
||||||
.collect();
|
.collect();
|
||||||
ext.log(topics, self.mem.read_slice(offset, size))?;
|
ext.log(topics, self.mem.read_slice(offset, size))?;
|
||||||
},
|
},
|
||||||
@ -702,21 +723,21 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let offset = self.stack.pop_back();
|
let offset = self.stack.pop_back();
|
||||||
let size = self.stack.pop_back();
|
let size = self.stack.pop_back();
|
||||||
let k = keccak(self.mem.read_slice(offset, size));
|
let k = keccak(self.mem.read_slice(offset, size));
|
||||||
self.stack.push(U256::from(&*k));
|
self.stack.push(k.into_uint());
|
||||||
},
|
},
|
||||||
instructions::SLOAD => {
|
instructions::SLOAD => {
|
||||||
let key = H256::from(&self.stack.pop_back());
|
let key = BigEndianHash::from_uint(&self.stack.pop_back());
|
||||||
let word = U256::from(&*ext.storage_at(&key)?);
|
let word = ext.storage_at(&key)?.into_uint();
|
||||||
self.stack.push(word);
|
self.stack.push(word);
|
||||||
},
|
},
|
||||||
instructions::SSTORE => {
|
instructions::SSTORE => {
|
||||||
let address = H256::from(&self.stack.pop_back());
|
let address = BigEndianHash::from_uint(&self.stack.pop_back());
|
||||||
let val = self.stack.pop_back();
|
let val = self.stack.pop_back();
|
||||||
|
|
||||||
let current_val = U256::from(&*ext.storage_at(&address)?);
|
let current_val = ext.storage_at(&address)?.into_uint();
|
||||||
// Increase refund for clear
|
// Increase refund for clear
|
||||||
if ext.schedule().eip1283 {
|
if ext.schedule().eip1283 {
|
||||||
let original_val = U256::from(&*ext.initial_storage_at(&address)?);
|
let original_val = ext.initial_storage_at(&address)?.into_uint();
|
||||||
gasometer::handle_eip1283_sstore_clears_refund(ext, &original_val, ¤t_val, &val);
|
gasometer::handle_eip1283_sstore_clears_refund(ext, &original_val, ¤t_val, &val);
|
||||||
} else {
|
} else {
|
||||||
if !current_val.is_zero() && val.is_zero() {
|
if !current_val.is_zero() && val.is_zero() {
|
||||||
@ -724,7 +745,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
ext.add_sstore_refund(sstore_clears_schedule);
|
ext.add_sstore_refund(sstore_clears_schedule);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ext.set_storage(address, H256::from(&val))?;
|
ext.set_storage(address, BigEndianHash::from_uint(&val))?;
|
||||||
},
|
},
|
||||||
instructions::PC => {
|
instructions::PC => {
|
||||||
self.stack.push(U256::from(self.reader.position - 1));
|
self.stack.push(U256::from(self.reader.position - 1));
|
||||||
@ -785,7 +806,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
instructions::EXTCODEHASH => {
|
instructions::EXTCODEHASH => {
|
||||||
let address = u256_to_address(&self.stack.pop_back());
|
let address = u256_to_address(&self.stack.pop_back());
|
||||||
let hash = ext.extcodehash(&address)?.unwrap_or_else(H256::zero);
|
let hash = ext.extcodehash(&address)?.unwrap_or_else(H256::zero);
|
||||||
self.stack.push(U256::from(hash));
|
self.stack.push(hash.into_uint());
|
||||||
},
|
},
|
||||||
instructions::CALLDATACOPY => {
|
instructions::CALLDATACOPY => {
|
||||||
Self::copy_data_to_memory(&mut self.mem, &mut self.stack, &self.params.data.as_ref().map_or_else(|| &[] as &[u8], |d| &*d as &[u8]));
|
Self::copy_data_to_memory(&mut self.mem, &mut self.stack, &self.params.data.as_ref().map_or_else(|| &[] as &[u8], |d| &*d as &[u8]));
|
||||||
@ -819,7 +840,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
instructions::BLOCKHASH => {
|
instructions::BLOCKHASH => {
|
||||||
let block_number = self.stack.pop_back();
|
let block_number = self.stack.pop_back();
|
||||||
let block_hash = ext.blockhash(&block_number);
|
let block_hash = ext.blockhash(&block_number);
|
||||||
self.stack.push(U256::from(&*block_hash));
|
self.stack.push(block_hash.into_uint());
|
||||||
},
|
},
|
||||||
instructions::COINBASE => {
|
instructions::COINBASE => {
|
||||||
self.stack.push(address_to_u256(ext.env_info().author.clone()));
|
self.stack.push(address_to_u256(ext.env_info().author.clone()));
|
||||||
@ -836,6 +857,12 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
instructions::GASLIMIT => {
|
instructions::GASLIMIT => {
|
||||||
self.stack.push(ext.env_info().gas_limit.clone());
|
self.stack.push(ext.env_info().gas_limit.clone());
|
||||||
},
|
},
|
||||||
|
instructions::CHAINID => {
|
||||||
|
self.stack.push(ext.chain_id().into())
|
||||||
|
},
|
||||||
|
instructions::SELFBALANCE => {
|
||||||
|
self.stack.push(ext.balance(&self.params.address)?);
|
||||||
|
}
|
||||||
|
|
||||||
// Stack instructions
|
// Stack instructions
|
||||||
|
|
||||||
@ -1009,11 +1036,12 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let c = self.stack.pop_back();
|
let c = self.stack.pop_back();
|
||||||
|
|
||||||
self.stack.push(if !c.is_zero() {
|
self.stack.push(if !c.is_zero() {
|
||||||
// upcast to 512
|
let a_512 = U512::from(a);
|
||||||
let a5 = U512::from(a);
|
let b_512 = U512::from(b);
|
||||||
let res = a5.overflowing_add(U512::from(b)).0;
|
let c_512 = U512::from(c);
|
||||||
let x = res % U512::from(c);
|
let res = a_512 + b_512;
|
||||||
U256::from(x)
|
let x = res % c_512;
|
||||||
|
U256::try_from(x).expect("U512 % U256 fits U256; qed")
|
||||||
} else {
|
} else {
|
||||||
U256::zero()
|
U256::zero()
|
||||||
});
|
});
|
||||||
@ -1024,10 +1052,12 @@ impl<Cost: CostType> Interpreter<Cost> {
|
|||||||
let c = self.stack.pop_back();
|
let c = self.stack.pop_back();
|
||||||
|
|
||||||
self.stack.push(if !c.is_zero() {
|
self.stack.push(if !c.is_zero() {
|
||||||
let a5 = U512::from(a);
|
let a_512 = U512::from(a);
|
||||||
let res = a5.overflowing_mul(U512::from(b)).0;
|
let b_512 = U512::from(b);
|
||||||
let x = res % U512::from(c);
|
let c_512 = U512::from(c);
|
||||||
U256::from(x)
|
let res = a_512 * b_512;
|
||||||
|
let x = res % c_512;
|
||||||
|
U256::try_from(x).expect("U512 % U256 fits U256; qed")
|
||||||
} else {
|
} else {
|
||||||
U256::zero()
|
U256::zero()
|
||||||
});
|
});
|
||||||
@ -1167,12 +1197,13 @@ fn set_sign(value: U256, sign: bool) -> U256 {
|
|||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn u256_to_address(value: &U256) -> Address {
|
fn u256_to_address(value: &U256) -> Address {
|
||||||
Address::from(H256::from(value))
|
let addr: H256 = BigEndianHash::from_uint(value);
|
||||||
|
Address::from(addr)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn address_to_u256(value: Address) -> U256 {
|
fn address_to_u256(value: Address) -> U256 {
|
||||||
U256::from(&*H256::from(value))
|
H256::from(value).into_uint()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
@ -1183,6 +1214,7 @@ mod tests {
|
|||||||
use factory::Factory;
|
use factory::Factory;
|
||||||
use vm::{self, Exec, ActionParams, ActionValue};
|
use vm::{self, Exec, ActionParams, ActionValue};
|
||||||
use vm::tests::{FakeExt, test_finalize};
|
use vm::tests::{FakeExt, test_finalize};
|
||||||
|
use ethereum_types::Address;
|
||||||
|
|
||||||
fn interpreter(params: ActionParams, ext: &vm::Ext) -> Box<Exec> {
|
fn interpreter(params: ActionParams, ext: &vm::Ext) -> Box<Exec> {
|
||||||
Factory::new(VMType::Interpreter, 1).create(params, ext.schedule(), ext.depth())
|
Factory::new(VMType::Interpreter, 1).create(params, ext.schedule(), ext.depth())
|
||||||
@ -1193,13 +1225,13 @@ mod tests {
|
|||||||
let code = "7feeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff006000527faaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffaa6020526000620f120660406000601773945304eb96065b2a98b57a48a06ae28d285a71b56101f4f1600055".from_hex().unwrap();
|
let code = "7feeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff006000527faaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffaa6020526000620f120660406000601773945304eb96065b2a98b57a48a06ae28d285a71b56101f4f1600055".from_hex().unwrap();
|
||||||
|
|
||||||
let mut params = ActionParams::default();
|
let mut params = ActionParams::default();
|
||||||
params.address = 5.into();
|
params.address = Address::from_low_u64_be(5);
|
||||||
params.gas = 300_000.into();
|
params.gas = 300_000.into();
|
||||||
params.gas_price = 1.into();
|
params.gas_price = 1.into();
|
||||||
params.value = ActionValue::Transfer(100_000.into());
|
params.value = ActionValue::Transfer(100_000.into());
|
||||||
params.code = Some(Arc::new(code));
|
params.code = Some(Arc::new(code));
|
||||||
let mut ext = FakeExt::new();
|
let mut ext = FakeExt::new();
|
||||||
ext.balances.insert(5.into(), 1_000_000_000.into());
|
ext.balances.insert(Address::from_low_u64_be(5), 1_000_000_000.into());
|
||||||
ext.tracing = true;
|
ext.tracing = true;
|
||||||
|
|
||||||
let gas_left = {
|
let gas_left = {
|
||||||
@ -1216,12 +1248,12 @@ mod tests {
|
|||||||
let code = "6001600160000360003e00".from_hex().unwrap();
|
let code = "6001600160000360003e00".from_hex().unwrap();
|
||||||
|
|
||||||
let mut params = ActionParams::default();
|
let mut params = ActionParams::default();
|
||||||
params.address = 5.into();
|
params.address = Address::from_low_u64_be(5);
|
||||||
params.gas = 300_000.into();
|
params.gas = 300_000.into();
|
||||||
params.gas_price = 1.into();
|
params.gas_price = 1.into();
|
||||||
params.code = Some(Arc::new(code));
|
params.code = Some(Arc::new(code));
|
||||||
let mut ext = FakeExt::new_byzantium();
|
let mut ext = FakeExt::new_byzantium();
|
||||||
ext.balances.insert(5.into(), 1_000_000_000.into());
|
ext.balances.insert(Address::from_low_u64_be(5), 1_000_000_000.into());
|
||||||
ext.tracing = true;
|
ext.tracing = true;
|
||||||
|
|
||||||
let err = {
|
let err = {
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use hash::KECCAK_EMPTY;
|
use hash::KECCAK_EMPTY;
|
||||||
use heapsize::HeapSizeOf;
|
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
|
||||||
use ethereum_types::H256;
|
use ethereum_types::H256;
|
||||||
use parking_lot::Mutex;
|
use parking_lot::Mutex;
|
||||||
use memory_cache::MemoryLruCache;
|
use memory_cache::MemoryLruCache;
|
||||||
@ -25,11 +25,12 @@ use super::super::instructions::{self, Instruction};
|
|||||||
|
|
||||||
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
|
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
|
||||||
|
|
||||||
// stub for a HeapSizeOf implementation.
|
/// Stub for a sharing `BitSet` data in cache (reference counted)
|
||||||
|
/// and implementing MallocSizeOf on it.
|
||||||
struct Bits(Arc<BitSet>);
|
struct Bits(Arc<BitSet>);
|
||||||
|
|
||||||
impl HeapSizeOf for Bits {
|
impl MallocSizeOf for Bits {
|
||||||
fn heap_size_of_children(&self) -> usize {
|
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
||||||
// dealing in bits here
|
// dealing in bits here
|
||||||
self.0.capacity() * 8
|
self.0.capacity() * 8
|
||||||
}
|
}
|
||||||
|
@ -19,7 +19,7 @@
|
|||||||
extern crate bit_set;
|
extern crate bit_set;
|
||||||
extern crate ethereum_types;
|
extern crate ethereum_types;
|
||||||
extern crate parking_lot;
|
extern crate parking_lot;
|
||||||
extern crate heapsize;
|
extern crate parity_util_mem;
|
||||||
extern crate vm;
|
extern crate vm;
|
||||||
extern crate keccak_hash as hash;
|
extern crate keccak_hash as hash;
|
||||||
extern crate memory_cache;
|
extern crate memory_cache;
|
||||||
@ -33,6 +33,8 @@ extern crate log;
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
extern crate rustc_hex;
|
extern crate rustc_hex;
|
||||||
|
#[cfg(test)]
|
||||||
|
extern crate hex_literal;
|
||||||
|
|
||||||
pub mod evm;
|
pub mod evm;
|
||||||
pub mod interpreter;
|
pub mod interpreter;
|
||||||
|
@ -25,6 +25,7 @@ use vm::{self, ActionParams, ActionValue, Ext};
|
|||||||
use vm::tests::{FakeExt, FakeCall, FakeCallType, test_finalize};
|
use vm::tests::{FakeExt, FakeCall, FakeCallType, test_finalize};
|
||||||
use factory::Factory;
|
use factory::Factory;
|
||||||
use vmtype::VMType;
|
use vmtype::VMType;
|
||||||
|
use hex_literal::hex;
|
||||||
|
|
||||||
evm_test!{test_add: test_add_int}
|
evm_test!{test_add: test_add_int}
|
||||||
fn test_add(factory: super::Factory) {
|
fn test_add(factory: super::Factory) {
|
||||||
@ -108,6 +109,32 @@ fn test_origin(factory: super::Factory) {
|
|||||||
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
evm_test!{test_selfbalance: test_selfbalance_int}
|
||||||
|
fn test_selfbalance(factory: super::Factory) {
|
||||||
|
let own_addr = Address::from_str("1337000000000000000000000000000000000000").unwrap();
|
||||||
|
// 47 SELFBALANCE
|
||||||
|
// 60 ff PUSH ff
|
||||||
|
// 55 SSTORE
|
||||||
|
let code = hex!("47 60 ff 55").to_vec();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.address = own_addr.clone();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(Arc::new(code));
|
||||||
|
let mut ext = FakeExt::new_istanbul();
|
||||||
|
ext.balances = {
|
||||||
|
let mut x = HashMap::new();
|
||||||
|
x.insert(own_addr, U256::from(1_025)); // 0x401
|
||||||
|
x
|
||||||
|
};
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create(params, ext.schedule(), ext.depth());
|
||||||
|
test_finalize(vm.exec(&mut ext).ok().unwrap()).unwrap()
|
||||||
|
};
|
||||||
|
assert_eq!(gas_left, U256::from(79_992)); // TODO[dvdplm]: do the sums here, SELFBALANCE-5 + PUSH1-3 + ONEBYTE-4 + SSTORE-?? = 100_000 - 79_992
|
||||||
|
assert_store(&ext, 0xff, "0000000000000000000000000000000000000000000000000000000000000401");
|
||||||
|
}
|
||||||
|
|
||||||
evm_test!{test_sender: test_sender_int}
|
evm_test!{test_sender: test_sender_int}
|
||||||
fn test_sender(factory: super::Factory) {
|
fn test_sender(factory: super::Factory) {
|
||||||
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||||
@ -130,6 +157,27 @@ fn test_sender(factory: super::Factory) {
|
|||||||
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
assert_store(&ext, 0, "000000000000000000000000cd1722f2947def4cf144679da39c4c32bdc35681");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
evm_test!{test_chain_id: test_chain_id_int}
|
||||||
|
fn test_chain_id(factory: super::Factory) {
|
||||||
|
// 46 CHAINID
|
||||||
|
// 60 00 PUSH 0
|
||||||
|
// 55 SSTORE
|
||||||
|
let code = hex!("46 60 00 55").to_vec();
|
||||||
|
|
||||||
|
let mut params = ActionParams::default();
|
||||||
|
params.gas = U256::from(100_000);
|
||||||
|
params.code = Some(Arc::new(code));
|
||||||
|
let mut ext = FakeExt::new_istanbul().with_chain_id(9);
|
||||||
|
|
||||||
|
let gas_left = {
|
||||||
|
let vm = factory.create(params, ext.schedule(), ext.depth());
|
||||||
|
test_finalize(vm.exec(&mut ext).ok().unwrap()).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(gas_left, U256::from(79_995));
|
||||||
|
assert_store(&ext, 0, "0000000000000000000000000000000000000000000000000000000000000009");
|
||||||
|
}
|
||||||
|
|
||||||
evm_test!{test_extcodecopy: test_extcodecopy_int}
|
evm_test!{test_extcodecopy: test_extcodecopy_int}
|
||||||
fn test_extcodecopy(factory: super::Factory) {
|
fn test_extcodecopy(factory: super::Factory) {
|
||||||
// 33 - sender
|
// 33 - sender
|
||||||
@ -239,7 +287,7 @@ fn test_blockhash(factory: super::Factory) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_974));
|
assert_eq!(gas_left, U256::from(79_974));
|
||||||
assert_eq!(ext.store.get(&H256::new()).unwrap(), &blockhash);
|
assert_eq!(ext.store.get(&H256::zero()).unwrap(), &blockhash);
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_calldataload: test_calldataload_int}
|
evm_test!{test_calldataload: test_calldataload_int}
|
||||||
@ -262,7 +310,6 @@ fn test_calldataload(factory: super::Factory) {
|
|||||||
|
|
||||||
assert_eq!(gas_left, U256::from(79_991));
|
assert_eq!(gas_left, U256::from(79_991));
|
||||||
assert_store(&ext, 0, "23ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff23");
|
assert_store(&ext, 0, "23ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff23");
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
evm_test!{test_author: test_author_int}
|
evm_test!{test_author: test_author_int}
|
||||||
@ -726,8 +773,8 @@ evm_test!{test_calls: test_calls_int}
|
|||||||
fn test_calls(factory: super::Factory) {
|
fn test_calls(factory: super::Factory) {
|
||||||
let code = "600054602d57600160005560006000600060006050610998610100f160006000600060006050610998610100f25b".from_hex().unwrap();
|
let code = "600054602d57600160005560006000600060006050610998610100f160006000600060006050610998610100f25b".from_hex().unwrap();
|
||||||
|
|
||||||
let address = Address::from(0x155);
|
let address = Address::from_low_u64_be(0x155);
|
||||||
let code_address = Address::from(0x998);
|
let code_address = Address::from_low_u64_be(0x998);
|
||||||
let mut params = ActionParams::default();
|
let mut params = ActionParams::default();
|
||||||
params.gas = U256::from(150_000);
|
params.gas = U256::from(150_000);
|
||||||
params.code = Some(Arc::new(code));
|
params.code = Some(Arc::new(code));
|
||||||
@ -772,7 +819,7 @@ evm_test!{test_create_in_staticcall: test_create_in_staticcall_int}
|
|||||||
fn test_create_in_staticcall(factory: super::Factory) {
|
fn test_create_in_staticcall(factory: super::Factory) {
|
||||||
let code = "600060006064f000".from_hex().unwrap();
|
let code = "600060006064f000".from_hex().unwrap();
|
||||||
|
|
||||||
let address = Address::from(0x155);
|
let address = Address::from_low_u64_be(0x155);
|
||||||
let mut params = ActionParams::default();
|
let mut params = ActionParams::default();
|
||||||
params.gas = U256::from(100_000);
|
params.gas = U256::from(100_000);
|
||||||
params.code = Some(Arc::new(code));
|
params.code = Some(Arc::new(code));
|
||||||
@ -1066,5 +1113,5 @@ fn assert_set_contains<T : Debug + Eq + PartialEq + Hash>(set: &HashSet<T>, val:
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn assert_store(ext: &FakeExt, pos: u64, val: &str) {
|
fn assert_store(ext: &FakeExt, pos: u64, val: &str) {
|
||||||
assert_eq!(ext.store.get(&H256::from(pos)).unwrap(), &H256::from_str(val).unwrap());
|
assert_eq!(ext.store.get(&H256::from_low_u64_be(pos)).unwrap(), &H256::from_str(val).unwrap());
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
description = "Parity Light Client Implementation"
|
description = "Parity Ethereum (EthCore) Light Client Implementation (Block Import IO Service, Blockchain Data Fetching, Light Client Header Chain Storage, Parity Light Protocol (PLP) Provider, Light Transaction Queue, CHT Definitions, Light Client Data Cache), Parity Light Protocol (PLP) Implementation, P2P Network I/O and Event Context Generalization, Peer Error Handling & Punishment, Request Load Timer & Distribution Manager, Pending Request Set Storage, Request Credit Management, Light Client Request Types, Request Chain Builder Utility, On-demand Chain Request Service over LES (for RPCs), ResponseGuard Implementation)"
|
||||||
homepage = "http://parity.io"
|
homepage = "http://parity.io"
|
||||||
license = "GPL-3.0"
|
license = "GPL-3.0"
|
||||||
name = "ethcore-light"
|
name = "ethcore-light"
|
||||||
@ -10,32 +10,34 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
|||||||
log = "0.4"
|
log = "0.4"
|
||||||
parity-bytes = "0.1"
|
parity-bytes = "0.1"
|
||||||
common-types = { path = "../types" }
|
common-types = { path = "../types" }
|
||||||
|
derive_more = "0.14.0"
|
||||||
ethcore = { path = ".."}
|
ethcore = { path = ".."}
|
||||||
ethcore-db = { path = "../db" }
|
ethcore-db = { path = "../db" }
|
||||||
ethcore-blockchain = { path = "../blockchain" }
|
ethcore-blockchain = { path = "../blockchain" }
|
||||||
ethereum-types = "0.4"
|
ethereum-types = "0.6.0"
|
||||||
memory-db = "0.11.0"
|
memory-db = "0.12.4"
|
||||||
trie-db = "0.11.0"
|
trie-db = "0.12.4"
|
||||||
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
|
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
|
||||||
ethcore-network = { path = "../../util/network" }
|
ethcore-network = { path = "../../util/network" }
|
||||||
|
ethcore-miner = { path = "../../miner" }
|
||||||
ethcore-io = { path = "../../util/io" }
|
ethcore-io = { path = "../../util/io" }
|
||||||
hash-db = "0.11.0"
|
hash-db = "0.12.4"
|
||||||
heapsize = "0.4"
|
parity-util-mem = "0.1"
|
||||||
vm = { path = "../vm" }
|
vm = { path = "../vm" }
|
||||||
fastmap = { path = "../../util/fastmap" }
|
fastmap = { path = "../../util/fastmap" }
|
||||||
failsafe = { version = "0.3.0", default-features = false, features = ["parking_lot_mutex"] }
|
failsafe = { version = "0.3.0", default-features = false, features = ["parking_lot_mutex"] }
|
||||||
rlp = { version = "0.3.0", features = ["ethereum"] }
|
rlp = "0.4.0"
|
||||||
rlp_derive = { path = "../../util/rlp-derive" }
|
rlp_derive = { path = "../../util/rlp-derive" }
|
||||||
smallvec = "0.6"
|
smallvec = "0.6"
|
||||||
futures = "0.1"
|
futures = "0.1"
|
||||||
rand = "0.4"
|
rand = "0.6"
|
||||||
itertools = "0.5"
|
itertools = "0.5"
|
||||||
bincode = "0.8.0"
|
bincode = "1.1"
|
||||||
serde = "1.0"
|
serde = "1.0"
|
||||||
serde_derive = "1.0"
|
serde_derive = "1.0"
|
||||||
parking_lot = "0.7"
|
parking_lot = "0.7"
|
||||||
stats = { path = "../../util/stats" }
|
stats = { path = "../../util/stats" }
|
||||||
keccak-hash = "0.1"
|
keccak-hash = "0.2.0"
|
||||||
keccak-hasher = { path = "../../util/keccak-hasher" }
|
keccak-hasher = { path = "../../util/keccak-hasher" }
|
||||||
triehash-ethereum = { version = "0.2", path = "../../util/triehash-ethereum" }
|
triehash-ethereum = { version = "0.2", path = "../../util/triehash-ethereum" }
|
||||||
kvdb = "0.1"
|
kvdb = "0.1"
|
||||||
|
@ -21,12 +21,12 @@
|
|||||||
//! vector of all gas prices from a recent range of blocks.
|
//! vector of all gas prices from a recent range of blocks.
|
||||||
|
|
||||||
use std::time::{Instant, Duration};
|
use std::time::{Instant, Duration};
|
||||||
|
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps, MallocSizeOfExt};
|
||||||
|
|
||||||
use common_types::encoded;
|
use common_types::encoded;
|
||||||
use common_types::BlockNumber;
|
use common_types::BlockNumber;
|
||||||
use common_types::receipt::Receipt;
|
use common_types::receipt::Receipt;
|
||||||
use ethereum_types::{H256, U256};
|
use ethereum_types::{H256, U256};
|
||||||
use heapsize::HeapSizeOf;
|
|
||||||
use memory_cache::MemoryLruCache;
|
use memory_cache::MemoryLruCache;
|
||||||
use stats::Corpus;
|
use stats::Corpus;
|
||||||
|
|
||||||
@ -157,18 +157,20 @@ impl Cache {
|
|||||||
|
|
||||||
/// Get the memory used.
|
/// Get the memory used.
|
||||||
pub fn mem_used(&self) -> usize {
|
pub fn mem_used(&self) -> usize {
|
||||||
self.heap_size_of_children()
|
self.malloc_size_of()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HeapSizeOf for Cache {
|
|
||||||
fn heap_size_of_children(&self) -> usize {
|
// This is fast method: it is possible to have a more exhaustive implementation
|
||||||
|
impl MallocSizeOf for Cache {
|
||||||
|
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
||||||
self.headers.current_size()
|
self.headers.current_size()
|
||||||
+ self.canon_hashes.current_size()
|
+ self.canon_hashes.current_size()
|
||||||
+ self.bodies.current_size()
|
+ self.bodies.current_size()
|
||||||
+ self.receipts.current_size()
|
+ self.receipts.current_size()
|
||||||
+ self.chain_score.current_size()
|
+ self.chain_score.current_size()
|
||||||
// TODO: + corpus
|
// `self.corpus` is skipped
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -95,7 +95,8 @@ pub struct BlockInfo {
|
|||||||
/// Build an in-memory CHT from a closure which provides necessary information
|
/// Build an in-memory CHT from a closure which provides necessary information
|
||||||
/// about blocks. If the fetcher ever fails to provide the info, the CHT
|
/// about blocks. If the fetcher ever fails to provide the info, the CHT
|
||||||
/// will not be generated.
|
/// will not be generated.
|
||||||
pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB<KeccakHasher, DBValue>>>
|
pub fn build<F>(cht_num: u64, mut fetcher: F)
|
||||||
|
-> Option<CHT<MemoryDB<KeccakHasher, memory_db::HashKey<KeccakHasher>, DBValue>>>
|
||||||
where F: FnMut(BlockId) -> Option<BlockInfo>
|
where F: FnMut(BlockId) -> Option<BlockInfo>
|
||||||
{
|
{
|
||||||
let mut db = new_memory_db();
|
let mut db = new_memory_db();
|
||||||
@ -104,7 +105,7 @@ pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB<KeccakHashe
|
|||||||
let last_num = start_number(cht_num + 1) - 1;
|
let last_num = start_number(cht_num + 1) - 1;
|
||||||
let mut id = BlockId::Number(last_num);
|
let mut id = BlockId::Number(last_num);
|
||||||
|
|
||||||
let mut root = H256::default();
|
let mut root = H256::zero();
|
||||||
|
|
||||||
{
|
{
|
||||||
let mut t = TrieDBMut::new(&mut db, &mut root);
|
let mut t = TrieDBMut::new(&mut db, &mut root);
|
||||||
@ -154,7 +155,7 @@ pub fn compute_root<I>(cht_num: u64, iterable: I) -> Option<H256>
|
|||||||
pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> {
|
pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> {
|
||||||
let mut db = new_memory_db();
|
let mut db = new_memory_db();
|
||||||
|
|
||||||
for node in proof { db.insert(&node[..]); }
|
for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
|
||||||
let res = match TrieDB::new(&db, &root) {
|
let res = match TrieDB::new(&db, &root) {
|
||||||
Err(_) => return None,
|
Err(_) => return None,
|
||||||
Ok(trie) => trie.get_with(&key!(num), |val: &[u8]| {
|
Ok(trie) => trie.get_with(&key!(num), |val: &[u8]| {
|
||||||
|
@ -21,8 +21,7 @@ use std::sync::Arc;
|
|||||||
use common_types::encoded;
|
use common_types::encoded;
|
||||||
use common_types::header::Header;
|
use common_types::header::Header;
|
||||||
use common_types::receipt::Receipt;
|
use common_types::receipt::Receipt;
|
||||||
use ethcore::engines::{EthEngine, StateDependentProof};
|
use ethcore::engines::{Engine, StateDependentProof};
|
||||||
use ethcore::machine::EthereumMachine;
|
|
||||||
use ethereum_types::H256;
|
use ethereum_types::H256;
|
||||||
use futures::future::IntoFuture;
|
use futures::future::IntoFuture;
|
||||||
|
|
||||||
@ -48,8 +47,8 @@ pub trait ChainDataFetcher: Send + Sync + 'static {
|
|||||||
fn epoch_transition(
|
fn epoch_transition(
|
||||||
&self,
|
&self,
|
||||||
_hash: H256,
|
_hash: H256,
|
||||||
_engine: Arc<EthEngine>,
|
_engine: Arc<Engine>,
|
||||||
_checker: Arc<StateDependentProof<EthereumMachine>>
|
_checker: Arc<StateDependentProof>
|
||||||
) -> Self::Transition;
|
) -> Self::Transition;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -77,8 +76,8 @@ impl ChainDataFetcher for Unavailable {
|
|||||||
fn epoch_transition(
|
fn epoch_transition(
|
||||||
&self,
|
&self,
|
||||||
_hash: H256,
|
_hash: H256,
|
||||||
_engine: Arc<EthEngine>,
|
_engine: Arc<Engine>,
|
||||||
_checker: Arc<StateDependentProof<EthereumMachine>>
|
_checker: Arc<StateDependentProof>
|
||||||
) -> Self::Transition {
|
) -> Self::Transition {
|
||||||
Err("fetching epoch transition proofs unavailable")
|
Err("fetching epoch transition proofs unavailable")
|
||||||
}
|
}
|
||||||
|
@ -35,10 +35,10 @@ use common_types::encoded;
|
|||||||
use common_types::header::Header;
|
use common_types::header::Header;
|
||||||
use common_types::ids::BlockId;
|
use common_types::ids::BlockId;
|
||||||
use ethcore::engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
|
use ethcore::engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
|
||||||
use ethcore::error::{Error, EthcoreResult, ErrorKind as EthcoreErrorKind, BlockError};
|
use ethcore::error::{Error, EthcoreResult, BlockError};
|
||||||
use ethcore::spec::{Spec, SpecHardcodedSync};
|
use ethcore::spec::{Spec, SpecHardcodedSync};
|
||||||
use ethereum_types::{H256, H264, U256};
|
use ethereum_types::{H256, H264, U256};
|
||||||
use heapsize::HeapSizeOf;
|
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
|
||||||
use kvdb::{DBTransaction, KeyValueDB};
|
use kvdb::{DBTransaction, KeyValueDB};
|
||||||
use parking_lot::{Mutex, RwLock};
|
use parking_lot::{Mutex, RwLock};
|
||||||
use fastmap::H256FastMap;
|
use fastmap::H256FastMap;
|
||||||
@ -95,8 +95,8 @@ struct Entry {
|
|||||||
canonical_hash: H256,
|
canonical_hash: H256,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HeapSizeOf for Entry {
|
impl MallocSizeOf for Entry {
|
||||||
fn heap_size_of_children(&self) -> usize {
|
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
|
||||||
if self.candidates.spilled() {
|
if self.candidates.spilled() {
|
||||||
self.candidates.capacity() * ::std::mem::size_of::<Candidate>()
|
self.candidates.capacity() * ::std::mem::size_of::<Candidate>()
|
||||||
} else {
|
} else {
|
||||||
@ -154,8 +154,11 @@ fn pending_transition_key(block_hash: H256) -> H264 {
|
|||||||
|
|
||||||
let mut key = H264::default();
|
let mut key = H264::default();
|
||||||
|
|
||||||
key[0] = LEADING;
|
{
|
||||||
key.0[1..].copy_from_slice(&block_hash.0[..]);
|
let bytes = key.as_bytes_mut();
|
||||||
|
bytes[0] = LEADING;
|
||||||
|
bytes[1..].copy_from_slice(block_hash.as_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
key
|
key
|
||||||
}
|
}
|
||||||
@ -165,8 +168,11 @@ fn transition_key(block_hash: H256) -> H264 {
|
|||||||
|
|
||||||
let mut key = H264::default();
|
let mut key = H264::default();
|
||||||
|
|
||||||
key[0] = LEADING;
|
{
|
||||||
key.0[1..].copy_from_slice(&block_hash.0[..]);
|
let bytes = key.as_bytes_mut();
|
||||||
|
bytes[0] = LEADING;
|
||||||
|
bytes[1..].copy_from_slice(block_hash.as_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
key
|
key
|
||||||
}
|
}
|
||||||
@ -196,14 +202,21 @@ pub enum HardcodedSync {
|
|||||||
Deny,
|
Deny,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(MallocSizeOf)]
|
||||||
/// Header chain. See module docs for more details.
|
/// Header chain. See module docs for more details.
|
||||||
pub struct HeaderChain {
|
pub struct HeaderChain {
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
genesis_header: encoded::Header, // special-case the genesis.
|
genesis_header: encoded::Header, // special-case the genesis.
|
||||||
candidates: RwLock<BTreeMap<u64, Entry>>,
|
candidates: RwLock<BTreeMap<u64, Entry>>,
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
best_block: RwLock<BlockDescriptor>,
|
best_block: RwLock<BlockDescriptor>,
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>,
|
live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>,
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
db: Arc<KeyValueDB>,
|
db: Arc<KeyValueDB>,
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
col: Option<u32>,
|
col: Option<u32>,
|
||||||
|
#[ignore_malloc_size_of = "ignored for performance reason"]
|
||||||
cache: Arc<Mutex<Cache>>,
|
cache: Arc<Mutex<Cache>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -237,7 +250,7 @@ impl HeaderChain {
|
|||||||
for c in &entry.candidates {
|
for c in &entry.candidates {
|
||||||
let key = transition_key(c.hash);
|
let key = transition_key(c.hash);
|
||||||
|
|
||||||
if let Some(proof) = db.get(col, &*key)? {
|
if let Some(proof) = db.get(col, key.as_bytes())? {
|
||||||
live_epoch_proofs.insert(c.hash, EpochTransition {
|
live_epoch_proofs.insert(c.hash, EpochTransition {
|
||||||
block_hash: c.hash,
|
block_hash: c.hash,
|
||||||
block_number: cur_number,
|
block_number: cur_number,
|
||||||
@ -254,7 +267,7 @@ impl HeaderChain {
|
|||||||
let best_block = {
|
let best_block = {
|
||||||
let era = match candidates.get(&curr.best_num) {
|
let era = match candidates.get(&curr.best_num) {
|
||||||
Some(era) => era,
|
Some(era) => era,
|
||||||
None => bail!("Database corrupt: highest block referenced but no data."),
|
None => return Err("Database corrupt: highest block referenced but no data.".into()),
|
||||||
};
|
};
|
||||||
|
|
||||||
let best = &era.candidates[0];
|
let best = &era.candidates[0];
|
||||||
@ -403,7 +416,7 @@ impl HeaderChain {
|
|||||||
.and_then(|entry| entry.candidates.iter().find(|c| c.hash == parent_hash))
|
.and_then(|entry| entry.candidates.iter().find(|c| c.hash == parent_hash))
|
||||||
.map(|c| c.total_difficulty)
|
.map(|c| c.total_difficulty)
|
||||||
.ok_or_else(|| BlockError::UnknownParent(parent_hash))
|
.ok_or_else(|| BlockError::UnknownParent(parent_hash))
|
||||||
.map_err(EthcoreErrorKind::Block)?
|
.map_err(Error::Block)?
|
||||||
};
|
};
|
||||||
|
|
||||||
parent_td + *header.difficulty()
|
parent_td + *header.difficulty()
|
||||||
@ -431,7 +444,7 @@ impl HeaderChain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some(transition) = transition {
|
if let Some(transition) = transition {
|
||||||
transaction.put(self.col, &*transition_key(hash), &transition.proof);
|
transaction.put(self.col, transition_key(hash).as_bytes(), &transition.proof);
|
||||||
self.live_epoch_proofs.write().insert(hash, transition);
|
self.live_epoch_proofs.write().insert(hash, transition);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -508,10 +521,10 @@ impl HeaderChain {
|
|||||||
for ancient in &era_entry.candidates {
|
for ancient in &era_entry.candidates {
|
||||||
let maybe_transition = live_epoch_proofs.remove(&ancient.hash);
|
let maybe_transition = live_epoch_proofs.remove(&ancient.hash);
|
||||||
if let Some(epoch_transition) = maybe_transition {
|
if let Some(epoch_transition) = maybe_transition {
|
||||||
transaction.delete(self.col, &*transition_key(ancient.hash));
|
transaction.delete(self.col, transition_key(ancient.hash).as_bytes());
|
||||||
|
|
||||||
if ancient.hash == era_entry.canonical_hash {
|
if ancient.hash == era_entry.canonical_hash {
|
||||||
last_canonical_transition = match self.db.get(self.col, &ancient.hash) {
|
last_canonical_transition = match self.db.get(self.col, ancient.hash.as_bytes()) {
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
warn!(target: "chain", "Error reading from DB: {}\n
|
warn!(target: "chain", "Error reading from DB: {}\n
|
||||||
", e);
|
", e);
|
||||||
@ -526,7 +539,7 @@ impl HeaderChain {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
transaction.delete(self.col, &ancient.hash);
|
transaction.delete(self.col, ancient.hash.as_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
let canon = &era_entry.candidates[0];
|
let canon = &era_entry.candidates[0];
|
||||||
@ -576,7 +589,7 @@ impl HeaderChain {
|
|||||||
} else {
|
} else {
|
||||||
let msg = format!("header of block #{} not found in DB ; database in an \
|
let msg = format!("header of block #{} not found in DB ; database in an \
|
||||||
inconsistent state", h_num);
|
inconsistent state", h_num);
|
||||||
bail!(msg);
|
return Err(msg.into());
|
||||||
};
|
};
|
||||||
|
|
||||||
let decoded = header.decode().expect("decoding db value failed");
|
let decoded = header.decode().expect("decoding db value failed");
|
||||||
@ -647,7 +660,7 @@ impl HeaderChain {
|
|||||||
match cache.block_header(&hash) {
|
match cache.block_header(&hash) {
|
||||||
Some(header) => Some(header),
|
Some(header) => Some(header),
|
||||||
None => {
|
None => {
|
||||||
match self.db.get(self.col, &hash) {
|
match self.db.get(self.col, hash.as_bytes()) {
|
||||||
Ok(db_value) => {
|
Ok(db_value) => {
|
||||||
db_value.map(|x| x.into_vec()).map(encoded::Header::new)
|
db_value.map(|x| x.into_vec()).map(encoded::Header::new)
|
||||||
.and_then(|header| {
|
.and_then(|header| {
|
||||||
@ -772,7 +785,7 @@ impl HeaderChain {
|
|||||||
|
|
||||||
/// Get block status.
|
/// Get block status.
|
||||||
pub fn status(&self, hash: &H256) -> BlockStatus {
|
pub fn status(&self, hash: &H256) -> BlockStatus {
|
||||||
if self.db.get(self.col, hash).ok().map_or(false, |x| x.is_some()) {
|
if self.db.get(self.col, hash.as_bytes()).ok().map_or(false, |x| x.is_some()) {
|
||||||
BlockStatus::InChain
|
BlockStatus::InChain
|
||||||
} else {
|
} else {
|
||||||
BlockStatus::Unknown
|
BlockStatus::Unknown
|
||||||
@ -782,13 +795,13 @@ impl HeaderChain {
|
|||||||
/// Insert a pending transition.
|
/// Insert a pending transition.
|
||||||
pub fn insert_pending_transition(&self, batch: &mut DBTransaction, hash: H256, t: &PendingEpochTransition) {
|
pub fn insert_pending_transition(&self, batch: &mut DBTransaction, hash: H256, t: &PendingEpochTransition) {
|
||||||
let key = pending_transition_key(hash);
|
let key = pending_transition_key(hash);
|
||||||
batch.put(self.col, &*key, &*::rlp::encode(t));
|
batch.put(self.col, key.as_bytes(), &*::rlp::encode(t));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get pending transition for a specific block hash.
|
/// Get pending transition for a specific block hash.
|
||||||
pub fn pending_transition(&self, hash: H256) -> Option<PendingEpochTransition> {
|
pub fn pending_transition(&self, hash: H256) -> Option<PendingEpochTransition> {
|
||||||
let key = pending_transition_key(hash);
|
let key = pending_transition_key(hash);
|
||||||
match self.db.get(self.col, &*key) {
|
match self.db.get(self.col, key.as_bytes()) {
|
||||||
Ok(db_fetch) => db_fetch.map(|bytes| ::rlp::decode(&bytes).expect("decoding value from db failed")),
|
Ok(db_fetch) => db_fetch.map(|bytes| ::rlp::decode(&bytes).expect("decoding value from db failed")),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
warn!(target: "chain", "Error reading from database: {}", e);
|
warn!(target: "chain", "Error reading from database: {}", e);
|
||||||
@ -832,12 +845,6 @@ impl HeaderChain {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HeapSizeOf for HeaderChain {
|
|
||||||
fn heap_size_of_children(&self) -> usize {
|
|
||||||
self.candidates.read().heap_size_of_children()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterator over a block's ancestry.
|
/// Iterator over a block's ancestry.
|
||||||
pub struct AncestryIter<'a> {
|
pub struct AncestryIter<'a> {
|
||||||
next: Option<encoded::Header>,
|
next: Option<encoded::Header>,
|
||||||
|
@ -19,8 +19,7 @@
|
|||||||
use std::sync::{Weak, Arc};
|
use std::sync::{Weak, Arc};
|
||||||
|
|
||||||
use ethcore::client::{ClientReport, EnvInfo, ClientIoMessage};
|
use ethcore::client::{ClientReport, EnvInfo, ClientIoMessage};
|
||||||
use ethcore::engines::{epoch, EthEngine, EpochChange, EpochTransition, Proof};
|
use ethcore::engines::{epoch, Engine, EpochChange, EpochTransition, Proof};
|
||||||
use ethcore::machine::EthereumMachine;
|
|
||||||
use ethcore::error::{Error, EthcoreResult};
|
use ethcore::error::{Error, EthcoreResult};
|
||||||
use ethcore::verification::queue::{self, HeaderQueue};
|
use ethcore::verification::queue::{self, HeaderQueue};
|
||||||
use ethcore::spec::{Spec, SpecHardcodedSync};
|
use ethcore::spec::{Spec, SpecHardcodedSync};
|
||||||
@ -34,6 +33,7 @@ use common_types::blockchain_info::BlockChainInfo;
|
|||||||
use common_types::encoded;
|
use common_types::encoded;
|
||||||
use common_types::header::Header;
|
use common_types::header::Header;
|
||||||
use common_types::ids::BlockId;
|
use common_types::ids::BlockId;
|
||||||
|
use common_types::verification_queue_info::VerificationQueueInfo as BlockQueueInfo;
|
||||||
|
|
||||||
use kvdb::KeyValueDB;
|
use kvdb::KeyValueDB;
|
||||||
|
|
||||||
@ -91,6 +91,9 @@ pub trait LightChainClient: Send + Sync {
|
|||||||
/// Attempt to get a block hash by block id.
|
/// Attempt to get a block hash by block id.
|
||||||
fn block_hash(&self, id: BlockId) -> Option<H256>;
|
fn block_hash(&self, id: BlockId) -> Option<H256>;
|
||||||
|
|
||||||
|
/// Get block queue information.
|
||||||
|
fn queue_info(&self) -> BlockQueueInfo;
|
||||||
|
|
||||||
/// Attempt to get block header by block id.
|
/// Attempt to get block header by block id.
|
||||||
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
|
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
|
||||||
|
|
||||||
@ -111,7 +114,7 @@ pub trait LightChainClient: Send + Sync {
|
|||||||
fn env_info(&self, id: BlockId) -> Option<EnvInfo>;
|
fn env_info(&self, id: BlockId) -> Option<EnvInfo>;
|
||||||
|
|
||||||
/// Get a handle to the consensus engine.
|
/// Get a handle to the consensus engine.
|
||||||
fn engine(&self) -> &Arc<EthEngine>;
|
fn engine(&self) -> &Arc<Engine>;
|
||||||
|
|
||||||
/// Query whether a block is known.
|
/// Query whether a block is known.
|
||||||
fn is_known(&self, hash: &H256) -> bool;
|
fn is_known(&self, hash: &H256) -> bool;
|
||||||
@ -125,9 +128,6 @@ pub trait LightChainClient: Send + Sync {
|
|||||||
/// Flush the queue.
|
/// Flush the queue.
|
||||||
fn flush_queue(&self);
|
fn flush_queue(&self);
|
||||||
|
|
||||||
/// Get queue info.
|
|
||||||
fn queue_info(&self) -> queue::QueueInfo;
|
|
||||||
|
|
||||||
/// Get the `i`th CHT root.
|
/// Get the `i`th CHT root.
|
||||||
fn cht_root(&self, i: usize) -> Option<H256>;
|
fn cht_root(&self, i: usize) -> Option<H256>;
|
||||||
|
|
||||||
@ -159,7 +159,7 @@ impl<T: LightChainClient> AsLightClient for T {
|
|||||||
/// Light client implementation.
|
/// Light client implementation.
|
||||||
pub struct Client<T> {
|
pub struct Client<T> {
|
||||||
queue: HeaderQueue,
|
queue: HeaderQueue,
|
||||||
engine: Arc<EthEngine>,
|
engine: Arc<Engine>,
|
||||||
chain: HeaderChain,
|
chain: HeaderChain,
|
||||||
report: RwLock<ClientReport>,
|
report: RwLock<ClientReport>,
|
||||||
import_lock: Mutex<()>,
|
import_lock: Mutex<()>,
|
||||||
@ -361,9 +361,9 @@ impl<T: ChainDataFetcher> Client<T> {
|
|||||||
|
|
||||||
/// Get blockchain mem usage in bytes.
|
/// Get blockchain mem usage in bytes.
|
||||||
pub fn chain_mem_used(&self) -> usize {
|
pub fn chain_mem_used(&self) -> usize {
|
||||||
use heapsize::HeapSizeOf;
|
use parity_util_mem::MallocSizeOfExt;
|
||||||
|
|
||||||
self.chain.heap_size_of_children()
|
self.chain.malloc_size_of()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Set a closure to call when the client wants to be restarted.
|
/// Set a closure to call when the client wants to be restarted.
|
||||||
@ -375,7 +375,7 @@ impl<T: ChainDataFetcher> Client<T> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Get a handle to the verification engine.
|
/// Get a handle to the verification engine.
|
||||||
pub fn engine(&self) -> &Arc<EthEngine> {
|
pub fn engine(&self) -> &Arc<Engine> {
|
||||||
&self.engine
|
&self.engine
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -467,7 +467,7 @@ impl<T: ChainDataFetcher> Client<T> {
|
|||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_epoch_signal(&self, verified_header: &Header) -> Result<Option<Proof<EthereumMachine>>, T::Error> {
|
fn check_epoch_signal(&self, verified_header: &Header) -> Result<Option<Proof>, T::Error> {
|
||||||
use ethcore::machine::{AuxiliaryRequest, AuxiliaryData};
|
use ethcore::machine::{AuxiliaryRequest, AuxiliaryData};
|
||||||
|
|
||||||
let mut block: Option<Vec<u8>> = None;
|
let mut block: Option<Vec<u8>> = None;
|
||||||
@ -513,7 +513,7 @@ impl<T: ChainDataFetcher> Client<T> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// attempts to fetch the epoch proof from the network until successful.
|
// attempts to fetch the epoch proof from the network until successful.
|
||||||
fn write_pending_proof(&self, header: &Header, proof: Proof<EthereumMachine>) -> Result<(), T::Error> {
|
fn write_pending_proof(&self, header: &Header, proof: Proof) -> Result<(), T::Error> {
|
||||||
let proof = match proof {
|
let proof = match proof {
|
||||||
Proof::Known(known) => known,
|
Proof::Known(known) => known,
|
||||||
Proof::WithState(state_dependent) => {
|
Proof::WithState(state_dependent) => {
|
||||||
@ -534,6 +534,7 @@ impl<T: ChainDataFetcher> Client<T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
||||||
fn add_listener(&self, listener: Weak<LightChainNotify>) {
|
fn add_listener(&self, listener: Weak<LightChainNotify>) {
|
||||||
Client::add_listener(self, listener)
|
Client::add_listener(self, listener)
|
||||||
@ -541,6 +542,10 @@ impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
|||||||
|
|
||||||
fn chain_info(&self) -> BlockChainInfo { Client::chain_info(self) }
|
fn chain_info(&self) -> BlockChainInfo { Client::chain_info(self) }
|
||||||
|
|
||||||
|
fn queue_info(&self) -> queue::QueueInfo {
|
||||||
|
self.queue.queue_info()
|
||||||
|
}
|
||||||
|
|
||||||
fn queue_header(&self, header: Header) -> EthcoreResult<H256> {
|
fn queue_header(&self, header: Header) -> EthcoreResult<H256> {
|
||||||
self.import_header(header)
|
self.import_header(header)
|
||||||
}
|
}
|
||||||
@ -573,7 +578,7 @@ impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
|||||||
Client::env_info(self, id)
|
Client::env_info(self, id)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn engine(&self) -> &Arc<EthEngine> {
|
fn engine(&self) -> &Arc<Engine> {
|
||||||
Client::engine(self)
|
Client::engine(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -600,10 +605,6 @@ impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
|||||||
Client::flush_queue(self);
|
Client::flush_queue(self);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn queue_info(&self) -> queue::QueueInfo {
|
|
||||||
self.queue.queue_info()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn cht_root(&self, i: usize) -> Option<H256> {
|
fn cht_root(&self, i: usize) -> Option<H256> {
|
||||||
Client::cht_root(self, i)
|
Client::cht_root(self, i)
|
||||||
}
|
}
|
||||||
|
@ -75,9 +75,8 @@ impl<T: ChainDataFetcher> Service<T> {
|
|||||||
io_service.channel(),
|
io_service.channel(),
|
||||||
cache,
|
cache,
|
||||||
)?);
|
)?);
|
||||||
|
|
||||||
io_service.register_handler(Arc::new(ImportBlocks(client.clone()))).map_err(Error::Io)?;
|
|
||||||
spec.engine.register_client(Arc::downgrade(&client) as _);
|
spec.engine.register_client(Arc::downgrade(&client) as _);
|
||||||
|
io_service.register_handler(Arc::new(ImportBlocks(client.clone()))).map_err(Error::Io)?;
|
||||||
|
|
||||||
Ok(Service {
|
Ok(Service {
|
||||||
client,
|
client,
|
||||||
|
@ -61,9 +61,12 @@ extern crate ethcore_io as io;
|
|||||||
extern crate ethcore_network as network;
|
extern crate ethcore_network as network;
|
||||||
extern crate parity_bytes as bytes;
|
extern crate parity_bytes as bytes;
|
||||||
extern crate ethereum_types;
|
extern crate ethereum_types;
|
||||||
|
extern crate ethcore_miner as miner;
|
||||||
extern crate ethcore;
|
extern crate ethcore;
|
||||||
extern crate hash_db;
|
extern crate hash_db;
|
||||||
extern crate heapsize;
|
extern crate parity_util_mem;
|
||||||
|
extern crate parity_util_mem as mem;
|
||||||
|
extern crate parity_util_mem as malloc_size_of;
|
||||||
extern crate failsafe;
|
extern crate failsafe;
|
||||||
extern crate futures;
|
extern crate futures;
|
||||||
extern crate itertools;
|
extern crate itertools;
|
||||||
@ -85,8 +88,7 @@ extern crate keccak_hash as hash;
|
|||||||
extern crate triehash_ethereum as triehash;
|
extern crate triehash_ethereum as triehash;
|
||||||
extern crate kvdb;
|
extern crate kvdb;
|
||||||
extern crate memory_cache;
|
extern crate memory_cache;
|
||||||
#[macro_use]
|
extern crate derive_more;
|
||||||
extern crate error_chain;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
extern crate kvdb_memorydb;
|
extern crate kvdb_memorydb;
|
||||||
|
@ -199,15 +199,15 @@ pub struct FileStore(pub PathBuf);
|
|||||||
impl SampleStore for FileStore {
|
impl SampleStore for FileStore {
|
||||||
fn load(&self) -> HashMap<Kind, VecDeque<u64>> {
|
fn load(&self) -> HashMap<Kind, VecDeque<u64>> {
|
||||||
File::open(&self.0)
|
File::open(&self.0)
|
||||||
.map_err(|e| Box::new(bincode::ErrorKind::IoError(e)))
|
.map_err(|e| Box::new(bincode::ErrorKind::Io(e)))
|
||||||
.and_then(|mut file| bincode::deserialize_from(&mut file, bincode::Infinite))
|
.and_then(|mut file| bincode::deserialize_from(&mut file))
|
||||||
.unwrap_or_else(|_| HashMap::new())
|
.unwrap_or_else(|_| HashMap::new())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn store(&self, samples: &HashMap<Kind, VecDeque<u64>>) {
|
fn store(&self, samples: &HashMap<Kind, VecDeque<u64>>) {
|
||||||
let res = File::create(&self.0)
|
let res = File::create(&self.0)
|
||||||
.map_err(|e| Box::new(bincode::ErrorKind::IoError(e)))
|
.map_err(|e| Box::new(bincode::ErrorKind::Io(e)))
|
||||||
.and_then(|mut file| bincode::serialize_into(&mut file, samples, bincode::Infinite));
|
.and_then(|mut file| bincode::serialize_into(&mut file, samples));
|
||||||
|
|
||||||
if let Err(e) = res {
|
if let Err(e) = res {
|
||||||
warn!(target: "pip", "Error writing light request timing samples to file: {}", e);
|
warn!(target: "pip", "Error writing light request timing samples to file: {}", e);
|
||||||
|
@ -382,7 +382,7 @@ mod tests {
|
|||||||
protocol_version: 1,
|
protocol_version: 1,
|
||||||
network_id: 1,
|
network_id: 1,
|
||||||
head_td: U256::default(),
|
head_td: U256::default(),
|
||||||
head_hash: H256::default(),
|
head_hash: H256::zero(),
|
||||||
head_num: 10,
|
head_num: 10,
|
||||||
genesis_hash: H256::zero(),
|
genesis_hash: H256::zero(),
|
||||||
last_head: None,
|
last_head: None,
|
||||||
@ -417,7 +417,7 @@ mod tests {
|
|||||||
protocol_version: 1,
|
protocol_version: 1,
|
||||||
network_id: 1,
|
network_id: 1,
|
||||||
head_td: U256::default(),
|
head_td: U256::default(),
|
||||||
head_hash: H256::default(),
|
head_hash: H256::zero(),
|
||||||
head_num: 10,
|
head_num: 10,
|
||||||
genesis_hash: H256::zero(),
|
genesis_hash: H256::zero(),
|
||||||
last_head: None,
|
last_head: None,
|
||||||
@ -452,7 +452,7 @@ mod tests {
|
|||||||
protocol_version: 1,
|
protocol_version: 1,
|
||||||
network_id: 1,
|
network_id: 1,
|
||||||
head_td: U256::default(),
|
head_td: U256::default(),
|
||||||
head_hash: H256::default(),
|
head_hash: H256::zero(),
|
||||||
head_num: 10,
|
head_num: 10,
|
||||||
genesis_hash: H256::zero(),
|
genesis_hash: H256::zero(),
|
||||||
last_head: None,
|
last_head: None,
|
||||||
@ -550,7 +550,7 @@ mod tests {
|
|||||||
protocol_version: 1,
|
protocol_version: 1,
|
||||||
network_id: 1,
|
network_id: 1,
|
||||||
head_td: U256::default(),
|
head_td: U256::default(),
|
||||||
head_hash: H256::default(),
|
head_hash: H256::zero(),
|
||||||
head_num: 10,
|
head_num: 10,
|
||||||
genesis_hash: H256::zero(),
|
genesis_hash: H256::zero(),
|
||||||
last_head: None,
|
last_head: None,
|
||||||
|
@ -22,7 +22,7 @@ use common_types::encoded;
|
|||||||
use common_types::ids::BlockId;
|
use common_types::ids::BlockId;
|
||||||
use common_types::transaction::{Action, PendingTransaction};
|
use common_types::transaction::{Action, PendingTransaction};
|
||||||
use ethcore::client::{EachBlockWith, TestBlockChainClient};
|
use ethcore::client::{EachBlockWith, TestBlockChainClient};
|
||||||
use ethereum_types::{H256, U256, Address};
|
use ethereum_types::{H256, U256, Address, BigEndianHash};
|
||||||
use net::context::IoContext;
|
use net::context::IoContext;
|
||||||
use net::load_timer::MOVING_SAMPLE_SIZE;
|
use net::load_timer::MOVING_SAMPLE_SIZE;
|
||||||
use net::status::{Capabilities, Status};
|
use net::status::{Capabilities, Status};
|
||||||
@ -158,7 +158,7 @@ impl Provider for TestProvider {
|
|||||||
|
|
||||||
fn contract_code(&self, req: request::CompleteCodeRequest) -> Option<request::CodeResponse> {
|
fn contract_code(&self, req: request::CompleteCodeRequest) -> Option<request::CodeResponse> {
|
||||||
Some(CodeResponse {
|
Some(CodeResponse {
|
||||||
code: req.block_hash.iter().chain(req.code_hash.iter()).cloned().collect(),
|
code: req.block_hash.as_bytes().iter().chain(req.code_hash.as_bytes().iter()).cloned().collect(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -261,7 +261,7 @@ fn genesis_mismatch() {
|
|||||||
let (provider, proto) = setup(capabilities);
|
let (provider, proto) = setup(capabilities);
|
||||||
|
|
||||||
let mut status = status(provider.client.chain_info());
|
let mut status = status(provider.client.chain_info());
|
||||||
status.genesis_hash = H256::default();
|
status.genesis_hash = H256::zero();
|
||||||
|
|
||||||
let packet_body = write_handshake(&status, &capabilities, &proto);
|
let packet_body = write_handshake(&status, &capabilities, &proto);
|
||||||
|
|
||||||
@ -472,16 +472,16 @@ fn get_state_proofs() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req_id = 112;
|
let req_id = 112;
|
||||||
let key1: H256 = U256::from(11223344).into();
|
let key1: H256 = BigEndianHash::from_uint(&U256::from(11223344));
|
||||||
let key2: H256 = U256::from(99988887).into();
|
let key2: H256 = BigEndianHash::from_uint(&U256::from(99988887));
|
||||||
|
|
||||||
let mut builder = Builder::default();
|
let mut builder = Builder::default();
|
||||||
builder.push(Request::Account(IncompleteAccountRequest {
|
builder.push(Request::Account(IncompleteAccountRequest {
|
||||||
block_hash: H256::default().into(),
|
block_hash: H256::zero().into(),
|
||||||
address_hash: key1.into(),
|
address_hash: key1.into(),
|
||||||
})).unwrap();
|
})).unwrap();
|
||||||
builder.push(Request::Storage(IncompleteStorageRequest {
|
builder.push(Request::Storage(IncompleteStorageRequest {
|
||||||
block_hash: H256::default().into(),
|
block_hash: H256::zero().into(),
|
||||||
address_hash: key1.into(),
|
address_hash: key1.into(),
|
||||||
key_hash: key2.into(),
|
key_hash: key2.into(),
|
||||||
})).unwrap();
|
})).unwrap();
|
||||||
@ -492,11 +492,11 @@ fn get_state_proofs() {
|
|||||||
let response = {
|
let response = {
|
||||||
let responses = vec![
|
let responses = vec![
|
||||||
Response::Account(provider.account_proof(CompleteAccountRequest {
|
Response::Account(provider.account_proof(CompleteAccountRequest {
|
||||||
block_hash: H256::default(),
|
block_hash: H256::zero(),
|
||||||
address_hash: key1,
|
address_hash: key1,
|
||||||
}).unwrap()),
|
}).unwrap()),
|
||||||
Response::Storage(provider.storage_proof(CompleteStorageRequest {
|
Response::Storage(provider.storage_proof(CompleteStorageRequest {
|
||||||
block_hash: H256::default(),
|
block_hash: H256::zero(),
|
||||||
address_hash: key1,
|
address_hash: key1,
|
||||||
key_hash: key2,
|
key_hash: key2,
|
||||||
}).unwrap()),
|
}).unwrap()),
|
||||||
@ -529,8 +529,8 @@ fn get_contract_code() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req_id = 112;
|
let req_id = 112;
|
||||||
let key1: H256 = U256::from(11223344).into();
|
let key1: H256 = BigEndianHash::from_uint(&U256::from(11223344));
|
||||||
let key2: H256 = U256::from(99988887).into();
|
let key2: H256 = BigEndianHash::from_uint(&U256::from(99988887));
|
||||||
|
|
||||||
let request = Request::Code(IncompleteCodeRequest {
|
let request = Request::Code(IncompleteCodeRequest {
|
||||||
block_hash: key1.into(),
|
block_hash: key1.into(),
|
||||||
@ -541,7 +541,7 @@ fn get_contract_code() {
|
|||||||
let request_body = make_packet(req_id, &requests);
|
let request_body = make_packet(req_id, &requests);
|
||||||
let response = {
|
let response = {
|
||||||
let response = vec![Response::Code(CodeResponse {
|
let response = vec![Response::Code(CodeResponse {
|
||||||
code: key1.iter().chain(key2.iter()).cloned().collect(),
|
code: key1.as_bytes().iter().chain(key2.as_bytes().iter()).cloned().collect(),
|
||||||
})];
|
})];
|
||||||
|
|
||||||
let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests()).unwrap();
|
let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests()).unwrap();
|
||||||
@ -616,9 +616,9 @@ fn proof_of_execution() {
|
|||||||
|
|
||||||
let req_id = 112;
|
let req_id = 112;
|
||||||
let mut request = Request::Execution(request::IncompleteExecutionRequest {
|
let mut request = Request::Execution(request::IncompleteExecutionRequest {
|
||||||
block_hash: H256::default().into(),
|
block_hash: H256::zero().into(),
|
||||||
from: Address::default(),
|
from: Address::zero(),
|
||||||
action: Action::Call(Address::default()),
|
action: Action::Call(Address::zero()),
|
||||||
gas: 100.into(),
|
gas: 100.into(),
|
||||||
gas_price: 0.into(),
|
gas_price: 0.into(),
|
||||||
value: 0.into(),
|
value: 0.into(),
|
||||||
@ -755,7 +755,7 @@ fn get_transaction_index() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req_id = 112;
|
let req_id = 112;
|
||||||
let key1: H256 = U256::from(11223344).into();
|
let key1: H256 = BigEndianHash::from_uint(&U256::from(11223344));
|
||||||
|
|
||||||
let request = Request::TransactionIndex(IncompleteTransactionIndexRequest {
|
let request = Request::TransactionIndex(IncompleteTransactionIndexRequest {
|
||||||
hash: key1.into(),
|
hash: key1.into(),
|
||||||
|
@ -66,32 +66,31 @@ pub const DEFAULT_NUM_CONSECUTIVE_FAILED_REQUESTS: usize = 1;
|
|||||||
|
|
||||||
/// OnDemand related errors
|
/// OnDemand related errors
|
||||||
pub mod error {
|
pub mod error {
|
||||||
// Silence: `use of deprecated item 'std::error::Error::cause': replaced by Error::source, which can support downcasting`
|
|
||||||
// https://github.com/paritytech/parity-ethereum/issues/10302
|
|
||||||
#![allow(deprecated)]
|
|
||||||
|
|
||||||
use futures::sync::oneshot::Canceled;
|
use futures::sync::oneshot::Canceled;
|
||||||
|
|
||||||
error_chain! {
|
/// OnDemand Error
|
||||||
|
#[derive(Debug, derive_more::Display, derive_more::From)]
|
||||||
foreign_links {
|
pub enum Error {
|
||||||
ChannelCanceled(Canceled) #[doc = "Canceled oneshot channel"];
|
/// Canceled oneshot channel
|
||||||
|
ChannelCanceled(Canceled),
|
||||||
|
/// Timeout bad response
|
||||||
|
BadResponse(String),
|
||||||
|
/// OnDemand requests limit exceeded
|
||||||
|
#[display(fmt = "OnDemand request maximum backoff iterations exceeded")]
|
||||||
|
RequestLimit,
|
||||||
}
|
}
|
||||||
|
|
||||||
errors {
|
impl std::error::Error for Error {
|
||||||
#[doc = "Timeout bad response"]
|
fn source(&self) -> Option<&(std::error::Error + 'static)> {
|
||||||
BadResponse(err: String) {
|
match self {
|
||||||
description("Max response evaluation time exceeded")
|
Error::ChannelCanceled(err) => Some(err),
|
||||||
display("{}", err)
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[doc = "OnDemand requests limit exceeded"]
|
/// OnDemand Result
|
||||||
RequestLimit {
|
pub type Result<T> = std::result::Result<T, Error>;
|
||||||
description("OnDemand request maximum backoff iterations exceeded")
|
|
||||||
display("OnDemand request maximum backoff iterations exceeded")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Public interface for performing network requests `OnDemand`
|
/// Public interface for performing network requests `OnDemand`
|
||||||
@ -272,7 +271,7 @@ impl Pending {
|
|||||||
response_err
|
response_err
|
||||||
);
|
);
|
||||||
|
|
||||||
let err = self::error::ErrorKind::BadResponse(err);
|
let err = self::error::Error::BadResponse(err);
|
||||||
if self.sender.send(Err(err.into())).is_err() {
|
if self.sender.send(Err(err.into())).is_err() {
|
||||||
debug!(target: "on_demand", "Dropped oneshot channel receiver on no response");
|
debug!(target: "on_demand", "Dropped oneshot channel receiver on no response");
|
||||||
}
|
}
|
||||||
@ -280,7 +279,7 @@ impl Pending {
|
|||||||
|
|
||||||
// returning a peer discovery timeout during query attempts
|
// returning a peer discovery timeout during query attempts
|
||||||
fn request_limit_reached(self) {
|
fn request_limit_reached(self) {
|
||||||
let err = self::error::ErrorKind::RequestLimit;
|
let err = self::error::Error::RequestLimit;
|
||||||
if self.sender.send(Err(err.into())).is_err() {
|
if self.sender.send(Err(err.into())).is_err() {
|
||||||
debug!(target: "on_demand", "Dropped oneshot channel receiver on time out");
|
debug!(target: "on_demand", "Dropped oneshot channel receiver on time out");
|
||||||
}
|
}
|
||||||
|
@ -24,8 +24,7 @@ use common_types::basic_account::BasicAccount;
|
|||||||
use common_types::encoded;
|
use common_types::encoded;
|
||||||
use common_types::receipt::Receipt;
|
use common_types::receipt::Receipt;
|
||||||
use common_types::transaction::SignedTransaction;
|
use common_types::transaction::SignedTransaction;
|
||||||
use ethcore::engines::{EthEngine, StateDependentProof};
|
use ethcore::engines::{Engine, StateDependentProof};
|
||||||
use ethcore::machine::EthereumMachine;
|
|
||||||
use ethcore::state::{self, ProvedExecution};
|
use ethcore::state::{self, ProvedExecution};
|
||||||
use ethereum_types::{H256, U256, Address};
|
use ethereum_types::{H256, U256, Address};
|
||||||
use ethtrie::{TrieError, TrieDB};
|
use ethtrie::{TrieError, TrieDB};
|
||||||
@ -981,9 +980,9 @@ impl Account {
|
|||||||
let state_root = header.state_root();
|
let state_root = header.state_root();
|
||||||
|
|
||||||
let mut db = journaldb::new_memory_db();
|
let mut db = journaldb::new_memory_db();
|
||||||
for node in proof { db.insert(&node[..]); }
|
for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
|
||||||
|
|
||||||
match TrieDB::new(&db, &state_root).and_then(|t| t.get(&keccak(&self.address)))? {
|
match TrieDB::new(&db, &state_root).and_then(|t| t.get(keccak(&self.address).as_bytes()))? {
|
||||||
Some(val) => {
|
Some(val) => {
|
||||||
let rlp = Rlp::new(&val);
|
let rlp = Rlp::new(&val);
|
||||||
Ok(Some(BasicAccount {
|
Ok(Some(BasicAccount {
|
||||||
@ -1038,7 +1037,7 @@ pub struct TransactionProof {
|
|||||||
// TODO: it's not really possible to provide this if the header is unknown.
|
// TODO: it's not really possible to provide this if the header is unknown.
|
||||||
pub env_info: EnvInfo,
|
pub env_info: EnvInfo,
|
||||||
/// Consensus engine.
|
/// Consensus engine.
|
||||||
pub engine: Arc<EthEngine>,
|
pub engine: Arc<Engine>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TransactionProof {
|
impl TransactionProof {
|
||||||
@ -1081,9 +1080,9 @@ pub struct Signal {
|
|||||||
/// Block hash and number to fetch proof for.
|
/// Block hash and number to fetch proof for.
|
||||||
pub hash: H256,
|
pub hash: H256,
|
||||||
/// Consensus engine, used to check the proof.
|
/// Consensus engine, used to check the proof.
|
||||||
pub engine: Arc<EthEngine>,
|
pub engine: Arc<Engine>,
|
||||||
/// Special checker for the proof.
|
/// Special checker for the proof.
|
||||||
pub proof_check: Arc<StateDependentProof<EthereumMachine>>,
|
pub proof_check: Arc<StateDependentProof>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Signal {
|
impl Signal {
|
||||||
@ -1162,7 +1161,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn check_header_with_ancestors() {
|
fn check_header_with_ancestors() {
|
||||||
let mut last_header_hash = H256::default();
|
let mut last_header_hash = H256::zero();
|
||||||
let mut headers = (0..11).map(|num| {
|
let mut headers = (0..11).map(|num| {
|
||||||
let mut header = Header::new();
|
let mut header = Header::new();
|
||||||
header.set_number(num);
|
header.set_number(num);
|
||||||
@ -1278,7 +1277,7 @@ mod tests {
|
|||||||
fn check_state_proof() {
|
fn check_state_proof() {
|
||||||
use rlp::RlpStream;
|
use rlp::RlpStream;
|
||||||
|
|
||||||
let mut root = H256::default();
|
let mut root = H256::zero();
|
||||||
let mut db = journaldb::new_memory_db();
|
let mut db = journaldb::new_memory_db();
|
||||||
let mut header = Header::new();
|
let mut header = Header::new();
|
||||||
header.set_number(123_456);
|
header.set_number(123_456);
|
||||||
@ -1298,17 +1297,17 @@ mod tests {
|
|||||||
let mut trie = SecTrieDBMut::new(&mut db, &mut root);
|
let mut trie = SecTrieDBMut::new(&mut db, &mut root);
|
||||||
for _ in 0..100 {
|
for _ in 0..100 {
|
||||||
let address = Address::random();
|
let address = Address::random();
|
||||||
trie.insert(&*address, &rand_acc()).unwrap();
|
trie.insert(address.as_bytes(), &rand_acc()).unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
trie.insert(&*addr, &rand_acc()).unwrap();
|
trie.insert(addr.as_bytes(), &rand_acc()).unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
let proof = {
|
let proof = {
|
||||||
let trie = SecTrieDB::new(&db, &root).unwrap();
|
let trie = SecTrieDB::new(&db, &root).unwrap();
|
||||||
let mut recorder = Recorder::new();
|
let mut recorder = Recorder::new();
|
||||||
|
|
||||||
trie.get_with(&*addr, &mut recorder).unwrap().unwrap();
|
trie.get_with(addr.as_bytes(), &mut recorder).unwrap().unwrap();
|
||||||
|
|
||||||
recorder.drain().into_iter().map(|r| r.data).collect::<Vec<_>>()
|
recorder.drain().into_iter().map(|r| r.data).collect::<Vec<_>>()
|
||||||
};
|
};
|
||||||
|
@ -117,9 +117,9 @@ fn dummy_status() -> Status {
|
|||||||
protocol_version: 1,
|
protocol_version: 1,
|
||||||
network_id: 999,
|
network_id: 999,
|
||||||
head_td: 1.into(),
|
head_td: 1.into(),
|
||||||
head_hash: H256::default(),
|
head_hash: H256::zero(),
|
||||||
head_num: 1359,
|
head_num: 1359,
|
||||||
genesis_hash: H256::default(),
|
genesis_hash: H256::zero(),
|
||||||
last_head: None,
|
last_head: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -138,7 +138,7 @@ fn detects_hangup() {
|
|||||||
let on_demand = Harness::create().service;
|
let on_demand = Harness::create().service;
|
||||||
let result = on_demand.request_raw(
|
let result = on_demand.request_raw(
|
||||||
&Context::NoOp,
|
&Context::NoOp,
|
||||||
vec![request::HeaderByHash(H256::default().into()).into()],
|
vec![request::HeaderByHash(H256::zero().into()).into()],
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(on_demand.pending.read().len(), 1);
|
assert_eq!(on_demand.pending.read().len(), 1);
|
||||||
@ -199,7 +199,7 @@ fn no_capabilities() {
|
|||||||
|
|
||||||
let _recv = harness.service.request_raw(
|
let _recv = harness.service.request_raw(
|
||||||
&Context::NoOp,
|
&Context::NoOp,
|
||||||
vec![request::HeaderByHash(H256::default().into()).into()]
|
vec![request::HeaderByHash(H256::zero().into()).into()]
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
|
||||||
assert_eq!(harness.service.pending.read().len(), 1);
|
assert_eq!(harness.service.pending.read().len(), 1);
|
||||||
@ -395,7 +395,7 @@ fn wrong_kind() {
|
|||||||
|
|
||||||
let _recv = harness.service.request_raw(
|
let _recv = harness.service.request_raw(
|
||||||
&Context::NoOp,
|
&Context::NoOp,
|
||||||
vec![request::HeaderByHash(H256::default().into()).into()]
|
vec![request::HeaderByHash(H256::zero().into()).into()]
|
||||||
).unwrap();
|
).unwrap();
|
||||||
|
|
||||||
assert_eq!(harness.service.pending.read().len(), 1);
|
assert_eq!(harness.service.pending.read().len(), 1);
|
||||||
|
@ -24,12 +24,15 @@
|
|||||||
//! address-wise manner.
|
//! address-wise manner.
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
use std::sync::Arc;
|
||||||
use std::collections::{BTreeMap, HashMap};
|
use std::collections::{BTreeMap, HashMap};
|
||||||
use std::collections::hash_map::Entry;
|
use std::collections::hash_map::Entry;
|
||||||
|
|
||||||
use common_types::transaction::{self, Condition, PendingTransaction, SignedTransaction};
|
use common_types::transaction::{self, Condition, PendingTransaction, SignedTransaction};
|
||||||
use ethereum_types::{H256, U256, Address};
|
use ethereum_types::{H256, U256, Address};
|
||||||
use fastmap::H256FastMap;
|
use fastmap::H256FastMap;
|
||||||
|
use futures::sync::mpsc;
|
||||||
|
use miner::pool::TxStatus;
|
||||||
|
|
||||||
// Knowledge of an account's current nonce.
|
// Knowledge of an account's current nonce.
|
||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||||
@ -126,14 +129,13 @@ pub enum ImportDestination {
|
|||||||
Future,
|
Future,
|
||||||
}
|
}
|
||||||
|
|
||||||
type Listener = Box<Fn(&[H256]) + Send + Sync>;
|
|
||||||
|
|
||||||
/// Light transaction queue. See module docs for more details.
|
/// Light transaction queue. See module docs for more details.
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct TransactionQueue {
|
pub struct TransactionQueue {
|
||||||
by_account: HashMap<Address, AccountTransactions>,
|
by_account: HashMap<Address, AccountTransactions>,
|
||||||
by_hash: H256FastMap<PendingTransaction>,
|
by_hash: H256FastMap<PendingTransaction>,
|
||||||
listeners: Vec<Listener>,
|
pending_listeners: Vec<mpsc::UnboundedSender<Arc<Vec<H256>>>>,
|
||||||
|
full_listeners: Vec<mpsc::UnboundedSender<Arc<Vec<(H256, TxStatus)>>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Debug for TransactionQueue {
|
impl fmt::Debug for TransactionQueue {
|
||||||
@ -141,7 +143,8 @@ impl fmt::Debug for TransactionQueue {
|
|||||||
fmt.debug_struct("TransactionQueue")
|
fmt.debug_struct("TransactionQueue")
|
||||||
.field("by_account", &self.by_account)
|
.field("by_account", &self.by_account)
|
||||||
.field("by_hash", &self.by_hash)
|
.field("by_hash", &self.by_hash)
|
||||||
.field("listeners", &self.listeners.len())
|
.field("pending_listeners", &self.pending_listeners.len())
|
||||||
|
.field("full_listeners", &self.pending_listeners.len())
|
||||||
.finish()
|
.finish()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -231,7 +234,7 @@ impl TransactionQueue {
|
|||||||
};
|
};
|
||||||
|
|
||||||
self.by_hash.insert(hash, tx);
|
self.by_hash.insert(hash, tx);
|
||||||
self.notify(&promoted);
|
self.notify(&promoted, TxStatus::Added);
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -343,6 +346,8 @@ impl TransactionQueue {
|
|||||||
trace!(target: "txqueue", "Culled {} old transactions from sender {} (nonce={})",
|
trace!(target: "txqueue", "Culled {} old transactions from sender {} (nonce={})",
|
||||||
removed_hashes.len(), address, cur_nonce);
|
removed_hashes.len(), address, cur_nonce);
|
||||||
|
|
||||||
|
self.notify(&removed_hashes, TxStatus::Culled);
|
||||||
|
|
||||||
for hash in removed_hashes {
|
for hash in removed_hashes {
|
||||||
self.by_hash.remove(&hash);
|
self.by_hash.remove(&hash);
|
||||||
}
|
}
|
||||||
@ -354,15 +359,40 @@ impl TransactionQueue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Add a transaction queue listener.
|
/// Add a transaction queue listener.
|
||||||
pub fn add_listener(&mut self, f: Listener) {
|
pub fn pending_transactions_receiver(&mut self) -> mpsc::UnboundedReceiver<Arc<Vec<H256>>> {
|
||||||
self.listeners.push(f);
|
let (sender, receiver) = mpsc::unbounded();
|
||||||
|
self.pending_listeners.push(sender);
|
||||||
|
receiver
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a transaction queue listener.
|
||||||
|
pub fn full_transactions_receiver(&mut self) -> mpsc::UnboundedReceiver<Arc<Vec<(H256, TxStatus)>>> {
|
||||||
|
let (sender, receiver) = mpsc::unbounded();
|
||||||
|
self.full_listeners.push(sender);
|
||||||
|
receiver
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Notifies all listeners about new pending transaction.
|
/// Notifies all listeners about new pending transaction.
|
||||||
fn notify(&self, hashes: &[H256]) {
|
fn notify(&mut self, hashes: &[H256], status: TxStatus) {
|
||||||
for listener in &self.listeners {
|
if status == TxStatus::Added {
|
||||||
listener(hashes)
|
let to_pending_send: Arc<Vec<H256>> = Arc::new(
|
||||||
|
hashes
|
||||||
|
.into_iter()
|
||||||
|
.map(|hash| hash.clone())
|
||||||
|
.collect()
|
||||||
|
);
|
||||||
|
self.pending_listeners.retain(|listener| listener.unbounded_send(to_pending_send.clone()).is_ok());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let to_full_send: Arc<Vec<(H256, TxStatus)>> = Arc::new(
|
||||||
|
hashes
|
||||||
|
.into_iter()
|
||||||
|
.map(|hash| (hash.clone(), status))
|
||||||
|
.collect()
|
||||||
|
);
|
||||||
|
|
||||||
|
self.full_listeners.retain(|listener| listener.unbounded_send(to_full_send.clone()).is_ok());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -374,7 +404,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn queued_senders() {
|
fn queued_senders() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
let tx = Transaction::default().fake_sign(sender);
|
let tx = Transaction::default().fake_sign(sender);
|
||||||
|
|
||||||
@ -390,7 +420,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn next_nonce() {
|
fn next_nonce() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
for i in (0..5).chain(10..15) {
|
for i in (0..5).chain(10..15) {
|
||||||
@ -421,7 +451,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn current_to_future() {
|
fn current_to_future() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
for i in 5..10 {
|
for i in 5..10 {
|
||||||
@ -464,7 +494,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn conditional() {
|
fn conditional() {
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
|
|
||||||
for i in 0..5 {
|
for i in 0..5 {
|
||||||
let mut tx = Transaction::default();
|
let mut tx = Transaction::default();
|
||||||
@ -486,7 +516,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn cull_from_future() {
|
fn cull_from_future() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
for i in (0..1).chain(3..10) {
|
for i in (0..1).chain(3..10) {
|
||||||
@ -506,7 +536,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn import_old() {
|
fn import_old() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
let mut tx_a = Transaction::default();
|
let mut tx_a = Transaction::default();
|
||||||
@ -523,7 +553,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn replace_is_removed() {
|
fn replace_is_removed() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
let tx_b: PendingTransaction = Transaction::default().fake_sign(sender).into();
|
let tx_b: PendingTransaction = Transaction::default().fake_sign(sender).into();
|
||||||
@ -543,7 +573,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn future_transactions() {
|
fn future_transactions() {
|
||||||
let sender = Address::default();
|
let sender = Address::zero();
|
||||||
let mut txq = TransactionQueue::default();
|
let mut txq = TransactionQueue::default();
|
||||||
|
|
||||||
for i in (0..1).chain(3..10) {
|
for i in (0..1).chain(3..10) {
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user