Compare commits
54 Commits
main ... v1.9.3-ci3
SHA1
57b8efb86a
8f841767a4
d30c035440
89dc08a5cd
fa0e2a7449
1de095f3bd
b60511e3d2
af70a681d5
7adfb82076
d504ce64e8
0feb0bb6e7
3b5a8d5d69
aca9f13d45
e09bef98fb
ceb590a360
75c0db2b15
70b42345c5
a42d780d02
582fa8ce45
73be0fb096
627d1a4971
a7807106f5
33b39f0725
53ec1141cf
145229d46d
568dc33a02
cf10450108
fe779686ca
58c1dbe322
14b578832d
e961398393
0fad2a6d8c
f3bcada7b9
b814f1ccbf
cad91df2b8
50a58e1ae8
1e36fc5d0f
fa6a0a6b60
a8fc42d282
c6685a7f57
736a8c40f0
5f74f8c265
97ed569588
6766ef988d
8a87cfb893
54aebdcb45
86a6145d76
718020b64b
8c36a56365
7bccaa5c15
98ec46fff6
8dc584ece9
63d154dad3
0030bb4f1d
@@ -9,7 +9,7 @@ trim_trailing_whitespace=true
 max_line_length=120
 insert_final_newline=true

-[.travis.yml]
+[*.{yml,sh}]
 indent_style=space
 indent_size=2
 tab_width=8
.gitlab-ci.yml (556 changed lines)
@@ -4,13 +4,15 @@ stages:
|
||||
- push-release
|
||||
- build
|
||||
variables:
|
||||
SIMPLECOV: "true"
|
||||
RUST_BACKTRACE: "1"
|
||||
RUSTFLAGS: ""
|
||||
CARGOFLAGS: ""
|
||||
CI_SERVER_NAME: "GitLab CI"
|
||||
LIBSSL: "libssl1.0.0 (>=1.0.0)"
|
||||
cache:
|
||||
key: "$CI_BUILD_STAGE/$CI_BUILD_REF_NAME"
|
||||
key: "$CI_BUILD_STAGE-$CI_BUILD_REF_NAME"
|
||||
paths:
|
||||
- target
|
||||
untracked: true
|
||||
linux-stable:
|
||||
stage: build
|
||||
@@ -22,77 +24,14 @@ linux-stable:
|
||||
- triggers
|
||||
script:
|
||||
- rustup default stable
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh amd64
|
||||
- cp target/release/parity deb/usr/bin/parity
|
||||
- cp target/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
|
||||
- md5sum "parity_"$VER"_amd64.deb" > "parity_"$VER"_amd64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
# ARGUMENTS: 1. BUILD_PLATFORM (target for binaries) 2. PLATFORM (target for cargo) 3. ARC (architecture) 4. & 5. CC & CXX flags 6. binary identifier
|
||||
- scripts/gitlab-build.sh x86_64-unknown-linux-gnu x86_64-unknown-linux-gnu amd64 gcc g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- target/release/parity-evm
|
||||
- target/release/ethstore
|
||||
- target/release/ethkey
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-linux-gnu_parity"
|
||||
linux-snap:
|
||||
stage: build
|
||||
image: parity/snapcraft:gitlab-ci
|
||||
only:
|
||||
- snap
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
script:
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- cd snap
|
||||
- rm -rf *snap
|
||||
- sed -i 's/master/'"$VER"'/g' snapcraft.yaml
|
||||
- echo "Version:"$VER
|
||||
- snapcraft
|
||||
- ls
|
||||
- cp "parity_"$CI_BUILD"_REF_NAME_amd64.snap" "parity_"$VER"_amd64.snap"
|
||||
- md5sum "parity_"$VER"_amd64.snap" > "parity_"$VER"_amd64.snap.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.snap" --body "parity_"$VER"_amd64.snap"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.snap.md5" --body "parity_"$VER"_amd64.snap.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- scripts/parity_*_amd64.snap
|
||||
name: "stable-x86_64-unknown-snap-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-stable-debian:
|
||||
stage: build
|
||||
image: parity/rust-debian:gitlab-ci
|
||||
@@ -102,81 +41,14 @@ linux-stable-debian:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh amd64
|
||||
- cp target/release/parity deb/usr/bin/parity
|
||||
- cp target/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
|
||||
- md5sum "parity_"$VER"_amd64.deb" > "parity_"$VER"_amd64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- export LIBSSL="libssl1.1 (>=1.1.0)"
|
||||
- scripts/gitlab-build.sh x86_64-unknown-debian-gnu x86_64-unknown-linux-gnu amd64 gcc g++ debian
|
||||
tags:
|
||||
- rust
|
||||
- rust-debian
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-debian-gnu_parity"
|
||||
linux-beta:
|
||||
stage: build
|
||||
image: parity/rust:gitlab-ci
|
||||
only:
|
||||
- beta
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- rustup default beta
|
||||
- cargo build -j $(nproc) --release $CARGOFLAGS
|
||||
- strip target/release/parity
|
||||
tags:
|
||||
- rust
|
||||
- rust-beta
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
name: "beta-x86_64-unknown-linux-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-nightly:
|
||||
stage: build
|
||||
image: parity/rust:gitlab-ci
|
||||
only:
|
||||
- beta
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- rustup default nightly
|
||||
- cargo build -j $(nproc) --release $CARGOFLAGS
|
||||
- strip target/release/parity
|
||||
tags:
|
||||
- rust
|
||||
- rust-nightly
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
name: "nigthly-x86_64-unknown-linux-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-centos:
|
||||
stage: build
|
||||
image: parity/rust-centos:gitlab-ci
|
||||
@@ -186,42 +58,12 @@ linux-centos:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CXX="g++"
|
||||
- export CC="gcc"
|
||||
- export PLATFORM=x86_64-unknown-centos-gnu
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- md5sum target/release/parity-evm > parity-evm.md5
|
||||
- md5sum target/release/ethstore > ethstore.md5
|
||||
- md5sum target/release/ethkey > ethkey.md5
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity-evm --body target/release/parity-evm
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity-evm.md5 --body parity-evm.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethstore --body target/release/ethstore
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethstore.md5 --body ethstore.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethkey --body target/release/ethkey
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethkey.md5 --body ethkey.md5
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh x86_64-unknown-centos-gnu x86_64-unknown-linux-gnu x86_64 gcc g++ centos
|
||||
tags:
|
||||
- rust
|
||||
- rust-centos
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "x86_64-unknown-centos-gnu_parity"
|
||||
linux-i686:
|
||||
stage: build
|
||||
@@ -232,47 +74,13 @@ linux-i686:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export COMMIT=$(git rev-parse HEAD)
|
||||
- export PLATFORM=i686-unknown-linux-gnu
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- strip target/$PLATFORM/release/parity
|
||||
- strip target/$PLATFORM/release/parity-evm
|
||||
- strip target/$PLATFORM/release/ethstore
|
||||
- strip target/$PLATFORM/release/ethkey
|
||||
- strip target/$PLATFORM/release/parity
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- export SHA3=$(target/$PLATFORM/release/parity tools hash target/$PLATFORM/release/parity)
|
||||
- sh scripts/deb-build.sh i386
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_i386.deb"
|
||||
- md5sum "parity_"$VER"_i386.deb" > "parity_"$VER"_i386.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh i686-unknown-linux-gnu i686-unknown-linux-gnu i386 gcc g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-i686
|
||||
artifacts:
|
||||
paths:
|
||||
- target/i686-unknown-linux-gnu/release/parity
|
||||
- parity.zip
|
||||
name: "i686-unknown-linux-gnu"
|
||||
allow_failure: true
|
||||
linux-armv7:
|
||||
stage: build
|
||||
image: parity/rust-armv7:gitlab-ci
|
||||
@@ -282,55 +90,13 @@ linux-armv7:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=arm-linux-gnueabihf-gcc
|
||||
- export CXX=arm-linux-gnueabihf-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=armv7-unknown-linux-gnueabihf
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- export SHA3=$(target/$PLATFORM/release/parity tools hash target/$PLATFORM/release/parity)
|
||||
- sh scripts/deb-build.sh i386
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity-evm
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethstore
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh armhf
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
|
||||
- md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh armv7-unknown-linux-gnueabihf armv7-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/armv7-unknown-linux-gnueabihf/release/parity
|
||||
- parity.zip
|
||||
name: "armv7_unknown_linux_gnueabihf_parity"
|
||||
allow_failure: true
|
||||
linux-arm:
|
||||
stage: build
|
||||
image: parity/rust-arm:gitlab-ci
|
||||
@@ -340,52 +106,13 @@ linux-arm:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=arm-linux-gnueabihf-gcc
|
||||
- export CXX=arm-linux-gnueabihf-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=arm-unknown-linux-gnueabihf
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity-evm
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethstore
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh armhf
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
|
||||
- md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh arm-unknown-linux-gnueabihf arm-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/arm-unknown-linux-gnueabihf/release/parity
|
||||
- parity.zip
|
||||
name: "arm-unknown-linux-gnueabihf_parity"
|
||||
allow_failure: true
|
||||
linux-aarch64:
|
||||
stage: build
|
||||
image: parity/rust-arm64:gitlab-ci
|
||||
@@ -395,50 +122,29 @@ linux-aarch64:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=aarch64-linux-gnu-gcc
|
||||
- export CXX=aarch64-linux-gnu-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=aarch64-unknown-linux-gnu
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/parity
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/parity-evm
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/ethstore
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh arm64
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_arm64.deb"
|
||||
- md5sum "parity_"$VER"_arm64.deb" > "parity_"$VER"_arm64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh aarch64-unknown-linux-gnu aarch64-unknown-linux-gnu arm64 aarch64-linux-gnu-gcc aarch64-linux-gnu-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/aarch64-unknown-linux-gnu/release/parity
|
||||
- parity.zip
|
||||
name: "aarch64-unknown-linux-gnu_parity"
|
||||
linux-snap:
|
||||
stage: build
|
||||
image: snapcore/snapcraft:stable
|
||||
only:
|
||||
- stable
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
script:
|
||||
- scripts/gitlab-build.sh x86_64-unknown-snap-gnu x86_64-unknown-linux-gnu amd64 gcc g++ snap
|
||||
tags:
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-snap-gnu_parity"
|
||||
allow_failure: true
|
||||
darwin:
|
||||
stage: build
|
||||
@@ -447,45 +153,17 @@ darwin:
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script: |
|
||||
export COMMIT=$(git rev-parse HEAD)
|
||||
export PLATFORM=x86_64-apple-darwin
|
||||
rustup default stable
|
||||
cargo clean
|
||||
cargo build -j 8 --features final --release #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p ethstore-cli #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p ethkey-cli #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p evmbin #$CARGOFLAGS
|
||||
rm -rf parity.md5
|
||||
md5sum target/release/parity > parity.md5
|
||||
export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
cd mac
|
||||
xcodebuild -configuration Release
|
||||
cd ..
|
||||
packagesbuild -v mac/Parity.pkgproj
|
||||
productsign --sign 'Developer ID Installer: PARITY TECHNOLOGIES LIMITED (P2PX3JU8FT)' target/release/Parity\ Ethereum.pkg target/release/Parity\ Ethereum-signed.pkg
|
||||
export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
mv target/release/Parity\ Ethereum-signed.pkg "parity-"$VER"-macos-installer.pkg"
|
||||
md5sum "parity-"$VER"-macos-installer.pkg" >> "parity-"$VER"-macos-installer.pkg.md5"
|
||||
aws configure set aws_access_key_id $s3_key
|
||||
aws configure set aws_secret_access_key $s3_secret
|
||||
if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/release/parity
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-macos-installer.pkg" --body "parity-"$VER"-macos-installer.pkg"
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-macos-installer.pkg.md5" --body "parity-"$VER"-macos-installer.pkg.md5"
|
||||
curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
script:
|
||||
- scripts/gitlab-build.sh x86_64-apple-darwin x86_64-apple-darwin macos gcc g++ macos
|
||||
tags:
|
||||
- osx
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "x86_64-apple-darwin_parity"
|
||||
windows:
|
||||
cache:
|
||||
key: "%CI_BUILD_STAGE%/%CI_BUILD_REF_NAME%"
|
||||
key: "%CI_BUILD_STAGE%-%CI_BUILD_REF_NAME%"
|
||||
untracked: true
|
||||
stage: build
|
||||
only:
|
||||
@@ -494,62 +172,12 @@ windows:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- set PLATFORM=x86_64-pc-windows-msvc
|
||||
- set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
|
||||
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
|
||||
- set RUST_BACKTRACE=1
|
||||
- set RUSTFLAGS=%RUSTFLAGS%
|
||||
- rustup default stable-x86_64-pc-windows-msvc
|
||||
- cargo clean
|
||||
- cargo build --features final --release #%CARGOFLAGS%
|
||||
- cargo build --release -p ethstore-cli #%CARGOFLAGS%
|
||||
- cargo build --release -p ethkey-cli #%CARGOFLAGS%
|
||||
- cargo build --release -p evmbin #%CARGOFLAGS%
|
||||
- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
|
||||
- target\release\parity.exe tools hash target\release\parity.exe > parity.sha3
|
||||
- set /P SHA3=<parity.sha3
|
||||
- curl -sL --url "https://github.com/paritytech/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
|
||||
- curl -sL --url "https://github.com/paritytech/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
|
||||
- msbuild windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release
|
||||
- signtool sign /f %keyfile% /p %certpass% windows\ptray\x64\release\ptray.exe
|
||||
- cd nsis
|
||||
- makensis.exe installer.nsi
|
||||
- copy installer.exe InstallParity.exe
|
||||
- signtool sign /f %keyfile% /p %certpass% InstallParity.exe
|
||||
- md5sums InstallParity.exe > InstallParity.exe.md5
|
||||
- zip win-installer.zip InstallParity.exe InstallParity.exe.md5
|
||||
- md5sums win-installer.zip > win-installer.zip.md5
|
||||
- cd ..\target\release\
|
||||
- md5sums parity.exe > parity.exe.md5
|
||||
- zip parity.zip parity.exe parity.md5
|
||||
- md5sums parity.zip > parity.zip.md5
|
||||
- cd ..\..
|
||||
- aws configure set aws_access_key_id %s3_key%
|
||||
- aws configure set aws_secret_access_key %s3_secret%
|
||||
- echo %CI_BUILD_REF_NAME%
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "master" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "beta" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "stable" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "nightly" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %S3_BUCKET%
|
||||
- aws s3 rm --recursive s3://%S3_BUCKET%/%CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe --body target\release\parity.exe
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe.md5 --body target\release\parity.exe.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip --body target\release\parity.zip
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip.md5 --body target\release\parity.zip.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe --body nsis\InstallParity.exe
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5
|
||||
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
|
||||
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1338/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
|
||||
- sh scripts/gitlab-build.sh x86_64-pc-windows-msvc x86_64-pc-windows-msvc installer "" "" windows
|
||||
tags:
|
||||
- rust-windows
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity.exe
|
||||
- target/release/parity.pdb
|
||||
- nsis/InstallParity.exe
|
||||
- parity.zip
|
||||
name: "x86_64-pc-windows-msvc_parity"
|
||||
docker-build:
|
||||
stage: build
|
||||
@@ -562,7 +190,7 @@ docker-build:
|
||||
- if [ "$CI_BUILD_REF_NAME" == "beta-release" ]; then DOCKER_TAG="latest"; else DOCKER_TAG=$CI_BUILD_REF_NAME; fi
|
||||
- echo "Tag:" $DOCKER_TAG
|
||||
- docker login -u $Docker_Hub_User_Parity -p $Docker_Hub_Pass_Parity
|
||||
- sh scripts/docker-build.sh $DOCKER_TAG
|
||||
- scripts/docker-build.sh $DOCKER_TAG
|
||||
- docker logout
|
||||
tags:
|
||||
- docker
|
||||
@@ -571,63 +199,16 @@ test-coverage:
|
||||
only:
|
||||
- master
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- rm -rf target/*
|
||||
- rm -rf js/.coverage
|
||||
- scripts/cov.sh
|
||||
# - COVERAGE=$(grep -Po 'covered":.*?[^\\]"' target/cov/index.json | grep "[0-9]*\.[0-9]" -o)
|
||||
# - echo "Coverage:" $COVERAGE
|
||||
- scripts/gitlab-test.sh test-coverage
|
||||
tags:
|
||||
- kcov
|
||||
allow_failure: true
|
||||
test-darwin:
|
||||
stage: test
|
||||
only:
|
||||
- triggers
|
||||
variables:
|
||||
RUST_BACKTRACE: 1
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
tags:
|
||||
- osx
|
||||
allow_failure: true
|
||||
test-windows:
|
||||
stage: test
|
||||
only:
|
||||
- triggers
|
||||
variables:
|
||||
RUST_BACKTRACE: 1
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- echo cargo test --features json-tests -p rlp -p ethash -p ethcore -p ethcore-bigint -p parity-dapps -p parity-rpc -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity-rpc-client -p parity %CARGOFLAGS% --verbose --release
|
||||
tags:
|
||||
- rust-windows
|
||||
allow_failure: true
|
||||
test-rust-stable:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
variables:
|
||||
RUST_BACKTRACE: 1
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- rustup show
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- if [ "$CI_BUILD_REF_NAME" == "nightly" ]; then sh scripts/aura-test.sh; fi
|
||||
- scripts/gitlab-test.sh stable
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
js-test:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
|
||||
- if [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS (old) deps install since no JS files modified."; else ./js-old/scripts/install-deps.sh;fi
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS lint since no JS files modified."; else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh; fi
|
||||
- if [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS (old) lint since no JS files modified."; else ./js-old/scripts/lint.sh && ./js-old/scripts/test.sh && ./js-old/scripts/build.sh; fi
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
test-rust-beta:
|
||||
stage: test
|
||||
@@ -635,14 +216,9 @@ test-rust-beta:
|
||||
- triggers
|
||||
- master
|
||||
image: parity/rust:gitlab-ci
|
||||
variables:
|
||||
RUST_BACKTRACE: 1
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- rustup default beta
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- scripts/gitlab-test.sh beta
|
||||
tags:
|
||||
- rust
|
||||
- rust-beta
|
||||
allow_failure: true
|
||||
test-rust-nightly:
|
||||
@@ -651,36 +227,30 @@ test-rust-nightly:
|
||||
- triggers
|
||||
- master
|
||||
image: parity/rust:gitlab-ci
|
||||
variables:
|
||||
RUST_BACKTRACE: 1
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- rustup default nightly
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- scripts/gitlab-test.sh nightly
|
||||
tags:
|
||||
- rust
|
||||
- rust-nightly
|
||||
allow_failure: true
|
||||
js-test:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- scripts/gitlab-test.sh js-test
|
||||
tags:
|
||||
- rust-stable
|
||||
js-release:
|
||||
stage: js-build
|
||||
only:
|
||||
- master
|
||||
- beta
|
||||
- stable
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- rustup default stable
|
||||
- echo $JS_FILES_MODIFIED
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS rebuild since no JS files modified."; else ./js/scripts/build.sh && ./js/scripts/push-precompiled.sh; fi
|
||||
|
||||
- echo $JS_OLD_FILES_MODIFIED
|
||||
- if [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS (old) deps install since no JS files modified."; else ./js-old/scripts/install-deps.sh;fi
|
||||
- if [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS (old) rebuild since no JS files modified."; else ./js-old/scripts/build.sh && ./js-old/scripts/push-precompiled.sh; fi
|
||||
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ] && [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping Cargo update since no JS files modified."; else ./js/scripts/push-cargo.sh; fi
|
||||
- scripts/gitlab-test.sh js-release
|
||||
tags:
|
||||
- javascript
|
||||
push-release:
|
||||
@@ -690,18 +260,6 @@ push-release:
|
||||
- triggers
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- rustup default stable
|
||||
- curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
|
||||
- curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
|
||||
- scripts/gitlab-push-release.sh
|
||||
tags:
|
||||
- curl
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
.functions: &functions |
|
||||
export JS_FILES_MODIFIED=$(git --no-pager diff --name-only master...$CI_BUILD_REF | grep ^js/ | wc -l)
|
||||
export JS_OLD_FILES_MODIFIED=$(git --no-pager diff --name-only master...$CI_BUILD_REF | grep ^js-old/ | wc -l)
|
||||
export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only master...$CI_BUILD_REF | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^test.sh -e ^windows/ -e ^scripts/ -e^mac/ -e ^nsis/ | wc -l)
|
||||
|
||||
before_script:
|
||||
- *functions
|
||||
|
||||
|
Cargo.lock (generated; 102 changed lines)
@@ -25,6 +25,11 @@ name = "ansi_term"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "ansi_term"
|
||||
version = "0.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "app_dirs"
|
||||
version = "1.1.1"
|
||||
@@ -163,6 +168,11 @@ name = "bitflags"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bloomable"
|
||||
version = "0.1.0"
|
||||
@@ -234,15 +244,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "2.26.2"
|
||||
version = "2.29.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ansi_term 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -663,6 +672,8 @@ dependencies = [
|
||||
"rlp 0.2.1",
|
||||
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"snappy 0.1.0 (git+https://github.com/paritytech/rust-snappy)",
|
||||
@@ -1903,11 +1914,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "parity"
|
||||
version = "1.9.0"
|
||||
version = "1.9.3"
|
||||
dependencies = [
|
||||
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"app_dirs 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ctrlc 1.1.1 (git+https://github.com/paritytech/rust-ctrlc.git)",
|
||||
"daemonize 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"dir 0.1.0",
|
||||
@@ -1951,7 +1962,7 @@ dependencies = [
|
||||
"parity-rpc 1.9.0",
|
||||
"parity-rpc-client 1.4.0",
|
||||
"parity-updater 1.9.0",
|
||||
"parity-version 1.9.0",
|
||||
"parity-version 1.9.3",
|
||||
"parity-whisper 0.1.0",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"path 0.1.0",
|
||||
@@ -1966,6 +1977,8 @@ dependencies = [
|
||||
"serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -1997,7 +2010,7 @@ dependencies = [
|
||||
"parity-hash-fetch 1.9.0",
|
||||
"parity-reactor 0.1.0",
|
||||
"parity-ui 1.9.0",
|
||||
"parity-version 1.9.0",
|
||||
"parity-version 1.9.3",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -2145,7 +2158,7 @@ dependencies = [
|
||||
"order-stat 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-reactor 0.1.0",
|
||||
"parity-updater 1.9.0",
|
||||
"parity-version 1.9.0",
|
||||
"parity-version 1.9.3",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -2204,8 +2217,8 @@ version = "1.9.0"
|
||||
dependencies = [
|
||||
"parity-ui-dev 1.9.0",
|
||||
"parity-ui-old-dev 1.9.0",
|
||||
"parity-ui-old-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-master-1-9-v1.git)",
|
||||
"parity-ui-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-master-1-9-shell.git)",
|
||||
"parity-ui-old-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-beta-1-9-v1.git)",
|
||||
"parity-ui-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-beta-1-9-shell.git)",
|
||||
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@@ -2226,7 +2239,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "parity-ui-old-precompiled"
|
||||
version = "1.9.0"
|
||||
source = "git+https://github.com/js-dist-paritytech/parity-master-1-9-v1.git#ae75b135453ed081a6beb7c2bf3e3aa2b9957f69"
|
||||
source = "git+https://github.com/js-dist-paritytech/parity-beta-1-9-v1.git#f1ae0ad8b8b5ccb1783dcb776fc57ac1da07033e"
|
||||
dependencies = [
|
||||
"parity-dapps-glue 1.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -2234,7 +2247,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "parity-ui-precompiled"
|
||||
version = "1.9.0"
|
||||
source = "git+https://github.com/js-dist-paritytech/parity-master-1-9-shell.git#47c6e031f8f5b16af8e1d99cb5c6f27055c0156d"
|
||||
source = "git+https://github.com/js-dist-paritytech/parity-beta-1-9-shell.git#2f1068af8858046854042dbef03a510aef5d24e2"
|
||||
dependencies = [
|
||||
"parity-dapps-glue 1.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -2253,7 +2266,7 @@ dependencies = [
|
||||
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-hash-fetch 1.9.0",
|
||||
"parity-reactor 0.1.0",
|
||||
"parity-version 1.9.0",
|
||||
"parity-version 1.9.3",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"path 0.1.0",
|
||||
"semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -2262,23 +2275,24 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "parity-version"
|
||||
version = "1.9.0"
|
||||
version = "1.9.3"
|
||||
dependencies = [
|
||||
"ethcore-bytes 0.1.0",
|
||||
"rlp 0.2.1",
|
||||
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"vergen 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parity-wasm"
|
||||
version = "0.15.3"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2329,6 +2343,15 @@ dependencies = [
|
||||
"parking_lot_core 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot_core 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.2.6"
|
||||
@@ -2505,7 +2528,7 @@ dependencies = [
|
||||
name = "pwasm-run-test"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore-bigint 0.2.1",
|
||||
"ethcore-logger 1.9.0",
|
||||
"ethjson 0.1.0",
|
||||
@@ -2671,7 +2694,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rocksdb"
|
||||
version = "0.4.5"
|
||||
source = "git+https://github.com/paritytech/rust-rocksdb#166e14ed63cbd2e44b51267b8b98e4b89b0f236f"
|
||||
source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e"
|
||||
dependencies = [
|
||||
"libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -2681,10 +2704,11 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rocksdb-sys"
|
||||
version = "0.3.0"
|
||||
source = "git+https://github.com/paritytech/rust-rocksdb#166e14ed63cbd2e44b51267b8b98e4b89b0f236f"
|
||||
source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e"
|
||||
dependencies = [
|
||||
"cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"snappy-sys 0.1.0 (git+https://github.com/paritytech/rust-snappy)",
|
||||
]
|
||||
|
||||
@@ -3112,10 +3136,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "textwrap"
|
||||
version = "0.8.0"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@@ -3464,24 +3487,35 @@ dependencies = [
|
||||
"ethcore-bigint 0.2.1",
|
||||
"ethcore-logger 1.9.0",
|
||||
"ethcore-util 1.9.0",
|
||||
"libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-wasm 0.15.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-wasm 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"vm 0.1.0",
|
||||
"wasm-utils 0.1.0 (git+https://github.com/paritytech/wasm-utils)",
|
||||
"wasmi 0.0.0 (git+https://github.com/pepyakin/wasmi)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-utils"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/paritytech/wasm-utils#3d59f7ca0661317bc66894a26b2a5a319fa5d229"
|
||||
source = "git+https://github.com/paritytech/wasm-utils#6fdc1c4ed47a6acb0a4774da505a416dd637bc6d"
|
||||
dependencies = [
|
||||
"byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-wasm 0.15.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-wasm 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasmi"
|
||||
version = "0.0.0"
|
||||
source = "git+https://github.com/pepyakin/wasmi#551c99273042deaad869c17798060e2212deacab"
|
||||
dependencies = [
|
||||
"byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-wasm 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3555,6 +3589,7 @@ dependencies = [
|
||||
"checksum adler32 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6cbd0b9af8587c72beadc9f72d35b9fbb070982c9e6203e46e93f10df25f8f45"
|
||||
"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
|
||||
"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
|
||||
"checksum ansi_term 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3568b48b7cefa6b8ce125f9bb4989e52fbcc29ebea88df04cc7c5f12f70455"
|
||||
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
|
||||
"checksum app_dirs 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b7d1c0d48a81bbb13043847f957971f4d87c81542d80ece5e84ba3cba4058fd4"
|
||||
"checksum arrayvec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1c0250693b17316353df525fb088da32a8c18f84eb65d113dde31f5a76ed17b6"
|
||||
@@ -3573,6 +3608,7 @@ dependencies = [
|
||||
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
|
||||
"checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
|
||||
"checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
|
||||
"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
|
||||
"checksum bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f421095d2a76fc24cd3fb3f912b90df06be7689912b1bdb423caefae59c258d"
|
||||
"checksum bn 0.4.4 (git+https://github.com/paritytech/bn)" = "<none>"
|
||||
"checksum byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff81738b726f5d099632ceaffe7fb65b90212e8dce59d518729e7e8634032d3d"
|
||||
@@ -3580,7 +3616,7 @@ dependencies = [
|
||||
"checksum cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7db2f146208d7e0fbee761b09cd65a7f51ccc38705d4e7262dad4d73b12a76b1"
|
||||
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
|
||||
"checksum cid 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "34aa7da06f10541fbca6850719cdaa8fa03060a5d2fb33840f149cf8133a00c7"
|
||||
"checksum clap 2.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3451e409013178663435d6f15fdb212f14ee4424a3d74f979d081d0a66b6f1f2"
|
||||
"checksum clap 2.29.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8f4a2b3bb7ef3c672d7c13d15613211d5a6976b6892c598b0fcb5d40765f19c2"
|
||||
"checksum coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd"
|
||||
"checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299"
|
||||
"checksum cookie 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d53b80dde876f47f03cda35303e368a79b91c70b0d65ecba5fd5280944a08591"
|
||||
@ -3695,11 +3731,12 @@ dependencies = [
|
||||
"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
|
||||
"checksum parity-dapps-glue 1.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "261c025c67ba416e9fe63aa9b3236520ce3c74cfbe43590c9cdcec4ccc8180e4"
|
||||
"checksum parity-tokio-ipc 0.1.5 (git+https://github.com/nikvolf/parity-tokio-ipc)" = "<none>"
|
||||
"checksum parity-ui-old-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-master-1-9-v1.git)" = "<none>"
|
||||
"checksum parity-ui-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-master-1-9-shell.git)" = "<none>"
|
||||
"checksum parity-wasm 0.15.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8431a184ad88cfbcd71a792aaca319cc7203a94300c26b8dce2d0df0681ea87d"
|
||||
"checksum parity-ui-old-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-beta-1-9-v1.git)" = "<none>"
|
||||
"checksum parity-ui-precompiled 1.9.0 (git+https://github.com/js-dist-paritytech/parity-beta-1-9-shell.git)" = "<none>"
|
||||
"checksum parity-wasm 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1ba4b1d4236b76694f6ab8d8d00cdbe1e37c6dd1b5c803d26721f27e097d4d9"
|
||||
"checksum parity-wordlist 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d0dec124478845b142f68b446cbee953d14d4b41f1bc0425024417720dce693"
|
||||
"checksum parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "149d8f5b97f3c1133e3cfcd8886449959e856b557ff281e292b733d7c69e005e"
|
||||
"checksum parking_lot 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3e7f7c9857874e54afeb950eebeae662b1e51a2493666d2ea4c0a5d91dcf0412"
|
||||
"checksum parking_lot_core 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4f610cb9664da38e417ea3225f23051f589851999535290e077939838ab7a595"
|
||||
"checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356"
|
||||
"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
|
||||
@ -3784,7 +3821,7 @@ dependencies = [
|
||||
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
|
||||
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
|
||||
"checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
|
||||
"checksum textwrap 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df8e08afc40ae3459e4838f303e465aa50d823df8d7f83ca88108f6d3afe7edd"
|
||||
"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
|
||||
"checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
|
||||
"checksum threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e2f0c90a5f3459330ac8bc0d2f879c693bb7a2f59689c1083fc4ef83834da865"
|
||||
"checksum time 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)" = "d5d788d3aa77bc0ef3e9621256885555368b47bd495c13dd2e7413c89f845520"
|
||||
@ -3821,6 +3858,7 @@ dependencies = [
|
||||
"checksum version_check 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6b772017e347561807c1aa192438c5fd74242a670a6cffacc40f2defd1dc069d"
|
||||
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
|
||||
"checksum wasm-utils 0.1.0 (git+https://github.com/paritytech/wasm-utils)" = "<none>"
|
||||
"checksum wasmi 0.0.0 (git+https://github.com/pepyakin/wasmi)" = "<none>"
|
||||
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
||||
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||
"checksum ws 0.7.1 (git+https://github.com/tomusdrw/ws-rs)" = "<none>"
|
||||
|
@ -1,7 +1,7 @@
[package]
description = "Parity Ethereum client"
name = "parity"
version = "1.9.0"
version = "1.9.3"
license = "GPL-3.0"
authors = ["Parity Technologies <admin@parity.io>"]
build = "build.rs"
@ -12,6 +12,8 @@ env_logger = "0.4"
rustc-hex = "1.0"
docopt = "0.8"
clap = "2"
term_size = "0.3"
textwrap = "0.9"
time = "0.1"
num_cpus = "1.2"
number_prefix = "0.2"

@ -1,4 +1,4 @@
# [Parity](https://parity.io/) - fast, light, and robust Ethereum client
# [Parity](https://parity.io/) - fast, light, and robust Ethereum client

[](https://gitlab.parity.io/parity/parity/commits/master)
[](https://build.snapcraft.io/user/paritytech/parity)

@ -47,6 +47,8 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd

// Content Security Policy headers
headers.set_raw("Content-Security-Policy", String::new()
// Restrict everything to the same origin by default.
+ "default-src 'self';"
// Allow connecting to WS servers and HTTP(S) servers.
// We could be more restrictive and allow only RPC server URL.
+ "connect-src http: https: ws: wss:;"
@ -64,7 +66,9 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
+ "style-src 'self' 'unsafe-inline' data: blob: https:;"
// Allow fonts from data: and HTTPS.
+ "font-src 'self' data: https:;"
// Allow inline scripts and scripts eval (webpack/jsconsole)
// Disallow objects
+ "object-src 'none';"
// Allow scripts
+ {
let script_src = embeddable_on.as_ref()
.map(|e| e.extra_script_src.iter()
@ -72,18 +76,16 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
.join(" ")
).unwrap_or_default();
&format!(
"script-src 'self' 'unsafe-inline' 'unsafe-eval' {};",
"script-src 'self' {};",
script_src
)
}
// Same restrictions as script-src with additional
// blob: that is required for camera access (worker)
+ "worker-src 'self' 'unsafe-inline' 'unsafe-eval' https: blob:;"
// Restrict everything else to the same origin.
+ "default-src 'self';"
+ "worker-src 'self' https: blob:;"
// Run in sandbox mode (although it's not fully safe since we allow same-origin and script)
+ "sandbox allow-same-origin allow-forms allow-modals allow-popups allow-presentation allow-scripts;"
// Disallow subitting forms from any dapps
// Disallow submitting forms from any dapps
+ "form-action 'none';"
// Never allow mixed content
+ "block-all-mixed-content;"

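For orientation, the directives added above end up concatenated into a single Content-Security-Policy value. A minimal, hedged sketch of the same string-building pattern (the standalone function and the placeholder script_src parameter are illustrative, not part of the diff):

// Illustrative only: assembles a CSP value the same way the code above does,
// by appending one directive fragment at a time onto a String.
fn example_csp(script_src: &str) -> String {
    String::new()
        + "default-src 'self';"
        + "connect-src http: https: ws: wss:;"
        + "object-src 'none';"
        + &format!("script-src 'self' {};", script_src)
        + "worker-src 'self' https: blob:;"
        + "form-action 'none';"
        + "block-all-mixed-content;"
}
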
@ -13,8 +13,8 @@ rustc_version = "0.1"
parity-ui-dev = { path = "../../js", optional = true }
parity-ui-old-dev = { path = "../../js-old", optional = true }
# This is managed by the js/scripts/release.sh script on CI - keep it in a single line
parity-ui-old-precompiled = { git = "https://github.com/js-dist-paritytech/parity-master-1-9-v1.git", optional = true }
parity-ui-precompiled = { git = "https://github.com/js-dist-paritytech/parity-master-1-9-shell.git", optional = true }
parity-ui-old-precompiled = { git = "https://github.com/js-dist-paritytech/parity-beta-1-9-v1.git", optional = true }
parity-ui-precompiled = { git = "https://github.com/js-dist-paritytech/parity-beta-1-9-shell.git", optional = true }

[features]
no-precompiled-js = ["parity-ui-dev", "parity-ui-old-dev"]

@ -90,7 +90,14 @@ impl Pending {
match self.requests[idx].respond_local(cache) {
Some(response) => {
self.requests.supply_response_unchecked(&response);

// update header and back-references after each from-cache
// response to ensure that the requests are left in a consistent
// state and increase the likelihood of being able to answer
// the next request from cache.
self.update_header_refs(idx, &response);
self.fill_unanswered();

self.responses.push(response);
}
None => break,

@ -52,8 +52,10 @@
"gasLimit": "0x1000000"
},
"nodes": [
"enode://20c9ad97c081d63397d7b685a412227a40e23c8bdc6688c6f37e97cfbc22d2b4d1db1510d8f61e6a8866ad7f0e17c02b14182d37ea7c3c8b9c2683aeb6b733a1@52.169.14.227:30303",
"enode://6ce05930c72abc632c58e2e4324f7c7ea478cec0ed4fa2528982cf34483094e9cbc9216e7aa349691242576d552a2a56aaeae426c5303ded677ce455ba1acd9d@13.84.180.240:30303"
"enode://6332792c4a00e3e4ee0926ed89e0d27ef985424d97b6a45bf0f23e51f0dcb5e66b875777506458aea7af6f9e4ffb69f43f3778ee73c81ed9d34c51c4b16b0b0f@52.232.243.152:30303",
"enode://94c15d1b9e2fe7ce56e458b9a3b672ef11894ddedd0c6f247e0f1d3487f52b66208fb4aeb8179fce6e3a749ea93ed147c37976d67af557508d199d9594c35f09@192.81.208.223:30303",
"enode://30b7ab30a01c124a6cceca36863ece12c4f5fa68e3ba9b0b51407ccc002eeed3b3102d20a88f1c1d3c3154e2449317b8ef95090e77b312d5cc39354f86d5d606@52.176.7.10:30303",
"enode://865a63255b3bb68023b6bffd5095118fcc13e79dcf014fe4e47e065c350c7cc72af2e53eff895f11ba1bbb6a2b33271c1116ee870f266618eadfc2e78aa7349c@52.176.100.77:30303"
],
"accounts": {
"0000000000000000000000000000000000000000": { "balance": "1" },

@ -1 +1 @@
Subproject commit d9d6133c1bc5dca4c74c9eb758a39546a0d46b45
Subproject commit fb111c82deff8759f54a5038d07cecc77cb5a663
@ -264,9 +264,9 @@ impl AccountProvider {
Ok(Address::from(account.address).into())
}

/// Import a new presale wallet.
pub fn import_wallet(&self, json: &[u8], password: &str) -> Result<Address, Error> {
let account = self.sstore.import_wallet(SecretVaultRef::Root, json, password)?;
/// Import a new wallet.
pub fn import_wallet(&self, json: &[u8], password: &str, gen_id: bool) -> Result<Address, Error> {
let account = self.sstore.import_wallet(SecretVaultRef::Root, json, password, gen_id)?;
if self.blacklisted_accounts.contains(&account.address) {
self.sstore.remove_account(&account, password)?;
return Err(SSError::InvalidAccount.into());

@ -44,8 +44,7 @@ use bigint::hash::{H256, H520};
use semantic_version::SemanticVersion;
use parking_lot::{Mutex, RwLock};
use unexpected::{Mismatch, OutOfBounds};
use util::*;
use bytes::Bytes;
use util::Address;

mod finality;

@ -291,9 +290,11 @@ struct EpochVerifier {

impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
fn verify_light(&self, header: &Header) -> Result<(), Error> {
// Validate the timestamp
verify_timestamp(&*self.step, header_step(header)?)?;
// always check the seal since it's fast.
// nothing heavier to do.
verify_external(header, &self.subchain_validators, &*self.step, |_| {})
verify_external(header, &self.subchain_validators)
}

fn check_finality_proof(&self, proof: &[u8]) -> Option<Vec<H256>> {
@ -317,7 +318,7 @@ impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
//
// `verify_external` checks that signature is correct and author == signer.
if header.seal().len() != 2 { return None }
otry!(verify_external(header, &self.subchain_validators, &*self.step, |_| {}).ok());
otry!(verify_external(header, &self.subchain_validators).ok());

let newly_finalized = otry!(finality_checker.push_hash(header.hash(), header.author().clone()).ok());
finalized.extend(newly_finalized);
@ -327,16 +328,6 @@ impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
}
}

// Report misbehavior
#[derive(Debug)]
#[allow(dead_code)]
enum Report {
// Malicious behavior
Malicious(Address, BlockNumber, Bytes),
// benign misbehavior
Benign(Address, BlockNumber),
}

fn header_step(header: &Header) -> Result<usize, ::rlp::DecoderError> {
UntrustedRlp::new(&header.seal().get(0).expect("was either checked with verify_block_basic or is genesis; has 2 fields; qed (Make sure the spec file has a correct genesis seal)")).as_val()
}
@ -355,34 +346,35 @@ fn is_step_proposer(validators: &ValidatorSet, bh: &H256, step: usize, address:
step_proposer(validators, bh, step) == *address
}

fn verify_external<F: Fn(Report)>(header: &Header, validators: &ValidatorSet, step: &Step, report: F)
-> Result<(), Error>
{
let header_step = header_step(header)?;

fn verify_timestamp(step: &Step, header_step: usize) -> Result<(), BlockError> {
match step.check_future(header_step) {
Err(None) => {
trace!(target: "engine", "verify_block_external: block from the future");
report(Report::Benign(*header.author(), header.number()));
return Err(BlockError::InvalidSeal.into())
trace!(target: "engine", "verify_timestamp: block from the future");
Err(BlockError::InvalidSeal.into())
},
Err(Some(oob)) => {
trace!(target: "engine", "verify_block_external: block too early");
return Err(BlockError::TemporarilyInvalid(oob).into())
// NOTE This error might be returned only in early stage of verification (Stage 1).
// Returning it further won't recover the sync process.
trace!(target: "engine", "verify_timestamp: block too early");
Err(BlockError::TemporarilyInvalid(oob).into())
},
Ok(_) => {
let proposer_signature = header_signature(header)?;
let correct_proposer = validators.get(header.parent_hash(), header_step);
let is_invalid_proposer = *header.author() != correct_proposer ||
!verify_address(&correct_proposer, &proposer_signature, &header.bare_hash())?;
Ok(_) => Ok(()),
}
}

if is_invalid_proposer {
trace!(target: "engine", "verify_block_external: bad proposer for step: {}", header_step);
Err(EngineError::NotProposer(Mismatch { expected: correct_proposer, found: header.author().clone() }))?
} else {
Ok(())
}
}
fn verify_external(header: &Header, validators: &ValidatorSet) -> Result<(), Error> {
let header_step = header_step(header)?;

let proposer_signature = header_signature(header)?;
let correct_proposer = validators.get(header.parent_hash(), header_step);
let is_invalid_proposer = *header.author() != correct_proposer ||
!verify_address(&correct_proposer, &proposer_signature, &header.bare_hash())?;

if is_invalid_proposer {
trace!(target: "engine", "verify_block_external: bad proposer for step: {}", header_step);
Err(EngineError::NotProposer(Mismatch { expected: correct_proposer, found: header.author().clone() }))?
} else {
Ok(())
}
}

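The refactor above splits timestamp checking out of verify_external. Judging from the match arms in verify_timestamp, Step::check_future returns Ok(()) for an acceptable step, Err(None) for a far-future step, and Err(Some(oob)) for a slightly early one; a hedged sketch of that three-way outcome (the generic helper below is illustrative only, not the engine's API):

// Sketch only: mirrors the match in verify_timestamp above.
// Err(None)    -> step is far in the future: treated as an invalid seal.
// Err(Some(_)) -> step is slightly early: temporarily invalid, retry later.
// Ok(())       -> step is acceptable.
fn classify<T>(check: Result<(), Option<T>>) -> &'static str {
    match check {
        Err(None) => "invalid seal (far future)",
        Err(Some(_)) => "temporarily invalid (too early)",
        Ok(()) => "ok",
    }
}
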
@ -655,26 +647,38 @@ impl Engine<EthereumMachine> for AuthorityRound {
/// Check the number of seal fields.
fn verify_block_basic(&self, header: &Header) -> Result<(), Error> {
if header.number() >= self.validate_score_transition && *header.difficulty() >= U256::from(U128::max_value()) {
Err(From::from(BlockError::DifficultyOutOfBounds(
return Err(From::from(BlockError::DifficultyOutOfBounds(
OutOfBounds { min: None, max: Some(U256::from(U128::max_value())), found: *header.difficulty() }
)))
} else {
Ok(())
)));
}

// TODO [ToDr] Should this go from epoch manager?
// If yes then probably benign reporting needs to be moved further in the verification.
let set_number = header.number();

match verify_timestamp(&*self.step, header_step(header)?) {
Err(BlockError::InvalidSeal) => {
self.validators.report_benign(header.author(), set_number, header.number());
Err(BlockError::InvalidSeal.into())
}
Err(e) => Err(e.into()),
Ok(()) => Ok(()),
}
}

/// Do the step and gas limit validation.
fn verify_block_family(&self, header: &Header, parent: &Header) -> Result<(), Error> {
let step = header_step(header)?;

let parent_step = header_step(parent)?;
// TODO [ToDr] Should this go from epoch manager?
let set_number = header.number();

// Ensure header is from the step after parent.
if step == parent_step
|| (header.number() >= self.validate_step_transition && step <= parent_step) {
trace!(target: "engine", "Multiple blocks proposed for step {}.", parent_step);

self.validators.report_malicious(header.author(), header.number(), header.number(), Default::default());
self.validators.report_malicious(header.author(), set_number, header.number(), Default::default());
Err(EngineError::DoubleVote(header.author().clone()))?;
}

@ -687,7 +691,7 @@ impl Engine<EthereumMachine> for AuthorityRound {
let skipped_primary = step_proposer(&*self.validators, &parent.hash(), s);
// Do not report this signer.
if skipped_primary != me {
self.validators.report_benign(&skipped_primary, header.number(), header.number());
self.validators.report_benign(&skipped_primary, set_number, header.number());
}
// Stop reporting once validators start repeating.
if !reported.insert(skipped_primary) { break; }
@ -702,9 +706,8 @@ impl Engine<EthereumMachine> for AuthorityRound {
// fetch correct validator set for current epoch, taking into account
// finality of previous transitions.
let active_set;

let (validators, set_number) = if self.immediate_transitions {
(&*self.validators, header.number())
let validators = if self.immediate_transitions {
&*self.validators
} else {
// get correct validator set for epoch.
let client = match self.client.read().as_ref().and_then(|weak| weak.upgrade()) {
@ -722,21 +725,12 @@ impl Engine<EthereumMachine> for AuthorityRound {
}

active_set = epoch_manager.validators().clone();
(&active_set as &_, epoch_manager.epoch_transition_number)
};

// always report with "self.validators" so that the report actually gets
// to the contract.
let report = |report| match report {
Report::Benign(address, block_number) =>
self.validators.report_benign(&address, set_number, block_number),
Report::Malicious(address, block_number, proof) =>
self.validators.report_malicious(&address, set_number, block_number, proof),
&active_set as &_
};

// verify signature against fixed list, but reports should go to the
// contract itself.
verify_external(header, validators, &*self.step, report)
verify_external(header, validators)
}

fn genesis_epoch_data(&self, header: &Header, call: &Call) -> Result<Vec<u8>, String> {
@ -1059,8 +1053,7 @@ mod tests {
assert!(engine.verify_block_family(&header, &parent_header).is_ok());
assert!(engine.verify_block_external(&header).is_ok());
header.set_seal(vec![encode(&5usize).into_vec(), encode(&(&*signature as &[u8])).into_vec()]);
assert!(engine.verify_block_family(&header, &parent_header).is_ok());
assert!(engine.verify_block_external(&header).is_err());
assert!(engine.verify_block_basic(&header).is_err());
}

#[test]
@ -1200,3 +1193,4 @@ mod tests {
AuthorityRound::new(params, machine).unwrap();
}
}

@ -192,7 +192,7 @@ mod tests {
header.set_number(2);
header.set_parent_hash(client.chain_info().best_block_hash);
// `reportBenign` when the designated proposer releases block from the future (bad clock).
assert!(client.engine().verify_block_external(&header).is_err());
assert!(client.engine().verify_block_basic(&header).is_err());
// Seal a block.
client.engine().step();
assert_eq!(client.chain_info().best_block_number, 1);

@ -156,11 +156,7 @@ impl TransactOptions<trace::NoopTracer, trace::NoopVMTracer> {

pub fn executor(machine: &Machine, vm_factory: &Factory, params: &ActionParams) -> Box<vm::Vm> {
if machine.supports_wasm() && params.code.as_ref().map_or(false, |code| code.len() > 4 && &code[0..4] == WASM_MAGIC_NUMBER) {
Box::new(
wasm::WasmInterpreter::new()
// prefer to fail fast
.expect("Failed to create wasm runtime")
)
Box::new(wasm::WasmInterpreter)
} else {
vm_factory.create(params.gas)
}

@ -1406,7 +1406,7 @@ mod tests {
}

#[test]
fn should_not_trace_delegatecall() {
fn should_trace_delegatecall_properly() {
init_log();

let mut state = get_temp_state();
@ -1426,7 +1426,7 @@ mod tests {
}.sign(&secret(), None);

state.init_code(&0xa.into(), FromHex::from_hex("6000600060006000600b618000f4").unwrap()).unwrap();
state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap()).unwrap();
state.init_code(&0xb.into(), FromHex::from_hex("60056000526001601ff3").unwrap()).unwrap();
let result = state.apply(&info, &machine, &t, true).unwrap();

let expected_trace = vec![FlatTrace {
@ -1441,23 +1441,23 @@ mod tests {
call_type: CallType::Call,
}),
result: trace::Res::Call(trace::CallResult {
gas_used: U256::from(721), // in post-eip150
gas_used: U256::from(736), // in post-eip150
output: vec![]
}),
}, FlatTrace {
trace_address: vec![0].into_iter().collect(),
subtraces: 0,
action: trace::Action::Call(trace::Call {
from: "9cce34f7ab185c7aba1b7c8140d620b4bda941d6".into(),
to: 0xa.into(),
from: 0xa.into(),
to: 0xb.into(),
value: 0.into(),
gas: 32768.into(),
input: vec![],
call_type: CallType::DelegateCall,
}),
result: trace::Res::Call(trace::CallResult {
gas_used: 3.into(),
output: vec![],
gas_used: 18.into(),
output: vec![5],
}),
}];

@ -74,13 +74,23 @@ pub struct Call {

impl From<ActionParams> for Call {
fn from(p: ActionParams) -> Self {
Call {
from: p.sender,
to: p.address,
value: p.value.value(),
gas: p.gas,
input: p.data.unwrap_or_else(Vec::new),
call_type: p.call_type,
match p.call_type {
CallType::DelegateCall => Call {
from: p.address,
to: p.code_address,
value: p.value.value(),
gas: p.gas,
input: p.data.unwrap_or_else(Vec::new),
call_type: p.call_type,
},
_ => Call {
from: p.sender,
to: p.address,
value: p.value.value(),
gas: p.gas,
input: p.data.unwrap_or_else(Vec::new),
call_type: p.call_type,
},
}
}
}

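The change above makes a DelegateCall trace report the delegating contract (p.address) as `from` and the code provider (p.code_address) as `to`, while other call types keep sender -> address. A hedged illustration of just that endpoint choice (plain integers stand in for addresses; the free-standing function is not the project's API):

// Illustrative only: choose trace endpoints depending on the call type.
fn trace_endpoints(is_delegatecall: bool, sender: u64, address: u64, code_address: u64) -> (u64, u64) {
    if is_delegatecall {
        (address, code_address) // delegating contract -> code provider
    } else {
        (sender, address) // ordinary call: caller -> callee
    }
}
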
@ -127,18 +127,19 @@ pub struct WasmCosts {
pub mul: u32,
/// Memory (load/store) operations multiplier.
pub mem: u32,
/// Memory copy operation, per byte.
pub mem_copy: u32,
/// Memory move operation, per byte.
pub mem_move: u32,
/// Memory set operation, per byte.
pub mem_set: u32,
/// Static region charge, per byte.
pub static_region: u32,
/// General static query of U256 value from env-info
pub static_u256: u32,
/// General static query of Address value from env-info
pub static_address: u32,
/// Memory stipend. Amount of free memory (in 64kb pages) each contract can use for stack.
pub initial_mem: u32,
/// Grow memory cost, per page (64kb)
pub grow_mem: u32,
/// Cost of wasm opcode is calculated as TABLE_ENTRY_COST * `opcodes_mul` / `opcodes_div`
pub opcodes_mul: u32,
/// Cost of wasm opcode is calculated as TABLE_ENTRY_COST * `opcodes_mul` / `opcodes_div`
pub opcodes_div: u32,

}

impl Default for WasmCosts {
@ -148,12 +149,12 @@ impl Default for WasmCosts {
div: 16,
mul: 4,
mem: 2,
mem_copy: 1,
mem_move: 1,
mem_set: 1,
static_region: 1,
static_u256: 64,
static_address: 40,
initial_mem: 4096,
grow_mem: 8192,
opcodes_mul: 3,
opcodes_div: 8,
}
}
}

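With the defaults above (opcodes_mul = 3, opcodes_div = 8), the doc-commented formula prices each wasm opcode at TABLE_ENTRY_COST * 3 / 8. A hedged worked example; the table entry cost of 8 used here is an assumed value purely for illustration:

// Sketch: per-opcode cost under the documented formula.
fn opcode_cost(table_entry_cost: u64, opcodes_mul: u64, opcodes_div: u64) -> u64 {
    table_entry_cost * opcodes_mul / opcodes_div
}

fn main() {
    // assumed table entry cost of 8, with the default opcodes_mul = 3 and opcodes_div = 8
    assert_eq!(opcode_cost(8, 3, 8), 3);
}
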
@ -8,7 +8,9 @@ byteorder = "1.0"
ethcore-util = { path = "../../util" }
ethcore-bigint = { path = "../../util/bigint" }
log = "0.3"
parity-wasm = "0.15"
parity-wasm = "0.23"
libc = "0.2"
wasm-utils = { git = "https://github.com/paritytech/wasm-utils" }
vm = { path = "../vm" }
ethcore-logger = { path = "../../logger" }
wasmi = { git = "https://github.com/pepyakin/wasmi" }
@ -16,7 +16,7 @@ fn load_code<P: AsRef<path::Path>>(p: P) -> io::Result<Vec<u8>> {
}

fn wasm_interpreter() -> WasmInterpreter {
WasmInterpreter::new().expect("wasm interpreter to create without errors")
WasmInterpreter
}

#[derive(Debug)]

@ -14,178 +14,279 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Wasm env module bindings
//! Env module glue for wasmi interpreter

use parity_wasm::elements::ValueType::*;
use parity_wasm::interpreter::{self, UserFunctionDescriptor};
use parity_wasm::interpreter::UserFunctionDescriptor::*;
use super::runtime::{Runtime, UserTrap};
use std::cell::RefCell;
use wasmi::{
self, Signature, Error, FuncRef, FuncInstance, MemoryDescriptor,
MemoryRef, MemoryInstance,
};

pub const SIGNATURES: &'static [UserFunctionDescriptor] = &[
Static(
"_storage_read",
&[I32; 2],
/// Internal ids all functions runtime supports. This is just a glue for wasmi interpreter
/// that lacks high-level api and later will be factored out
pub mod ids {
pub const STORAGE_WRITE_FUNC: usize = 0;
pub const STORAGE_READ_FUNC: usize = 10;
pub const RET_FUNC: usize = 20;
pub const GAS_FUNC: usize = 30;
pub const FETCH_INPUT_FUNC: usize = 40;
pub const INPUT_LENGTH_FUNC: usize = 50;
pub const CCALL_FUNC: usize = 60;
pub const SCALL_FUNC: usize = 70;
pub const DCALL_FUNC: usize = 80;
pub const VALUE_FUNC: usize = 90;
pub const CREATE_FUNC: usize = 100;
pub const SUICIDE_FUNC: usize = 110;
pub const BLOCKHASH_FUNC: usize = 120;
pub const BLOCKNUMBER_FUNC: usize = 130;
pub const COINBASE_FUNC: usize = 140;
pub const DIFFICULTY_FUNC: usize = 150;
pub const GASLIMIT_FUNC: usize = 160;
pub const TIMESTAMP_FUNC: usize = 170;
pub const ADDRESS_FUNC: usize = 180;
pub const SENDER_FUNC: usize = 190;
pub const ORIGIN_FUNC: usize = 200;
pub const ELOG_FUNC: usize = 210;

pub const PANIC_FUNC: usize = 1000;
pub const DEBUG_FUNC: usize = 1010;
}

/// Signatures of all functions runtime supports. The actual dispatch happens at
/// impl runtime::Runtime methods.
pub mod signatures {
use wasmi::{self, ValueType};
use wasmi::ValueType::*;

pub struct StaticSignature(pub &'static [ValueType], pub Option<ValueType>);

pub const STORAGE_READ: StaticSignature = StaticSignature(
&[I32, I32],
None,
),
Static(
"_storage_write",
&[I32; 2],
);

pub const STORAGE_WRITE: StaticSignature = StaticSignature(
&[I32, I32],
None,
),
Static(
"_balance",
&[I32; 2],
);

pub const RET: StaticSignature = StaticSignature(
&[I32, I32],
None,
),
Static(
"_ext_malloc",
);

pub const GAS: StaticSignature = StaticSignature(
&[I32],
None,
);

pub const FETCH_INPUT: StaticSignature = StaticSignature(
&[I32],
None,
);

pub const INPUT_LENGTH: StaticSignature = StaticSignature(
&[],
Some(I32),
),
Static(
"_ext_free",
&[I32],
None,
),
Static(
"gas",
&[I32],
None,
),
Static(
"_debug",
&[I32; 2],
None,
),
Static(
"_suicide",
&[I32],
None,
),
Static(
"_create",
&[I32; 4],
Some(I32),
),
Static(
"_ccall",
);

pub const CCALL: StaticSignature = StaticSignature(
&[I64, I32, I32, I32, I32, I32, I32],
Some(I32),
),
Static(
"_dcall",
);

pub const DCALL: StaticSignature = StaticSignature(
&[I64, I32, I32, I32, I32, I32],
Some(I32),
),
Static(
"_scall",
);

pub const SCALL: StaticSignature = StaticSignature(
&[I64, I32, I32, I32, I32, I32],
Some(I32),
),
Static(
"abort",
);

pub const PANIC: StaticSignature = StaticSignature(
&[I32, I32],
None,
);

pub const DEBUG: StaticSignature = StaticSignature(
&[I32, I32],
None,
);

pub const VALUE: StaticSignature = StaticSignature(
&[I32],
None,
),
Static(
"_emscripten_memcpy_big",
&[I32; 3],
);

pub const CREATE: StaticSignature = StaticSignature(
&[I32, I32, I32, I32],
Some(I32),
),
Static(
"_ext_memcpy",
&[I32; 3],
Some(I32),
),
Static(
"_ext_memset",
&[I32; 3],
Some(I32),
),
Static(
"_ext_memmove",
&[I32; 3],
Some(I32),
),
Static(
"_panic",
&[I32; 2],
);

pub const SUICIDE: StaticSignature = StaticSignature(
&[I32],
None,
),
Static(
"_blockhash",
);

pub const BLOCKHASH: StaticSignature = StaticSignature(
&[I64, I32],
None,
),
Static(
"_coinbase",
&[I32],
None,
),
Static(
"_sender",
&[I32],
None,
),
Static(
"_origin",
&[I32],
None,
),
Static(
"_address",
&[I32],
None,
),
Static(
"_value",
&[I32],
None,
),
Static(
"_timestamp",
);

pub const BLOCKNUMBER: StaticSignature = StaticSignature(
&[],
Some(I64),
),
Static(
"_blocknumber",
);

pub const COINBASE: StaticSignature = StaticSignature(
&[I32],
None,
);

pub const DIFFICULTY: StaticSignature = StaticSignature(
&[I32],
None,
);

pub const GASLIMIT: StaticSignature = StaticSignature(
&[I32],
None,
);

pub const TIMESTAMP: StaticSignature = StaticSignature(
&[],
Some(I64),
),
Static(
"_difficulty",
);

pub const ADDRESS: StaticSignature = StaticSignature(
&[I32],
None,
),
Static(
"_gaslimit",
);

pub const SENDER: StaticSignature = StaticSignature(
&[I32],
None,
),
Static(
"_elog",
&[I32; 4],
);

pub const ORIGIN: StaticSignature = StaticSignature(
&[I32],
None,
),
);

// TODO: Get rid of it also somehow?
Static(
"_llvm_trap",
&[I32; 0],
None
),
pub const ELOG: StaticSignature = StaticSignature(
&[I32, I32, I32, I32],
None,
);

Static(
"_llvm_bswap_i64",
&[I64],
Some(I64)
),
];

pub fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserDefinedElements<'a, UserTrap> {
interpreter::UserDefinedElements {
executor: Some(runtime),
globals: ::std::collections::HashMap::new(),
functions: ::std::borrow::Cow::from(SIGNATURES),
impl Into<wasmi::Signature> for StaticSignature {
fn into(self) -> wasmi::Signature {
wasmi::Signature::new(self.0, self.1)
}
}
}

fn host(signature: signatures::StaticSignature, idx: usize) -> FuncRef {
FuncInstance::alloc_host(signature.into(), idx)
}

/// Import resolver for wasmi
/// Maps all functions that runtime support to the corresponding contract import
/// entries.
/// Also manages initial memory request from the runtime.
#[derive(Default)]
pub struct ImportResolver {
max_memory: u32,
memory: RefCell<Option<MemoryRef>>,
}

impl ImportResolver {
/// New import resolver with specifed maximum amount of inital memory (in wasm pages = 64kb)
pub fn with_limit(max_memory: u32) -> ImportResolver {
ImportResolver {
max_memory: max_memory,
memory: RefCell::new(None),
}
}

/// Returns memory that was instantiated during the contract module
/// start. If contract does not use memory at all, the dummy memory of length (0, 0)
/// will be created instead. So this method always returns memory instance
/// unless errored.
pub fn memory_ref(&self) -> MemoryRef {
{
let mut mem_ref = self.memory.borrow_mut();
if mem_ref.is_none() {
*mem_ref = Some(
MemoryInstance::alloc(0, Some(0)).expect("Memory allocation (0, 0) should not fail; qed")
);
}
}

self.memory.borrow().clone().expect("it is either existed or was created as (0, 0) above; qed")
}

/// Returns memory size module initially requested
pub fn memory_size(&self) -> Result<u32, Error> {
Ok(self.memory_ref().size())
}
}

impl wasmi::ModuleImportResolver for ImportResolver {
fn resolve_func(&self, field_name: &str, _signature: &Signature) -> Result<FuncRef, Error> {
let func_ref = match field_name {
"storage_read" => host(signatures::STORAGE_READ, ids::STORAGE_READ_FUNC),
"storage_write" => host(signatures::STORAGE_WRITE, ids::STORAGE_WRITE_FUNC),
"ret" => host(signatures::RET, ids::RET_FUNC),
"gas" => host(signatures::GAS, ids::GAS_FUNC),
"input_length" => host(signatures::INPUT_LENGTH, ids::INPUT_LENGTH_FUNC),
"fetch_input" => host(signatures::FETCH_INPUT, ids::FETCH_INPUT_FUNC),
"panic" => host(signatures::PANIC, ids::PANIC_FUNC),
"debug" => host(signatures::DEBUG, ids::DEBUG_FUNC),
"ccall" => host(signatures::CCALL, ids::CCALL_FUNC),
"dcall" => host(signatures::DCALL, ids::DCALL_FUNC),
"scall" => host(signatures::SCALL, ids::SCALL_FUNC),
"value" => host(signatures::VALUE, ids::VALUE_FUNC),
"create" => host(signatures::CREATE, ids::CREATE_FUNC),
"suicide" => host(signatures::SUICIDE, ids::SUICIDE_FUNC),
"blockhash" => host(signatures::BLOCKHASH, ids::BLOCKHASH_FUNC),
"blocknumber" => host(signatures::BLOCKNUMBER, ids::BLOCKNUMBER_FUNC),
"coinbase" => host(signatures::COINBASE, ids::COINBASE_FUNC),
"difficulty" => host(signatures::DIFFICULTY, ids::DIFFICULTY_FUNC),
"gaslimit" => host(signatures::GASLIMIT, ids::GASLIMIT_FUNC),
"timestamp" => host(signatures::TIMESTAMP, ids::TIMESTAMP_FUNC),
"address" => host(signatures::ADDRESS, ids::ADDRESS_FUNC),
"sender" => host(signatures::SENDER, ids::SENDER_FUNC),
"origin" => host(signatures::ORIGIN, ids::ORIGIN_FUNC),
"elog" => host(signatures::ELOG, ids::ELOG_FUNC),
_ => {
return Err(wasmi::Error::Instantiation(
format!("Export {} not found", field_name),
))
}
};

Ok(func_ref)
}

fn resolve_memory(
&self,
field_name: &str,
descriptor: &MemoryDescriptor,
) -> Result<MemoryRef, Error> {
if field_name == "memory" {
let effective_max = descriptor.maximum().unwrap_or(self.max_memory + 1);
if descriptor.initial() > self.max_memory || effective_max > self.max_memory
{
Err(Error::Instantiation("Module requested too much memory".to_owned()))
} else {
let mem = MemoryInstance::alloc(descriptor.initial(), descriptor.maximum())?;
*self.memory.borrow_mut() = Some(mem.clone());
Ok(mem)
}
} else {
Err(Error::Instantiation("Memory imported under unknown name".to_owned()))
}
}
}
@ -16,34 +16,30 @@

//! Wasm Interpreter

extern crate vm;
extern crate byteorder;
extern crate ethcore_logger;
extern crate ethcore_util as util;
extern crate ethcore_bigint as bigint;
#[macro_use] extern crate log;
extern crate ethcore_logger;
extern crate byteorder;
extern crate libc;
extern crate parity_wasm;
extern crate vm;
extern crate wasm_utils;
extern crate wasmi;

mod runtime;
mod ptr;
mod result;
#[cfg(test)]
mod tests;
mod env;
mod panic_payload;

const DEFAULT_STACK_SPACE: u32 = 5 * 1024 * 1024;

use parity_wasm::{interpreter, elements};
use parity_wasm::interpreter::ModuleInstanceInterface;
mod parser;

use vm::{GasLeft, ReturnData, ActionParams};
use self::runtime::{Runtime, RuntimeContext, UserTrap};
use wasmi::Error as InterpreterError;

pub use self::runtime::InterpreterError;
use runtime::{Runtime, RuntimeContext};

const DEFAULT_RESULT_BUFFER: usize = 1024;
use bigint::uint::U256;

/// Wrapped interpreter error
#[derive(Debug)]
@ -61,139 +57,110 @@ impl From<Error> for vm::Error {
}
}

impl From<UserTrap> for vm::Error {
fn from(e: UserTrap) -> Self { e.into() }
}

/// Wasm interpreter instance
pub struct WasmInterpreter {
program: runtime::InterpreterProgramInstance,
result: Vec<u8>,
}
pub struct WasmInterpreter;

impl WasmInterpreter {
/// New wasm interpreter instance
pub fn new() -> Result<WasmInterpreter, Error> {
Ok(WasmInterpreter {
program: interpreter::ProgramInstance::new()?,
result: Vec::with_capacity(DEFAULT_RESULT_BUFFER),
})
impl From<runtime::Error> for vm::Error {
fn from(e: runtime::Error) -> Self {
vm::Error::Wasm(format!("Wasm runtime error: {:?}", e))
}
}

impl vm::Vm for WasmInterpreter {

fn exec(&mut self, params: ActionParams, ext: &mut vm::Ext) -> vm::Result<GasLeft> {
use parity_wasm::elements::Deserialize;
let (module, data) = parser::payload(&params, ext.schedule())?;

let code = params.code.expect("exec is only called on contract with code; qed");
let loaded_module = wasmi::Module::from_parity_wasm_module(module).map_err(Error)?;

trace!(target: "wasm", "Started wasm interpreter with code.len={:?}", code.len());
let instantiation_resolover = env::ImportResolver::with_limit(16);

let env_instance = self.program.module("env")
// prefer explicit panic here
.expect("Wasm program to contain env module");
let module_instance = wasmi::ModuleInstance::new(
&loaded_module,
&wasmi::ImportsBuilder::new().with_resolver("env", &instantiation_resolover)
).map_err(Error)?;

let env_memory = env_instance.memory(interpreter::ItemIndex::Internal(0))
// prefer explicit panic here
.expect("Linear memory to exist in wasm runtime");
let adjusted_gas = params.gas * U256::from(ext.schedule().wasm.opcodes_div) /
U256::from(ext.schedule().wasm.opcodes_mul);

if params.gas > ::std::u64::MAX.into() {
return Err(vm::Error::Wasm("Wasm interpreter cannot run contracts with gas >= 2^64".to_owned()));
if adjusted_gas > ::std::u64::MAX.into()
{
return Err(vm::Error::Wasm("Wasm interpreter cannot run contracts with gas (wasm adjusted) >= 2^64".to_owned()));
}

let mut runtime = Runtime::with_params(
ext,
env_memory,
DEFAULT_STACK_SPACE,
params.gas.low_u64(),
RuntimeContext {
address: params.address,
sender: params.sender,
origin: params.origin,
code_address: params.code_address,
value: params.value.value(),
},
&self.program,
);
let initial_memory = instantiation_resolover.memory_size().map_err(Error)?;
trace!(target: "wasm", "Contract requested {:?} pages of initial memory", initial_memory);

let (mut cursor, data_position) = match params.params_type {
vm::ParamsType::Embedded => {
let module_size = parity_wasm::peek_size(&*code);
(
::std::io::Cursor::new(&code[..module_size]),
module_size
)
},
vm::ParamsType::Separate => {
(::std::io::Cursor::new(&code[..]), 0)
},
};

let contract_module = wasm_utils::inject_gas_counter(
elements::Module::deserialize(
&mut cursor
).map_err(|err| {
vm::Error::Wasm(format!("Error deserializing contract code ({:?})", err))
})?,
runtime.gas_rules(),
);

let data_section_length = contract_module.data_section()
.map(|section| section.entries().iter().fold(0, |sum, entry| sum + entry.value().len()))
.unwrap_or(0)
as u64;

let static_segment_cost = data_section_length * runtime.ext().schedule().wasm.static_region as u64;
runtime.charge(|_| static_segment_cost).map_err(Error)?;

let d_ptr = {
match params.params_type {
vm::ParamsType::Embedded => {
runtime.write_descriptor(
if data_position < code.len() { &code[data_position..] } else { &[] }
).map_err(Error)?
let (gas_left, result) = {
let mut runtime = Runtime::with_params(
ext,
instantiation_resolover.memory_ref(),
// cannot overflow, checked above
adjusted_gas.low_u64(),
data.to_vec(),
RuntimeContext {
address: params.address,
sender: params.sender,
origin: params.origin,
code_address: params.code_address,
value: params.value.value(),
},
vm::ParamsType::Separate => {
runtime.write_descriptor(&params.data.unwrap_or_default())
.map_err(Error)?
}
}
};
);

{
let execution_params = runtime.execution_params()
.add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));
// cannot overflow if static_region < 2^16,
// initial_memory ∈ [0..2^32)
// total_charge <- static_region * 2^32 * 2^16
// total_charge ∈ [0..2^64) if static_region ∈ [0..2^16)
// qed
assert!(runtime.schedule().wasm.initial_mem < 1 << 16);
runtime.charge(|s| initial_memory as u64 * s.wasm.initial_mem as u64)?;

let module_instance = self.program.add_module("contract", contract_module, Some(&execution_params.externals))
.map_err(|err| {
trace!(target: "wasm", "Error adding contract module: {:?}", err);
vm::Error::from(Error(err))
})?;
let module_instance = module_instance.run_start(&mut runtime).map_err(Error)?;

match module_instance.execute_export("_call", execution_params) {
match module_instance.invoke_export("call", &[], &mut runtime) {
Ok(_) => { },
Err(interpreter::Error::User(UserTrap::Suicide)) => { },
Err(InterpreterError::Host(boxed)) => {
match boxed.downcast_ref::<runtime::Error>() {
None => {
return Err(vm::Error::Wasm("Invalid user error used in interpreter".to_owned()));
}
Some(runtime_err) => {
match *runtime_err {
runtime::Error::Suicide => {
// Suicide uses trap to break execution
}
ref any_err => {
trace!(target: "wasm", "Error executing contract: {:?}", boxed);
return Err(vm::Error::from(Error::from(InterpreterError::Host(Box::new(any_err.clone())))));
}
}
}
}
},
Err(err) => {
trace!(target: "wasm", "Error executing contract: {:?}", err);
return Err(vm::Error::from(Error(err)))
return Err(vm::Error::from(Error::from(err)))
}
}
}
(
runtime.gas_left().expect("Cannot fail since it was not updated since last charge"),
runtime.into_result(),
)
};

let result = result::WasmResult::new(d_ptr);
if result.peek_empty(&*runtime.memory()).map_err(|e| Error(e))? {
let gas_left =
U256::from(gas_left) * U256::from(ext.schedule().wasm.opcodes_mul)
/ U256::from(ext.schedule().wasm.opcodes_div);

if result.is_empty() {
trace!(target: "wasm", "Contract execution result is empty.");
Ok(GasLeft::Known(runtime.gas_left()?.into()))
Ok(GasLeft::Known(gas_left))
} else {
self.result.clear();
// todo: use memory views to avoid copy
self.result.extend(result.pop(&*runtime.memory()).map_err(|e| Error(e.into()))?);
let len = self.result.len();
let len = result.len();
Ok(GasLeft::NeedsReturn {
gas_left: runtime.gas_left().map_err(|e| Error(e.into()))?.into(),
gas_left: gas_left,
data: ReturnData::new(
::std::mem::replace(&mut self.result, Vec::with_capacity(DEFAULT_RESULT_BUFFER)),
result,
0,
len,
),

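The interpreter above converts EVM gas into wasm-adjusted gas on entry (gas * opcodes_div / opcodes_mul) and converts the remaining gas back on exit (* opcodes_mul / opcodes_div). A hedged, self-contained sketch of that round trip with the default schedule values from this branch (3 and 8); the standalone functions are illustrative only:

// Sketch only: integer round trip of the adjustment used in exec() above.
fn to_wasm_gas(evm_gas: u64, opcodes_mul: u64, opcodes_div: u64) -> u64 {
    evm_gas * opcodes_div / opcodes_mul
}

fn to_evm_gas(wasm_gas: u64, opcodes_mul: u64, opcodes_div: u64) -> u64 {
    wasm_gas * opcodes_mul / opcodes_div
}

fn main() {
    let evm_gas = 100_000u64;
    let adjusted = to_wasm_gas(evm_gas, 3, 8); // 266_666 wasm-adjusted gas
    let back = to_evm_gas(adjusted, 3, 8);     // 99_999: integer division loses a little
    assert!(back <= evm_gas);
}
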
ethcore/wasm/src/parser.rs (new file, 89 lines)
@ -0,0 +1,89 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! ActionParams parser for wasm

use vm;
use wasm_utils::{self, rules};
use parity_wasm::elements::{self, Deserialize};
use parity_wasm::peek_size;

fn gas_rules(schedule: &vm::Schedule) -> rules::Set {
rules::Set::new({
let mut vals = ::std::collections::HashMap::with_capacity(4);
vals.insert(rules::InstructionType::Load, schedule.wasm.mem as u32);
vals.insert(rules::InstructionType::Store, schedule.wasm.mem as u32);
vals.insert(rules::InstructionType::Div, schedule.wasm.div as u32);
vals.insert(rules::InstructionType::Mul, schedule.wasm.mul as u32);
vals
}).with_grow_cost(schedule.wasm.grow_mem)
}

/// Splits payload to code and data according to params.params_type, also
/// loads the module instance from payload and injects gas counter according
/// to schedule.
pub fn payload<'a>(params: &'a vm::ActionParams, schedule: &vm::Schedule)
-> Result<(elements::Module, &'a [u8]), vm::Error>
{
let code = match params.code {
Some(ref code) => &code[..],
None => { return Err(vm::Error::Wasm("Invalid wasm call".to_owned())); }
};

let (mut cursor, data_position) = match params.params_type {
vm::ParamsType::Embedded => {
let module_size = peek_size(&*code);
(
::std::io::Cursor::new(&code[..module_size]),
module_size
)
},
vm::ParamsType::Separate => {
(::std::io::Cursor::new(&code[..]), 0)
},
};

let deserialized_module = elements::Module::deserialize(
&mut cursor
).map_err(|err| {
vm::Error::Wasm(format!("Error deserializing contract code ({:?})", err))
})?;

if deserialized_module.memory_section().map_or(false, |ms| ms.entries().len() > 0) {
// According to WebAssembly spec, internal memory is hidden from embedder and should not
// be interacted with. So we disable this kind of modules at decoding level.
return Err(vm::Error::Wasm(format!("Malformed wasm module: internal memory")));
}

let contract_module = wasm_utils::inject_gas_counter(
deserialized_module,
&gas_rules(schedule),
);

let data = match params.params_type {
vm::ParamsType::Embedded => {
if data_position < code.len() { &code[data_position..] } else { &[] }
},
vm::ParamsType::Separate => {
match params.data {
Some(ref s) => &s[..],
None => &[]
}
}
};

Ok((contract_module, data))
}
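payload above supports two layouts: Embedded, where module bytes and call data are concatenated in params.code and split at the length reported by peek_size, and Separate, where call data comes from params.data. A hedged sketch of the embedded split on a plain byte slice (the helper is illustrative only; module_size stands in for the peek_size result):

// Sketch: split an embedded payload into (module bytes, call data).
fn split_embedded(payload: &[u8], module_size: usize) -> (&[u8], &[u8]) {
    if module_size < payload.len() {
        (&payload[..module_size], &payload[module_size..])
    } else {
        (payload, &[])
    }
}
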
@ -1,58 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Wasm bound-checked ptr

use super::runtime::{InterpreterMemoryInstance, InterpreterError, UserTrap};

/// Bound-checked wrapper for webassembly memory
pub struct WasmPtr(u32);

/// Error in bound check
#[derive(Debug)]
pub enum Error {
AccessViolation,
}

impl From<u32> for WasmPtr {
fn from(raw: u32) -> Self {
WasmPtr(raw)
}
}

impl From<Error> for InterpreterError {
fn from(_e: Error) -> Self {
UserTrap::MemoryAccessViolation.into()
}
}

impl WasmPtr {
// todo: use memory view when they are on
/// Check memory range and return data with given length starting from the current pointer value
pub fn slice(&self, len: u32, mem: &InterpreterMemoryInstance) -> Result<Vec<u8>, Error> {
mem.get(self.0, len as usize).map_err(|_| Error::AccessViolation)
}

// todo: maybe 2gb limit can be enhanced
/// Convert i32 from wasm stack to the wrapped pointer
pub fn from_i32(raw_ptr: i32) -> Result<Self, Error> {
if raw_ptr < 0 { return Err(Error::AccessViolation); }
Ok(WasmPtr(raw_ptr as u32))
}

/// Return pointer raw value
pub fn as_raw(&self) -> u32 { self.0 }
}
@ -1,49 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Wasm evm results helper

use byteorder::{LittleEndian, ByteOrder};

use super::ptr::WasmPtr;
use super::runtime::{InterpreterError, InterpreterMemoryInstance};

/// Wrapper for wasm contract call result
pub struct WasmResult {
ptr: WasmPtr,
}

impl WasmResult {
/// New call result from given ptr
pub fn new(descriptor_ptr: WasmPtr) -> WasmResult {
WasmResult { ptr: descriptor_ptr }
}

/// Check if the result contains any data
pub fn peek_empty(&self, mem: &InterpreterMemoryInstance) -> Result<bool, InterpreterError> {
let result_len = LittleEndian::read_u32(&self.ptr.slice(16, mem)?[12..16]);
Ok(result_len == 0)
}

/// Consume the result ptr and return the actual data from wasm linear memory
pub fn pop(self, mem: &InterpreterMemoryInstance) -> Result<Vec<u8>, InterpreterError> {
let result_ptr = LittleEndian::read_u32(&self.ptr.slice(16, mem)?[8..12]);
let result_len = LittleEndian::read_u32(&self.ptr.slice(16, mem)?[12..16]);
trace!(target: "wasm", "contract result: {} bytes at @{}", result_len, result_ptr);

Ok(mem.get(result_ptr, result_len as usize)?)
}
}
@ -31,6 +31,34 @@ macro_rules! load_sample {
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! reqrep_test {
|
||||
($name: expr, $input: expr) => {
|
||||
reqrep_test!($name, $input, vm::EnvInfo::default(), HashMap::new())
|
||||
};
|
||||
($name: expr, $input: expr, $info: expr, $block_hashes: expr) => {
|
||||
{
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!($name);
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some($input);
|
||||
|
||||
let mut fake_ext = FakeExt::new();
|
||||
fake_ext.info = $info;
|
||||
fake_ext.blockhashes = $block_hashes;
|
||||
|
||||
let mut interpreter = wasm_interpreter();
|
||||
interpreter.exec(params, &mut fake_ext)
|
||||
.map(|result| match result {
|
||||
GasLeft::Known(_) => { panic!("Test is expected to return payload to check"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
})
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn test_finalize(res: Result<GasLeft, vm::Error>) -> Result<U256, vm::Error> {
|
||||
match res {
|
||||
Ok(GasLeft::Known(gas)) => Ok(gas),
|
||||
@ -40,7 +68,7 @@ fn test_finalize(res: Result<GasLeft, vm::Error>) -> Result<U256, vm::Error> {
|
||||
}
|
||||
|
||||
fn wasm_interpreter() -> WasmInterpreter {
|
||||
WasmInterpreter::new().expect("wasm interpreter to create without errors")
|
||||
WasmInterpreter
|
||||
}
|
||||
|
||||
/// Empty contract does almost nothing except producing 1 (one) local node debug log message
|
||||
@ -60,7 +88,7 @@ fn empty() {
|
||||
test_finalize(interpreter.exec(params, &mut ext)).unwrap()
|
||||
};
|
||||
|
||||
assert_eq!(gas_left, U256::from(96_678));
|
||||
assert_eq!(gas_left, U256::from(98462));
|
||||
}
|
||||
|
||||
// This test checks if the contract deserializes payload header properly.
|
||||
@ -112,7 +140,7 @@ fn logger() {
|
||||
U256::from(1_000_000_000),
|
||||
"Logger sets 0x04 key to the trasferred value"
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(15_860));
|
||||
assert_eq!(gas_left, U256::from(17_578));
|
||||
}
|
||||
|
||||
// This test checks if the contract can allocate memory and pass pointer to the result stream properly.
|
||||
@ -147,7 +175,7 @@ fn identity() {
|
||||
sender,
|
||||
"Idenity test contract does not return the sender passed"
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(96_540));
|
||||
assert_eq!(gas_left, U256::from(98_408));
|
||||
}
|
||||
|
||||
// Dispersion test sends a byte array and expects the contract to 'disperse' the original elements with
|
||||
@ -156,6 +184,8 @@ fn identity() {
|
||||
// This also tests byte-perfect memory allocation and in/out ptr lifecycle.
|
||||
#[test]
|
||||
fn dispersion() {
|
||||
::ethcore_logger::init_log();
|
||||
|
||||
let code = load_sample!("dispersion.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
@ -175,12 +205,11 @@ fn dispersion() {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![0u8, 0, 125, 11, 197, 7, 255, 8, 19, 0]
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(96_116));
|
||||
assert_eq!(gas_left, U256::from(93_972));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -208,7 +237,7 @@ fn suicide_not() {
|
||||
result,
|
||||
vec![0u8]
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(96_461));
|
||||
assert_eq!(gas_left, U256::from(94_970));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -240,7 +269,7 @@ fn suicide() {
|
||||
};
|
||||
|
||||
assert!(ext.suicides.contains(&refund));
|
||||
assert_eq!(gas_left, U256::from(96_429));
|
||||
assert_eq!(gas_left, U256::from(94_933));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -270,7 +299,7 @@ fn create() {
|
||||
assert!(ext.calls.contains(
|
||||
&FakeCall {
|
||||
call_type: FakeCallType::Create,
|
||||
gas: U256::from(62_545),
|
||||
gas: U256::from(60_917),
|
||||
sender_address: None,
|
||||
receive_address: None,
|
||||
value: Some(1_000_000_000.into()),
|
||||
@ -278,7 +307,7 @@ fn create() {
|
||||
code_address: None,
|
||||
}
|
||||
));
|
||||
assert_eq!(gas_left, U256::from(62_538));
|
||||
assert_eq!(gas_left, U256::from(60_903));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -322,7 +351,7 @@ fn call_msg() {
|
||||
}
|
||||
));
|
||||
|
||||
assert_eq!(gas_left, U256::from(95_699));
|
||||
assert_eq!(gas_left, U256::from(93_511));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -367,7 +396,7 @@ fn call_code() {
|
||||
// siphash result
|
||||
let res = LittleEndian::read_u32(&result[..]);
|
||||
assert_eq!(res, 4198595614);
|
||||
assert_eq!(gas_left, U256::from(90_550));
|
||||
assert_eq!(gas_left, U256::from(92_381));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -415,7 +444,7 @@ fn call_static() {
|
||||
let res = LittleEndian::read_u32(&result[..]);
|
||||
assert_eq!(res, 317632590);
|
||||
|
||||
assert_eq!(gas_left, U256::from(90_550));
|
||||
assert_eq!(gas_left, U256::from(92_381));
|
||||
}
|
||||
|
||||
// Realloc test
|
||||
@ -438,13 +467,37 @@ fn realloc() {
|
||||
}
|
||||
};
|
||||
assert_eq!(result, vec![0u8; 2]);
|
||||
assert_eq!(gas_left, U256::from(96_445));
|
||||
assert_eq!(gas_left, U256::from(94_352));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn alloc() {
|
||||
let code = load_sample!("alloc.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(10_000_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(vec![0u8]);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("alloc test should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
assert_eq!(result, vec![5u8; 1024*450]);
|
||||
assert_eq!(gas_left, U256::from(6_506_844));
|
||||
}
|
||||
|
||||
// Tests the contract's ability to read from storage.
// The test prepopulates an address into storage, then executes a contract which reads that address from storage and writes it into the result.
|
||||
#[test]
|
||||
fn storage_read() {
|
||||
::ethcore_logger::init_log();
|
||||
|
||||
let code = load_sample!("storage_read.wasm");
|
||||
let address: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap();
|
||||
|
||||
@ -464,7 +517,7 @@ fn storage_read() {
|
||||
};
|
||||
|
||||
assert_eq!(Address::from(&result[12..32]), address);
|
||||
assert_eq!(gas_left, U256::from(96_463));
|
||||
assert_eq!(gas_left, U256::from(98_298));
|
||||
}
|
||||
|
||||
// Tests keccak calculation
|
||||
@ -490,124 +543,7 @@ fn keccak() {
|
||||
};
|
||||
|
||||
assert_eq!(H256::from_slice(&result), H256::from("68371d7e884c168ae2022c82bd837d51837718a7f7dfb7aa3f753074a35e1d87"));
|
||||
assert_eq!(gas_left, U256::from(81_067));
|
||||
}
|
||||
|
||||
// memcpy test.
|
||||
#[test]
|
||||
fn memcpy() {
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!("mem.wasm");
|
||||
|
||||
let mut test_payload = Vec::with_capacity(8192);
|
||||
for i in 0..8192 {
|
||||
test_payload.push((i % 255) as u8);
|
||||
}
|
||||
let mut data = vec![0u8];
|
||||
data.extend(&test_payload);
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(data);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("mem should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(result, test_payload);
|
||||
assert_eq!(gas_left, U256::from(71_940));
|
||||
}
|
||||
|
||||
// memmove test.
|
||||
#[test]
|
||||
fn memmove() {
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!("mem.wasm");
|
||||
|
||||
let mut test_payload = Vec::with_capacity(8192);
|
||||
for i in 0..8192 {
|
||||
test_payload.push((i % 255) as u8);
|
||||
}
|
||||
let mut data = vec![1u8];
|
||||
data.extend(&test_payload);
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(data);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("mem should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(result, test_payload);
|
||||
assert_eq!(gas_left, U256::from(71_940));
|
||||
}
|
||||
|
||||
// memset test
|
||||
#[test]
|
||||
fn memset() {
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!("mem.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(vec![2u8, 228u8]);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("mem should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(result, vec![228u8; 8192]);
|
||||
assert_eq!(gas_left, U256::from(71_921));
|
||||
}
|
||||
|
||||
macro_rules! reqrep_test {
|
||||
($name: expr, $input: expr) => {
|
||||
reqrep_test!($name, $input, vm::EnvInfo::default(), HashMap::new())
|
||||
};
|
||||
($name: expr, $input: expr, $info: expr, $block_hashes: expr) => {
|
||||
{
|
||||
::ethcore_logger::init_log();
|
||||
let code = load_sample!($name);
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some($input);
|
||||
|
||||
let mut fake_ext = FakeExt::new();
|
||||
fake_ext.info = $info;
|
||||
fake_ext.blockhashes = $block_hashes;
|
||||
|
||||
let mut interpreter = wasm_interpreter();
|
||||
interpreter.exec(params, &mut fake_ext)
|
||||
.map(|result| match result {
|
||||
GasLeft::Known(_) => { panic!("Test is expected to return payload to check"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
})
|
||||
}
|
||||
};
|
||||
assert_eq!(gas_left, U256::from(84_223));
|
||||
}
|
||||
|
||||
// math_* tests check the ability of wasm contract to perform big integer operations
|
||||
@ -636,7 +572,7 @@ fn math_add() {
|
||||
U256::from_dec_str("1888888888888888888888888888887").unwrap(),
|
||||
(&result[..]).into()
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(95_384));
|
||||
assert_eq!(gas_left, U256::from(93_818));
|
||||
}
|
||||
|
||||
// multiplication
|
||||
@ -658,7 +594,7 @@ fn math_mul() {
|
||||
U256::from_dec_str("888888888888888888888888888887111111111111111111111111111112").unwrap(),
|
||||
(&result[..]).into()
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(94_374));
|
||||
assert_eq!(gas_left, U256::from(93_304));
|
||||
}
|
||||
|
||||
// subtraction
|
||||
@ -680,7 +616,7 @@ fn math_sub() {
|
||||
U256::from_dec_str("111111111111111111111111111111").unwrap(),
|
||||
(&result[..]).into()
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(95_372));
|
||||
assert_eq!(gas_left, U256::from(93_831));
|
||||
}
|
||||
|
||||
// subtraction with overflow
|
||||
@ -722,7 +658,54 @@ fn math_div() {
|
||||
U256::from_dec_str("1125000").unwrap(),
|
||||
(&result[..]).into()
|
||||
);
|
||||
assert_eq!(gas_left, U256::from(88_356));
|
||||
assert_eq!(gas_left, U256::from(90_607));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn storage_metering() {
|
||||
::ethcore_logger::init_log();
|
||||
|
||||
// #1
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let code = Arc::new(load_sample!("setter.wasm"));
|
||||
let address: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap();
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.address = address.clone();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(code.clone());
|
||||
params.data = Some(vec![
|
||||
0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d,
|
||||
0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b,
|
||||
]);
|
||||
|
||||
let gas_left = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
test_finalize(interpreter.exec(params, &mut ext)).unwrap()
|
||||
};
|
||||
|
||||
// 0 -> not 0
|
||||
assert_eq!(gas_left, U256::from(74_410));
|
||||
|
||||
// #2
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.address = address.clone();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(code.clone());
|
||||
params.data = Some(vec![
|
||||
0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d,
|
||||
0x6b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b,
|
||||
]);
|
||||
|
||||
let gas_left = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
test_finalize(interpreter.exec(params, &mut ext)).unwrap()
|
||||
};
|
||||
|
||||
// not 0 -> not 0
|
||||
assert_eq!(gas_left, U256::from(89_410));
|
||||
}
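Reading the two assertions together: the first call leaves 74,410 of 100,000 gas (a fresh 0 -> non-zero write), the second leaves 89,410 (overwriting an already-set slot), so the fresh write costs 15,000 gas more under this metering. A quick stand-alone check of that arithmetic (not part of the test suite):

fn main() {
    let fresh_write = 100_000u64 - 74_410;  // gas used when setting a zero slot
    let update_write = 100_000u64 - 89_410; // gas used when overwriting a set slot
    assert_eq!(fresh_write, 25_590);
    assert_eq!(update_write, 10_590);
    assert_eq!(fresh_write - update_write, 15_000);
}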
|
||||
|
||||
// This test checks the ability of wasm contract to invoke
|
||||
@ -810,7 +793,7 @@ fn externs() {
|
||||
"Gas limit requested and returned does not match"
|
||||
);
|
||||
|
||||
assert_eq!(gas_left, U256::from(95_321));
|
||||
assert_eq!(gas_left, U256::from(92_089));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -836,7 +819,7 @@ fn embedded_keccak() {
|
||||
};
|
||||
|
||||
assert_eq!(H256::from_slice(&result), H256::from("68371d7e884c168ae2022c82bd837d51837718a7f7dfb7aa3f753074a35e1d87"));
|
||||
assert_eq!(gas_left, U256::from(81_067));
|
||||
assert_eq!(gas_left, U256::from(84_223));
|
||||
}
|
||||
|
||||
/// This test checks the correctness of log extern
|
||||
@ -871,5 +854,5 @@ fn events() {
|
||||
assert_eq!(&log_entry.data, b"gnihtemos");
|
||||
|
||||
assert_eq!(&result, b"gnihtemos");
|
||||
assert_eq!(gas_left, U256::from(79_206));
|
||||
assert_eq!(gas_left, U256::from(81_235));
|
||||
}
|
||||
|
@ -15,6 +15,7 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::{fs, io};
|
||||
use std::io::Write;
|
||||
use std::path::{PathBuf, Path};
|
||||
use std::collections::HashMap;
|
||||
use time;
|
||||
@ -152,31 +153,39 @@ impl<T> DiskDirectory<T> where T: KeyFileManager {
|
||||
)
|
||||
}
|
||||
|
||||
/// insert account with given file name
|
||||
pub fn insert_with_filename(&self, account: SafeAccount, filename: String) -> Result<SafeAccount, Error> {
|
||||
|
||||
/// Insert an account with the given filename. If the filename duplicates that of any stored account and dedup is
/// true, a random suffix is appended to the filename.
|
||||
pub fn insert_with_filename(&self, account: SafeAccount, mut filename: String, dedup: bool) -> Result<SafeAccount, Error> {
|
||||
// path to keyfile
|
||||
let mut keyfile_path = self.path.join(filename.as_str());
|
||||
|
||||
// check for duplicate filename and append random suffix
|
||||
if dedup && keyfile_path.exists() {
|
||||
let suffix = ::random::random_string(4);
|
||||
filename.push_str(&format!("-{}", suffix));
|
||||
keyfile_path.set_file_name(&filename);
|
||||
}
|
||||
|
||||
// update account filename
|
||||
let original_account = account.clone();
|
||||
let mut account = account;
|
||||
account.filename = Some(filename.clone());
|
||||
account.filename = Some(filename);
|
||||
|
||||
{
|
||||
// Path to keyfile
|
||||
let mut keyfile_path = self.path.clone();
|
||||
keyfile_path.push(filename.as_str());
|
||||
|
||||
// save the file
|
||||
let mut file = fs::File::create(&keyfile_path)?;
|
||||
if let Err(err) = self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e))) {
|
||||
drop(file);
|
||||
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
// write key content
|
||||
self.key_manager.write(original_account, &mut file).map_err(|e| Error::Custom(format!("{:?}", e)))?;
|
||||
|
||||
file.flush()?;
|
||||
|
||||
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
|
||||
drop(file);
|
||||
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
|
||||
return Err(Error::Io(io::Error::last_os_error()));
|
||||
}
|
||||
|
||||
file.sync_all()?;
|
||||
}
|
||||
|
||||
Ok(account)
|
||||
@ -199,17 +208,13 @@ impl<T> KeyDirectory for DiskDirectory<T> where T: KeyFileManager {
|
||||
|
||||
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
|
||||
// Disk store handles updates correctly iff filename is the same
|
||||
self.insert(account)
|
||||
let filename = account_filename(&account);
|
||||
self.insert_with_filename(account, filename, false)
|
||||
}
|
||||
|
||||
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
|
||||
// build file path
|
||||
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
|
||||
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
|
||||
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
|
||||
});
|
||||
|
||||
self.insert_with_filename(account, filename)
|
||||
let filename = account_filename(&account);
|
||||
self.insert_with_filename(account, filename, true)
|
||||
}
|
||||
|
||||
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
|
||||
@ -285,6 +290,14 @@ impl KeyFileManager for DiskKeyFileManager {
|
||||
}
|
||||
}
|
||||
|
||||
fn account_filename(account: &SafeAccount) -> String {
|
||||
// build file path
|
||||
account.filename.clone().unwrap_or_else(|| {
|
||||
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
|
||||
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
extern crate tempdir;
|
||||
@ -316,6 +329,38 @@ mod test {
|
||||
let _ = fs::remove_dir_all(dir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_handle_duplicate_filenames() {
|
||||
// given
|
||||
let mut dir = env::temp_dir();
|
||||
dir.push("ethstore_should_handle_duplicate_filenames");
|
||||
let keypair = Random.generate().unwrap();
|
||||
let password = "hello world";
|
||||
let directory = RootDiskDirectory::create(dir.clone()).unwrap();
|
||||
|
||||
// when
|
||||
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
|
||||
let filename = "test".to_string();
|
||||
let dedup = true;
|
||||
|
||||
directory.insert_with_filename(account.clone(), "foo".to_string(), dedup).unwrap();
|
||||
let file1 = directory.insert_with_filename(account.clone(), filename.clone(), dedup).unwrap().filename.unwrap();
|
||||
let file2 = directory.insert_with_filename(account.clone(), filename.clone(), dedup).unwrap().filename.unwrap();
|
||||
let file3 = directory.insert_with_filename(account.clone(), filename.clone(), dedup).unwrap().filename.unwrap();
|
||||
|
||||
// then
|
||||
// the first file should have the original names
|
||||
assert_eq!(file1, filename);
|
||||
|
||||
// the following duplicate files should have a suffix appended
|
||||
assert!(file2 != file3);
|
||||
assert_eq!(file2.len(), filename.len() + 5);
|
||||
assert_eq!(file3.len(), filename.len() + 5);
|
||||
|
||||
// cleanup
|
||||
let _ = fs::remove_dir_all(dir);
|
||||
}
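The "+ 5" in the length assertions above corresponds to the dash plus the 4-character random suffix appended on a filename collision. A tiny stand-alone illustration of that naming convention (the helper name here is illustrative, not the store's API):

fn dedup_name(original: &str, suffix: &str) -> String {
    // On collision the store appends "-" followed by a 4-character random suffix.
    format!("{}-{}", original, suffix)
}

fn main() {
    let name = dedup_name("test", "a1b2");
    assert_eq!(name, "test-a1b2");
    assert_eq!(name.len(), "test".len() + 5);
}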
|
||||
|
||||
#[test]
|
||||
fn should_manage_vaults() {
|
||||
// given
|
||||
|
@ -106,7 +106,7 @@ impl VaultDiskDirectory {
|
||||
fn copy_to_vault(&self, vault: &VaultDiskDirectory) -> Result<(), Error> {
|
||||
for account in self.load()? {
|
||||
let filename = account.filename.clone().expect("self is instance of DiskDirectory; DiskDirectory fills filename in load; qed");
|
||||
vault.insert_with_filename(account, filename)?;
|
||||
vault.insert_with_filename(account, filename, true)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -166,9 +166,14 @@ impl SecretStore for EthStore {
|
||||
self.insert_account(vault, keypair.secret().clone(), password)
|
||||
}
|
||||
|
||||
fn import_wallet(&self, vault: SecretVaultRef, json: &[u8], password: &str) -> Result<StoreAccountRef, Error> {
|
||||
fn import_wallet(&self, vault: SecretVaultRef, json: &[u8], password: &str, gen_id: bool) -> Result<StoreAccountRef, Error> {
|
||||
let json_keyfile = json::KeyFile::load(json).map_err(|_| Error::InvalidKeyFile("Invalid JSON format".to_owned()))?;
|
||||
let mut safe_account = SafeAccount::from_file(json_keyfile, None);
|
||||
|
||||
if gen_id {
|
||||
safe_account.id = Random::random();
|
||||
}
|
||||
|
||||
let secret = safe_account.crypto.secret(password).map_err(|_| Error::InvalidPassword)?;
|
||||
safe_account.address = KeyPair::from_secret(secret)?.address();
|
||||
self.store.import(vault, safe_account)
|
||||
|
@ -116,7 +116,7 @@ pub trait SecretStore: SimpleSecretStore {
|
||||
/// Imports presale wallet
|
||||
fn import_presale(&self, vault: SecretVaultRef, json: &[u8], password: &str) -> Result<StoreAccountRef, Error>;
|
||||
/// Imports existing JSON wallet
|
||||
fn import_wallet(&self, vault: SecretVaultRef, json: &[u8], password: &str) -> Result<StoreAccountRef, Error>;
|
||||
fn import_wallet(&self, vault: SecretVaultRef, json: &[u8], password: &str, gen_id: bool) -> Result<StoreAccountRef, Error>;
|
||||
/// Copies account between stores and vaults.
|
||||
fn copy_account(&self, new_store: &SimpleSecretStore, new_vault: SecretVaultRef, account: &StoreAccountRef, password: &str, new_password: &str) -> Result<(), Error>;
|
||||
/// Checks if password matches given account.
|
||||
|
@ -4,8 +4,9 @@ set -e
|
||||
# variables
|
||||
PVER="1-9"
|
||||
PTYPE="v1"
|
||||
TRACK="beta"
|
||||
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
|
||||
PRE_REPO="js-dist-paritytech/parity-${CI_BUILD_REF_NAME}-${PVER}-${PTYPE}.git"
|
||||
PRE_REPO="js-dist-paritytech/parity-${TRACK}-${PVER}-${PTYPE}.git"
|
||||
PRE_REPO_TOKEN="https://${GITHUB_JS_PRECOMPILED}:@github.com/${PRE_REPO}"
|
||||
BASEDIR=`dirname $0`
|
||||
|
||||
|
@ -1 +1 @@
|
||||
// test script 23
|
||||
// test script 25
|
||||
|
@ -306,7 +306,7 @@ export function fetchTokensBalances (updates, skipNotifications = false) {
|
||||
const tokenIdsToFetch = Object.values(balances)
|
||||
.reduce((tokenIds, balance) => {
|
||||
const nextTokenIds = Object.keys(balance)
|
||||
.filter((tokenId) => balance[tokenId].gt(0));
|
||||
.filter((tokenId) => balance[tokenId] && balance[tokenId].gt(0));
|
||||
|
||||
return tokenIds.concat(nextTokenIds);
|
||||
}, []);
|
||||
@ -328,7 +328,7 @@ export function fetchTokensBalances (updates, skipNotifications = false) {
|
||||
dispatch(setBalances(balances, skipNotifications));
|
||||
})
|
||||
.catch((error) => {
|
||||
console.warn('balances::fetchTokensBalances', error);
|
||||
console.warn('v1: balances::fetchTokensBalances', error);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
@ -105,7 +105,7 @@ export function loadTokens (options = {}) {
|
||||
}
|
||||
|
||||
export function loadTokensBasics (tokenIndexes, options) {
|
||||
const limit = 64;
|
||||
const limit = 128;
|
||||
|
||||
return (dispatch, getState) => {
|
||||
const { api } = getState();
|
||||
@ -154,7 +154,7 @@ export function loadTokensBasics (tokenIndexes, options) {
|
||||
|
||||
export function fetchTokens (_tokenIndexes) {
|
||||
const tokenIndexes = uniq(_tokenIndexes || []);
|
||||
const tokenChunks = chunk(tokenIndexes, 64);
|
||||
const tokenChunks = chunk(tokenIndexes, 128);
|
||||
|
||||
return (dispatch, getState) => {
|
||||
const { tokenReg } = Contracts.get();
|
||||
|
@ -65,15 +65,15 @@ export default function (api, browserHistory, forEmbed = false) {
|
||||
.then(() => console.log('v1: started Status Provider'))
|
||||
|
||||
.then(() => console.log('v1: starting Personal Provider...'))
|
||||
.then(() => PersonalProvider.start())
|
||||
.then(() => withTimeoutForLight('personal', PersonalProvider.start(), store))
|
||||
.then(() => console.log('v1: started Personal Provider'))
|
||||
|
||||
.then(() => console.log('v1: starting Balances Provider...'))
|
||||
.then(() => BalancesProvider.start())
|
||||
.then(() => withTimeoutForLight('balances', BalancesProvider.start(), store))
|
||||
.then(() => console.log('v1: started Balances Provider'))
|
||||
|
||||
.then(() => console.log('v1: starting Tokens Provider...'))
|
||||
.then(() => TokensProvider.start())
|
||||
.then(() => withTimeoutForLight('tokens', TokensProvider.start(), store))
|
||||
.then(() => console.log('v1: started Tokens Provider'));
|
||||
};
|
||||
|
||||
@ -97,3 +97,39 @@ export default function (api, browserHistory, forEmbed = false) {
|
||||
|
||||
return store;
|
||||
}
|
||||
|
||||
function withTimeoutForLight (id, promise, store) {
  const { nodeKind } = store.getState().nodeStatus;
  const isLightNode = nodeKind.capability !== 'full';

  if (!isLightNode) {
    // make sure that no values are passed
    return promise.then(() => {});
  }

  return new Promise((resolve, reject) => {
    let isResolved = false;
    const doResolve = () => {
      if (!isResolved) {
        isResolved = true;
        resolve();
      }
    };
    const timeout = setTimeout(() => {
      console.warn(`Resolving ${id} by timeout.`);
      doResolve();
    }, 1000);

    promise
      .then(() => {
        clearTimeout(timeout);
        doResolve();
      })
      .catch(err => {
        clearTimeout(timeout);
        if (!isResolved) {
          reject(err);
        }
      });
  });
}
|
||||
|
@ -14,7 +14,7 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import { range } from 'lodash';
|
||||
import { chunk, range } from 'lodash';
|
||||
import BigNumber from 'bignumber.js';
|
||||
|
||||
import { hashToImageUrl } from '~/redux/util';
|
||||
@ -58,13 +58,11 @@ export function fetchTokensBasics (api, tokenReg, start = 0, limit = 100) {
|
||||
return decodeArray(api, 'address[]', result);
|
||||
})
|
||||
.then((tokenAddresses) => {
|
||||
return tokenAddresses.map((tokenAddress, index) => {
|
||||
return tokenAddresses.map((address, index) => {
|
||||
const tokenIndex = start + index;
|
||||
|
||||
return {
|
||||
address: /^0x0*$/.test(tokenAddress)
|
||||
? ''
|
||||
: tokenAddress,
|
||||
address,
|
||||
id: getTokenId(tokenIndex),
|
||||
index: tokenIndex,
|
||||
fetched: false
|
||||
@ -80,12 +78,17 @@ export function fetchTokensBasics (api, tokenReg, start = 0, limit = 100) {
|
||||
|
||||
return tokens.map((token) => {
|
||||
if (balances[token.id] && balances[token.id].gt(0)) {
|
||||
token.address = '';
|
||||
token.address = null;
|
||||
}
|
||||
|
||||
return token;
|
||||
});
|
||||
});
|
||||
})
|
||||
.then((tokens) => {
|
||||
return tokens.filter(({ address }) => {
|
||||
return address && !/^0x0*$/.test(address);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@ -195,19 +198,22 @@ export function fetchAccountsBalances (api, tokens, updates) {
|
||||
});
|
||||
|
||||
const tokenPromise = Object.keys(tokenUpdates)
|
||||
.reduce((tokenPromise, accountAddress) => {
|
||||
.reduce((promises, accountAddress) => {
|
||||
const tokenIds = tokenUpdates[accountAddress];
|
||||
const updateTokens = tokens
|
||||
.filter((t) => tokenIds.includes(t.id));
|
||||
|
||||
return tokenPromise
|
||||
.then(() => fetchTokensBalances(api, updateTokens, [ accountAddress ]))
|
||||
.then((balances) => {
|
||||
tokensBalances[accountAddress] = balances[accountAddress];
|
||||
});
|
||||
}, Promise.resolve());
|
||||
promises.push(
|
||||
fetchTokensBalances(api, updateTokens, [ accountAddress ])
|
||||
.then((balances) => {
|
||||
tokensBalances[accountAddress] = balances[accountAddress];
|
||||
})
|
||||
);
|
||||
|
||||
return Promise.all([ ethPromise, tokenPromise ])
|
||||
return promises;
|
||||
}, []);
|
||||
|
||||
return Promise.all([ ethPromise, Promise.all(tokenPromise) ])
|
||||
.then(() => {
|
||||
const balances = Object.assign({}, tokensBalances);
|
||||
|
||||
@ -243,29 +249,24 @@ function fetchEthBalances (api, accountAddresses) {
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTokensBalances (api, tokens, accountAddresses) {
|
||||
const tokenAddresses = tokens.map((t) => t.address);
|
||||
const tokensBalancesCallData = encode(
|
||||
api,
|
||||
[ 'address[]', 'address[]' ],
|
||||
[ accountAddresses, tokenAddresses ]
|
||||
);
|
||||
function fetchTokensBalances (api, _tokens, accountAddresses) {
|
||||
const promises = chunk(_tokens, 128).map((tokens) => {
|
||||
const data = tokensBalancesBytecode + encode(
|
||||
api,
|
||||
[ 'address[]', 'address[]' ],
|
||||
[ accountAddresses, tokens.map(({ address }) => address) ]
|
||||
);
|
||||
|
||||
return api.eth
|
||||
.call({ data: tokensBalancesBytecode + tokensBalancesCallData })
|
||||
.then((result) => {
|
||||
const rawBalances = decodeArray(api, 'uint[]', result);
|
||||
return api.eth.call({ data }).then((result) => {
|
||||
const balances = {};
|
||||
const rawBalances = decodeArray(api, 'uint[]', result);
|
||||
|
||||
accountAddresses.forEach((accountAddress, accountIndex) => {
|
||||
const preIndex = accountIndex * tokens.length;
|
||||
const balance = {};
|
||||
const preIndex = accountIndex * tokenAddresses.length;
|
||||
|
||||
tokenAddresses.forEach((tokenAddress, tokenIndex) => {
|
||||
const index = preIndex + tokenIndex;
|
||||
const token = tokens[tokenIndex];
|
||||
|
||||
balance[token.id] = rawBalances[index];
|
||||
tokens.forEach((token, tokenIndex) => {
|
||||
balance[token.id] = rawBalances[preIndex + tokenIndex];
|
||||
});
|
||||
|
||||
balances[accountAddress] = balance;
|
||||
@ -273,6 +274,31 @@ function fetchTokensBalances (api, tokens, accountAddresses) {
|
||||
|
||||
return balances;
|
||||
});
|
||||
});
|
||||
|
||||
return Promise.all(promises).then((results) => {
|
||||
return results.reduce((combined, result) => {
|
||||
Object
|
||||
.keys(result)
|
||||
.forEach((address) => {
|
||||
if (!combined[address]) {
|
||||
combined[address] = {};
|
||||
}
|
||||
|
||||
Object
|
||||
.keys(result[address])
|
||||
.forEach((token) => {
|
||||
const value = result[address][token];
|
||||
|
||||
if (value && value.gt(0)) {
|
||||
combined[address][token] = result[address][token];
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return combined;
|
||||
}, {});
|
||||
});
|
||||
}
|
||||
|
||||
function getTokenId (...args) {
|
||||
|
js/package-lock.json — 1,619 changes (generated file; diff not shown)
@ -44,17 +44,17 @@
|
||||
"test:coverage": "cross-env NODE_ENV=test istanbul cover _mocha -- 'src/**/*.spec.js'"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@parity/dapp-console": "paritytech/dapp-console",
|
||||
"@parity/dapp-console": "parity-js/dapp-console#5562e6c11783dc82c1ee37fd85dc3403ff28f8f5",
|
||||
"@parity/dapp-dapp-methods": "js-dist-paritytech/dapp-dapp-methods",
|
||||
"@parity/dapp-dapp-visible": "js-dist-paritytech/dapp-dapp-visible",
|
||||
"@parity/dapp-dappreg": "paritytech/dapp-dappreg",
|
||||
"@parity/dapp-githubhint": "paritytech/dapp-githubhint",
|
||||
"@parity/dapp-localtx": "paritytech/dapp-localtx",
|
||||
"@parity/dapp-registry": "paritytech/dapp-registry",
|
||||
"@parity/dapp-signaturereg": "paritytech/dapp-signaturereg",
|
||||
"@parity/dapp-dappreg": "parity-js/dapp-dappreg#66f2e52dfa9a783cc0ca616505460e5e34ebdf54",
|
||||
"@parity/dapp-githubhint": "parity-js/dapp-githubhint#b5cdef016e8bead7669ca077be526897ee42f83a",
|
||||
"@parity/dapp-localtx": "parity-js/dapp-localtx#ac1d82c7f55bf0b6e2bf0e56044f95fa49c52ea9",
|
||||
"@parity/dapp-registry": "parity-js/dapp-registry#d3aeab6437ebcc5537e5f71a1cd49c8de8329f34",
|
||||
"@parity/dapp-signaturereg": "parity-js/dapp-signaturereg#f597e976bd89b07809173d7a35e3929ac81e7e75",
|
||||
"@parity/dapp-status": "js-dist-paritytech/dapp-status",
|
||||
"@parity/dapp-tokendeploy": "paritytech/dapp-tokendeploy",
|
||||
"@parity/dapp-tokenreg": "paritytech/dapp-tokenreg",
|
||||
"@parity/dapp-tokendeploy": "parity-js/dapp-tokendeploy#0f6f9f2adb82c02e35056dba792a75e95f440cdd",
|
||||
"@parity/dapp-tokenreg": "parity-js/dapp-tokenreg#9750a2c10a934f9ae0e6e7fd6fa5b9e25a7b0785",
|
||||
"babel-cli": "6.26.0",
|
||||
"babel-core": "6.26.0",
|
||||
"babel-eslint": "7.1.1",
|
||||
@ -141,12 +141,12 @@
|
||||
"yargs": "6.6.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@parity/api": "^2.1.14",
|
||||
"@parity/plugin-signer-account": "paritytech/plugin-signer-account",
|
||||
"@parity/plugin-signer-default": "paritytech/plugin-signer-default",
|
||||
"@parity/plugin-signer-hardware": "paritytech/plugin-signer-hardware",
|
||||
"@parity/plugin-signer-qr": "paritytech/plugin-signer-qr",
|
||||
"@parity/shared": "2.2.21",
|
||||
"@parity/api": "2.1.15",
|
||||
"@parity/plugin-signer-account": "parity-js/plugin-signer-account#c1272caa242c8b97dac78e5d0b1e068614657fdc",
|
||||
"@parity/plugin-signer-default": "parity-js/plugin-signer-default#9a47bded9d6d70b69bb2f719732bd6f7854d1842",
|
||||
"@parity/plugin-signer-hardware": "parity-js/plugin-signer-hardware#4320d818a053d4efae890b74a7476e4c8dc6ba10",
|
||||
"@parity/plugin-signer-qr": "parity-js/plugin-signer-qr#2d1fafad347ba53eaf58c14265d4d07631d6a45c",
|
||||
"@parity/shared": "2.2.23",
|
||||
"@parity/ui": "3.0.22",
|
||||
"keythereum": "1.0.2",
|
||||
"lodash.flatten": "4.4.0",
|
||||
@ -170,7 +170,6 @@
|
||||
"redux": "3.7.2",
|
||||
"semantic-ui-react": "0.77.0",
|
||||
"solc": "ngotchac/solc-js",
|
||||
"store": "1.3.20",
|
||||
"web3": "1.0.0-beta.26"
|
||||
"store": "1.3.20"
|
||||
}
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ set -e
|
||||
# variables
|
||||
PVER="1-9"
|
||||
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
|
||||
BRANCH=$CI_BUILD_REF_NAME
|
||||
BRANCH="beta"
|
||||
GIT_PARITY="https://${GITHUB_JS_PRECOMPILED}:@github.com/paritytech/parity.git"
|
||||
|
||||
echo "*** [cargo] Setting up GitHub config for parity"
|
||||
|
@ -4,8 +4,9 @@ set -e
|
||||
# variables
|
||||
PVER="1-9"
|
||||
PTYPE="shell"
|
||||
TRACK="beta"
|
||||
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
|
||||
PRE_REPO="js-dist-paritytech/parity-${CI_BUILD_REF_NAME}-${PVER}-${PTYPE}.git"
|
||||
PRE_REPO="js-dist-paritytech/parity-${TRACK}-${PVER}-${PTYPE}.git"
|
||||
PRE_REPO_TOKEN="https://${GITHUB_JS_PRECOMPILED}:@github.com/${PRE_REPO}"
|
||||
BASEDIR=`dirname $0`
|
||||
|
||||
|
@ -1 +1 @@
|
||||
// test script 29
|
||||
// test script 31
|
||||
|
@ -26,11 +26,14 @@
|
||||
.list {
|
||||
margin: 0 !important;
|
||||
padding: 1em 1em !important;
|
||||
background-color: #f5f5f5;
|
||||
background-color: white;
|
||||
|
||||
}
|
||||
|
||||
.isDefault {
|
||||
background-color: white;
|
||||
.accountsList {
|
||||
background-color: #f5f5f5;
|
||||
height: 300px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.hasOtherAccounts {
|
||||
|
@ -68,14 +68,14 @@ class DefaultAccount extends Component {
|
||||
}
|
||||
content={
|
||||
<div>
|
||||
<List relaxed='very' selection className={ [styles.list, styles.isDefault, allAccounts.length > 1 && styles.hasOtherAccounts].join(' ') }>
|
||||
<List relaxed='very' selection className={ [styles.list, allAccounts.length > 1 && styles.hasOtherAccounts].join(' ') }>
|
||||
<AccountItem
|
||||
isDefault
|
||||
account={ defaultAccount }
|
||||
/>
|
||||
</List>
|
||||
{allAccounts.length > 1 &&
|
||||
<List relaxed='very' selection className={ styles.list } divided>
|
||||
<List relaxed='very' selection className={ [styles.list, styles.accountsList].join(' ') } divided>
|
||||
{allAccounts
|
||||
.filter(({ address }) => address !== defaultAddress)
|
||||
.map(account => (
|
||||
|
@ -16,7 +16,6 @@
|
||||
|
||||
import Api from '@parity/api';
|
||||
import qs from 'query-string';
|
||||
import Web3 from 'web3';
|
||||
|
||||
function initProvider () {
|
||||
const path = window.location.pathname.split('/');
|
||||
@ -48,24 +47,9 @@ function initProvider () {
|
||||
}
|
||||
|
||||
function initWeb3 (ethereum) {
|
||||
// FIXME: Use standard provider for web3
|
||||
const provider = new Api.Provider.SendAsync(ethereum);
|
||||
const web3 = new Web3(provider);
|
||||
const currentProvider = new Api.Provider.SendAsync(ethereum);
|
||||
|
||||
if (!web3.currentProvider) {
|
||||
web3.currentProvider = provider;
|
||||
}
|
||||
|
||||
// set default account
|
||||
web3.eth.getAccounts((error, accounts) => {
|
||||
if (error || !accounts || !accounts[0]) {
|
||||
return;
|
||||
}
|
||||
|
||||
web3.eth.defaultAccount = accounts[0];
|
||||
});
|
||||
|
||||
window.web3 = web3;
|
||||
window.web3 = { currentProvider };
|
||||
}
|
||||
|
||||
function initParity (ethereum) {
|
||||
|
@ -13,6 +13,8 @@
|
||||
0ACF9AC71E30FAB600D5C935 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = 0ACF9AC51E30FAB600D5C935 /* MainMenu.xib */; };
|
||||
0AE564F11E3CE42C00BD01F7 /* GetBSDProcessList.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0AE564F01E3CE42C00BD01F7 /* GetBSDProcessList.swift */; };
|
||||
0AED4DA01E3E22F800BF87C0 /* ethstore in CopyFiles */ = {isa = PBXBuildFile; fileRef = 0AED4D9F1E3E22F800BF87C0 /* ethstore */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; };
|
||||
84CF92B3200E559900AD6E78 /* parity-evm in CopyFiles */ = {isa = PBXBuildFile; fileRef = 84CF92B2200E559900AD6E78 /* parity-evm */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; };
|
||||
84CF92B6200E56AE00AD6E78 /* ethkey in CopyFiles */ = {isa = PBXBuildFile; fileRef = 84CF92B5200E56AE00AD6E78 /* ethkey */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXCopyFilesBuildPhase section */
|
||||
@ -22,6 +24,8 @@
|
||||
dstPath = "";
|
||||
dstSubfolderSpec = 6;
|
||||
files = (
|
||||
84CF92B6200E56AE00AD6E78 /* ethkey in CopyFiles */,
|
||||
84CF92B3200E559900AD6E78 /* parity-evm in CopyFiles */,
|
||||
0AED4DA01E3E22F800BF87C0 /* ethstore in CopyFiles */,
|
||||
0A7A475D1E3D2CDD0093D1AB /* parity in CopyFiles */,
|
||||
);
|
||||
@ -38,6 +42,8 @@
|
||||
0ACF9AC81E30FAB600D5C935 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
|
||||
0AE564F01E3CE42C00BD01F7 /* GetBSDProcessList.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GetBSDProcessList.swift; sourceTree = "<group>"; };
|
||||
0AED4D9F1E3E22F800BF87C0 /* ethstore */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.executable"; name = ethstore; path = ../target/release/ethstore; sourceTree = "<group>"; };
|
||||
84CF92B2200E559900AD6E78 /* parity-evm */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.executable"; name = "parity-evm"; path = "../target/release/parity-evm"; sourceTree = "<group>"; };
|
||||
84CF92B5200E56AE00AD6E78 /* ethkey */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.executable"; name = ethkey; path = ../target/release/ethkey; sourceTree = "<group>"; };
|
||||
/* End PBXFileReference section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
@ -54,6 +60,8 @@
|
||||
0ACF9AB51E30FAB600D5C935 = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
84CF92B5200E56AE00AD6E78 /* ethkey */,
|
||||
84CF92B2200E559900AD6E78 /* parity-evm */,
|
||||
0AED4D9F1E3E22F800BF87C0 /* ethstore */,
|
||||
0A7A475C1E3D2CDD0093D1AB /* parity */,
|
||||
0ACF9AC01E30FAB600D5C935 /* Parity Ethereum */,
|
||||
@ -110,7 +118,7 @@
|
||||
isa = PBXProject;
|
||||
attributes = {
|
||||
LastSwiftUpdateCheck = 0800;
|
||||
LastUpgradeCheck = 0800;
|
||||
LastUpgradeCheck = 0820;
|
||||
ORGANIZATIONNAME = "Parity Technologies";
|
||||
TargetAttributes = {
|
||||
0ACF9ABD1E30FAB600D5C935 = {
|
||||
@ -192,6 +200,7 @@
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVES = YES;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
@ -241,6 +250,7 @@
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVES = YES;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
|
@ -462,7 +462,7 @@
|
||||
<key>OVERWRITE_PERMISSIONS</key>
|
||||
<false/>
|
||||
<key>VERSION</key>
|
||||
<string>1.9.0</string>
|
||||
<string>1.9.3</string>
|
||||
</dict>
|
||||
<key>UUID</key>
|
||||
<string>2DCD5B81-7BAF-4DA1-9251-6274B089FD36</string>
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
@ -22,12 +22,12 @@ import Cocoa
|
||||
class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
@IBOutlet weak var statusMenu: NSMenu!
|
||||
@IBOutlet weak var startAtLogonMenuItem: NSMenuItem!
|
||||
|
||||
|
||||
let statusItem = NSStatusBar.system().statusItem(withLength: NSVariableStatusItemLength)
|
||||
var parityPid: Int32? = nil
|
||||
var commandLine: [String] = []
|
||||
let defaultDefaults = "{\"fat_db\":false,\"mode\":\"passive\",\"mode.alarm\":3600,\"mode.timeout\":300,\"pruning\":\"fast\",\"tracing\":false}"
|
||||
|
||||
|
||||
func menuAppPath() -> String {
|
||||
return Bundle.main.executablePath!
|
||||
}
|
||||
@ -40,20 +40,20 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
return NSRunningApplication.runningApplications(withBundleIdentifier: Bundle.main.bundleIdentifier!).count > 1
|
||||
|
||||
}
|
||||
|
||||
|
||||
func isParityRunning() -> Bool {
|
||||
if let pid = self.parityPid {
|
||||
return kill(pid, 0) == 0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
func killParity() {
|
||||
if let pid = self.parityPid {
|
||||
kill(pid, SIGKILL)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func openUI() {
|
||||
let parity = Process()
|
||||
parity.launchPath = self.parityPath()
|
||||
@ -61,29 +61,29 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
parity.arguments!.append("ui")
|
||||
parity.launch()
|
||||
}
|
||||
|
||||
|
||||
func writeConfigFiles() {
|
||||
let basePath = FileManager.default.urls(for: .applicationSupportDirectory, in: .userDomainMask).first?
|
||||
.appendingPathComponent(Bundle.main.bundleIdentifier!, isDirectory: true)
|
||||
|
||||
|
||||
if FileManager.default.fileExists(atPath: basePath!.path) {
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
do {
|
||||
let defaultsFileDir = basePath?.appendingPathComponent("chains").appendingPathComponent("ethereum")
|
||||
let defaultsFile = defaultsFileDir?.appendingPathComponent("user_defaults")
|
||||
|
||||
|
||||
try FileManager.default.createDirectory(atPath: (defaultsFileDir?.path)!, withIntermediateDirectories: true, attributes: nil)
|
||||
if !FileManager.default.fileExists(atPath: defaultsFile!.path) {
|
||||
try defaultDefaults.write(to: defaultsFile!, atomically: false, encoding: String.Encoding.utf8)
|
||||
}
|
||||
|
||||
|
||||
let configFile = basePath?.appendingPathComponent("config.toml")
|
||||
}
|
||||
catch {}
|
||||
}
|
||||
|
||||
|
||||
func autostartEnabled() -> Bool {
|
||||
return itemReferencesInLoginItems().existingReference != nil
|
||||
}
|
||||
@ -123,7 +123,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
}
|
||||
return (nil, nil)
|
||||
}
|
||||
|
||||
|
||||
func toggleLaunchAtStartup() {
|
||||
let itemReferences = itemReferencesInLoginItems()
|
||||
let shouldBeToggled = (itemReferences.existingReference == nil)
|
||||
@ -155,7 +155,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
|
||||
func launchParity() {
|
||||
self.commandLine = CommandLine.arguments.dropFirst().filter({ $0 != "ui"})
|
||||
|
||||
|
||||
let processes = GetBSDProcessList()!
|
||||
let parityProcess = processes.index(where: {
|
||||
var name = $0.kp_proc.p_comm
|
||||
@ -166,7 +166,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
}
|
||||
return str == "parity"
|
||||
})
|
||||
|
||||
|
||||
if parityProcess == nil {
|
||||
let parity = Process()
|
||||
let p = self.parityPath()
|
||||
@ -178,7 +178,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
self.parityPid = processes[parityProcess!].kp_proc.p_pid
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func applicationDidFinishLaunching(_ aNotification: Notification) {
|
||||
if self.isAlreadyRunning() {
|
||||
openUI()
|
||||
@ -188,12 +188,12 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
|
||||
self.writeConfigFiles()
|
||||
self.launchParity()
|
||||
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: {_ in
|
||||
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: {_ in
|
||||
if !self.isParityRunning() {
|
||||
NSApplication.shared().terminate(self)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
let icon = NSImage(named: "statusIcon")
|
||||
icon?.isTemplate = true // best for dark mode
|
||||
statusItem.image = icon
|
||||
@ -206,19 +206,18 @@ class AppDelegate: NSObject, NSApplicationDelegate {
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
@IBAction func quitClicked(_ sender: NSMenuItem) {
|
||||
self.killParity()
|
||||
NSApplication.shared().terminate(self)
|
||||
}
|
||||
|
||||
|
||||
@IBAction func openClicked(_ sender: NSMenuItem) {
|
||||
self.openUI()
|
||||
}
|
||||
|
||||
|
||||
@IBAction func startAtLogonClicked(_ sender: NSMenuItem) {
|
||||
self.toggleLaunchAtStartup()
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
(Binary image assets updated; sizes changed: 115 KiB → 51 KiB, 372 B → 679 B, 1.3 KiB → 3.6 KiB, 707 B → 1.7 KiB.)
@ -1,4 +1,4 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
@ -21,21 +21,21 @@ import Foundation
|
||||
import Darwin
|
||||
|
||||
public func GetBSDProcessList() -> ([kinfo_proc]?) {
|
||||
|
||||
|
||||
var done = false
|
||||
var result: [kinfo_proc]?
|
||||
var err: Int32
|
||||
|
||||
|
||||
repeat {
|
||||
let name = [CTL_KERN, KERN_PROC, KERN_PROC_ALL, 0];
|
||||
let namePointer = name.withUnsafeBufferPointer { UnsafeMutablePointer<Int32>(mutating: $0.baseAddress) }
|
||||
var length: Int = 0
|
||||
|
||||
|
||||
err = sysctl(namePointer, u_int(name.count), nil, &length, nil, 0)
|
||||
if err == -1 {
|
||||
err = errno
|
||||
}
|
||||
|
||||
|
||||
if err == 0 {
|
||||
let count = length / MemoryLayout<kinfo_proc>.stride
|
||||
result = [kinfo_proc](repeating: kinfo_proc(), count: count)
|
||||
@ -54,6 +54,6 @@ public func GetBSDProcessList() -> ([kinfo_proc]?) {
|
||||
}
|
||||
}
|
||||
} while err == 0 && !done
|
||||
|
||||
|
||||
return result
|
||||
}
|
||||
|
@ -3,8 +3,6 @@ Parity Wallet
|
||||
|
||||
Welcome to Parity Wallet, your all-in-one Ethereum node and wallet.
|
||||
|
||||
WARNING: This installer is **EXPERIMENTAL**. Use it at your own risk.
|
||||
|
||||
If you continue, Parity will be installed as a user service. You will be able to use the Parity Wallet through your browser by using the menu bar icon, following the shortcut in the Launchpad or navigating to http://localhost:8080/ in your browser.
|
||||
If you continue, Parity will be installed as a user service. You will be able to use the Parity Wallet through your browser by using the menu bar icon, following the shortcut in the Launchpad or navigating to http://localhost:8180/ in your browser.
|
||||
|
||||
Parity is distributed under the terms of the GPL.
|
||||
|
@ -9,5 +9,4 @@ fi
|
||||
PLIST=~/Library/LaunchAgents/io.parity.ethereum.plist
|
||||
su $SUDO_USER -c "launchctl stop io.parity.ethereum"
|
||||
su $SUDO_USER -c "launchctl unload $PLIST"
|
||||
rm -f /usr/local/libexec/parity /usr/local/libexec/uninstall-parity.sh /usr/local/bin/ethstore $PLIST
|
||||
|
||||
rm -f /usr/local/libexec/parity /usr/local/libexec/uninstall-parity.sh /usr/local/bin/ethstore /usr/local/bin/ethkey /usr/local/bin/parity-evm $PLIST
|
||||
|
@ -6,17 +6,17 @@
|
||||
!define SYNC_TERM 0x00100001
|
||||
|
||||
!define APPNAME "Parity"
|
||||
!define COMPANYNAME "Parity"
|
||||
!define COMPANYNAME "Parity Technologies"
|
||||
!define DESCRIPTION "Fast, light, robust Ethereum implementation"
|
||||
!define VERSIONMAJOR 1
|
||||
!define VERSIONMINOR 9
|
||||
!define VERSIONBUILD 0
|
||||
!define VERSIONBUILD 3
|
||||
!define ARGS ""
|
||||
!define FIRST_START_ARGS "--mode=passive ui"
|
||||
|
||||
!addplugindir .\
|
||||
|
||||
!define HELPURL "https://github.com/paritytech/parity/wiki" # "Support Information" link
|
||||
!define HELPURL "https://paritytech.github.io/wiki/" # "Support Information" link
|
||||
!define UPDATEURL "https://github.com/paritytech/parity/releases" # "Product Updates" link
|
||||
!define ABOUTURL "https://github.com/paritytech/parity" # "Publisher" link
|
||||
!define INSTALLSIZE 26120
|
||||
@ -88,14 +88,13 @@ section "install"
|
||||
!insertmacro TerminateApp
|
||||
|
||||
# Files added here should be removed by the uninstaller (see section "uninstall")
|
||||
file /oname=parity.exe ..\target\release\parity.exe
|
||||
file /oname=parity.exe ..\target\x86_64-pc-windows-msvc\release\parity.exe
|
||||
file /oname=parity-evm.exe ..\target\x86_64-pc-windows-msvc\release\parity-evm.exe
|
||||
file /oname=ethstore.exe ..\target\x86_64-pc-windows-msvc\release\ethstore.exe
|
||||
file /oname=ethkey.exe ..\target\x86_64-pc-windows-msvc\release\ethkey.exe
|
||||
file /oname=ptray.exe ..\windows\ptray\x64\Release\ptray.exe
|
||||
|
||||
file "logo.ico"
|
||||
file vc_redist.x64.exe
|
||||
|
||||
ExecWait '"$INSTDIR\vc_redist.x64.exe" /passive /norestart'
|
||||
delete $INSTDIR\vc_redist.x64.exe
|
||||
# Add any other files for the install directory (license files, app data, etc) here
|
||||
|
||||
# Uninstaller - See function un.onInit and section "uninstall" for configuration
|
||||
@ -167,6 +166,9 @@ section "uninstall"
|
||||
|
||||
# Remove files
|
||||
delete $INSTDIR\parity.exe
|
||||
delete $INSTDIR\parity-evm.exe
|
||||
delete $INSTDIR\ethstore.exe
|
||||
delete $INSTDIR\ethkey.exe
|
||||
delete $INSTDIR\ptray.exe
|
||||
delete $INSTDIR\logo.ico
|
||||
|
||||
@ -187,4 +189,3 @@ section "uninstall"
|
||||
DeleteRegValue HKLM "Software\Microsoft\Windows\CurrentVersion\Run" "${APPNAME}"
|
||||
DeleteRegValue HKCU "Software\Microsoft\Windows\CurrentVersion\Run" "${APPNAME}"
|
||||
sectionEnd
|
||||
|
||||
|
@ -257,12 +257,7 @@ usage! {
|
||||
|
||||
ARG arg_mode: (String) = "last", or |c: &Config| otry!(c.parity).mode.clone(),
|
||||
"--mode=[MODE]",
|
||||
"Set the operating mode. MODE can be one of:
|
||||
last - Uses the last-used mode, active if none.
|
||||
active - Parity continuously syncs the chain.
|
||||
passive - Parity syncs initially, then sleeps and wakes regularly to resync.
|
||||
dark - Parity syncs only when the RPC is active.
|
||||
offline - Parity doesn't sync.",
|
||||
"Set the operating mode. MODE can be one of: last - Uses the last-used mode, active if none; active - Parity continuously syncs the chain; passive - Parity syncs initially, then sleeps and wakes regularly to resync; dark - Parity syncs only when the RPC is active; offline - Parity doesn't sync.",
|
||||
|
||||
ARG arg_mode_timeout: (u64) = 300u64, or |c: &Config| otry!(c.parity).mode_timeout.clone(),
|
||||
"--mode-timeout=[SECS]",
|
||||
@ -274,19 +269,11 @@ usage! {
|
||||
|
||||
ARG arg_auto_update: (String) = "critical", or |c: &Config| otry!(c.parity).auto_update.clone(),
|
||||
"--auto-update=[SET]",
|
||||
"Set a releases set to automatically update and install.
|
||||
all - All updates in the our release track.
|
||||
critical - Only consensus/security updates.
|
||||
none - No updates will be auto-installed.",
|
||||
"Set a releases set to automatically update and install. SET can be one of: all - All updates in the our release track; critical - Only consensus/security updates; none - No updates will be auto-installed.",
|
||||
|
||||
ARG arg_release_track: (String) = "current", or |c: &Config| otry!(c.parity).release_track.clone(),
|
||||
"--release-track=[TRACK]",
|
||||
"Set which release track we should use for updates.
|
||||
stable - Stable releases.
|
||||
beta - Beta releases.
|
||||
nightly - Nightly releases (unstable).
|
||||
testing - Testing releases (do not use).
|
||||
current - Whatever track this executable was released on",
|
||||
"Set which release track we should use for updates. TRACK can be one of: stable - Stable releases; beta - Beta releases; nightly - Nightly releases (unstable); testing - Testing releases (do not use); current - Whatever track this executable was released on.",
|
||||
|
||||
ARG arg_chain: (String) = "foundation", or |c: &Config| otry!(c.parity).chain.clone(),
|
||||
"--chain=[CHAIN]",
|
||||
@ -311,8 +298,7 @@ usage! {
|
||||
["Convenience options"]
|
||||
FLAG flag_unsafe_expose: (bool) = false, or |c: &Config| otry!(c.misc).unsafe_expose,
|
||||
"--unsafe-expose",
|
||||
"All servers will listen on external interfaces and will be remotely accessible. It's equivalent with setting the following: --{{ws,jsonrpc,ui,ipfs,secret_store,stratum}}-interface=all --*-hosts=all
|
||||
This option is UNSAFE and should be used with great care!",
|
||||
"All servers will listen on external interfaces and will be remotely accessible. It's equivalent with setting the following: --{{ws,jsonrpc,ui,ipfs,secret_store,stratum}}-interface=all --*-hosts=all This option is UNSAFE and should be used with great care!",
|
||||
|
||||
ARG arg_config: (String) = "$BASE/config.toml", or |_| None,
|
||||
"-c, --config=[CONFIG]",
|
||||
@ -498,7 +484,7 @@ usage! {
|
||||
|
||||
ARG arg_ws_hosts: (String) = "none", or |c: &Config| otry!(c.websockets).hosts.as_ref().map(|vec| vec.join(",")),
|
||||
"--ws-hosts=[HOSTS]",
|
||||
"List of allowed Host header values. This option will validate the Host header sent by the browser, it is additional security against some attack vectors. Special options: \"all\", \"none\",.",
|
||||
"List of allowed Host header values. This option will validate the Host header sent by the browser, it is additional security against some attack vectors. Special options: \"all\", \"none\".",
|
||||
|
||||
["API and console options – IPC"]
|
||||
FLAG flag_no_ipc: (bool) = false, or |c: &Config| otry!(c.ipc).disable.clone(),
|
||||
@ -582,7 +568,7 @@ usage! {
|
||||
|
||||
ARG arg_secretstore_path: (String) = "$BASE/secretstore", or |c: &Config| otry!(c.secretstore).path.clone(),
|
||||
"--secretstore-path=[PATH]",
|
||||
"Specify directory where Secret Store should save its data..",
|
||||
"Specify directory where Secret Store should save its data.",
|
||||
|
||||
ARG arg_secretstore_secret: (Option<String>) = None, or |c: &Config| otry!(c.secretstore).self_secret.clone(),
|
||||
"--secretstore-secret=[SECRET]",
|
||||
@ -671,7 +657,7 @@ usage! {
|
||||
|
||||
ARG arg_tx_queue_gas: (String) = "off", or |c: &Config| otry!(c.mining).tx_queue_gas.clone(),
|
||||
"--tx-queue-gas=[LIMIT]",
|
||||
"Maximum amount of total gas for external transactions in the queue. LIMIT can be either an amount of gas or 'auto' or 'off'. 'auto' sets the limit to be 20x the current block gas limit..",
|
||||
"Maximum amount of total gas for external transactions in the queue. LIMIT can be either an amount of gas or 'auto' or 'off'. 'auto' sets the limit to be 20x the current block gas limit.",
|
||||
|
||||
ARG arg_tx_queue_strategy: (String) = "gas_price", or |c: &Config| otry!(c.mining).tx_queue_strategy.clone(),
|
||||
"--tx-queue-strategy=[S]",
|
||||
@ -778,7 +764,7 @@ usage! {
|
||||
|
||||
ARG arg_pruning_history: (u64) = 64u64, or |c: &Config| otry!(c.footprint).pruning_history.clone(),
|
||||
"--pruning-history=[NUM]",
|
||||
"Set a minimum number of recent states to keep when pruning is active..",
|
||||
"Set a minimum number of recent states to keep when pruning is active.",
|
||||
|
||||
ARG arg_pruning_memory: (usize) = 32usize, or |c: &Config| otry!(c.footprint).pruning_memory.clone(),
|
||||
"--pruning-memory=[MB]",
|
||||
@ -1322,12 +1308,15 @@ mod tests {
|
||||
let args = Args::parse(&["parity", "--secretstore-nodes", "abc@127.0.0.1:3333,cde@10.10.10.10:4444"]).unwrap();
|
||||
assert_eq!(args.arg_secretstore_nodes, "abc@127.0.0.1:3333,cde@10.10.10.10:4444");
|
||||
|
||||
// Arguments with a single value shouldn't accept multiple values
|
||||
let args = Args::parse(&["parity", "--auto-update", "critical", "all"]);
|
||||
assert!(args.is_err());
|
||||
|
||||
let args = Args::parse(&["parity", "--password", "~/.safe/1", "~/.safe/2"]).unwrap();
|
||||
let args = Args::parse(&["parity", "--password", "~/.safe/1", "--password", "~/.safe/2", "--ui-port", "8123", "ui"]).unwrap();
|
||||
assert_eq!(args.arg_password, vec!["~/.safe/1".to_owned(), "~/.safe/2".to_owned()]);
|
||||
assert_eq!(args.arg_ui_port, 8123);
|
||||
assert_eq!(args.cmd_ui, true);
|
||||
|
||||
let args = Args::parse(&["parity", "--password", "~/.safe/1,~/.safe/2", "--ui-port", "8123", "ui"]).unwrap();
|
||||
assert_eq!(args.arg_password, vec!["~/.safe/1".to_owned(), "~/.safe/2".to_owned()]);
|
||||
assert_eq!(args.arg_ui_port, 8123);
|
||||
assert_eq!(args.cmd_ui, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -150,14 +150,20 @@ macro_rules! usage {
|
||||
}
|
||||
) => {
|
||||
use toml;
|
||||
use std::{fs, io, process};
|
||||
use std::{fs, io, process, cmp};
|
||||
use std::io::{Read, Write};
|
||||
use parity_version::version;
|
||||
use clap::{Arg, App, SubCommand, AppSettings, ArgMatches as ClapArgMatches, Error as ClapError, ErrorKind as ClapErrorKind};
|
||||
use clap::{Arg, App, SubCommand, AppSettings, ArgSettings, Error as ClapError, ErrorKind as ClapErrorKind};
|
||||
use dir::helpers::replace_home;
|
||||
use std::ffi::OsStr;
|
||||
use std::collections::HashMap;
|
||||
|
||||
extern crate textwrap;
|
||||
extern crate term_size;
|
||||
use self::textwrap::{Wrapper};
|
||||
|
||||
const MAX_TERM_WIDTH: usize = 120;
|
||||
|
||||
#[cfg(test)]
|
||||
use regex::Regex;
|
||||
|
||||
@ -365,11 +371,23 @@ macro_rules! usage {
|
||||
#[allow(unused_mut)] // subc_subc_exist may be assigned true by the macro
|
||||
#[allow(unused_assignments)] // Rust issue #22630
|
||||
pub fn print_help() -> String {
|
||||
|
||||
const TAB: &str = " ";
|
||||
const TAB_TAB: &str = " ";
|
||||
|
||||
let term_width = match term_size::dimensions() {
|
||||
None => MAX_TERM_WIDTH,
|
||||
Some((w, _)) => {
|
||||
cmp::min(w, MAX_TERM_WIDTH)
|
||||
}
|
||||
};
|
||||
|
||||
let mut help : String = include_str!("./usage_header.txt").to_owned();
|
||||
|
||||
help.push_str("\n\n");
|
||||
help.push_str("\n");
|
||||
|
||||
// Subcommands
|
||||
let mut subcommands_wrapper = Wrapper::new(term_width).subsequent_indent(TAB);
|
||||
help.push_str("parity [options]\n");
|
||||
$(
|
||||
{
|
||||
@ -386,11 +404,14 @@ macro_rules! usage {
|
||||
)*
|
||||
];
|
||||
|
||||
if subc_subc_usages.is_empty() {
|
||||
help.push_str(&format!("parity [options] {} {}\n", underscore_to_hyphen!(&stringify!($subc)[4..]), underscore_to_hyphen!(&stringify!($subc_subc)[stringify!($subc).len()+1..])));
|
||||
} else {
|
||||
help.push_str(&format!("parity [options] {} {} {}\n", underscore_to_hyphen!(&stringify!($subc)[4..]), underscore_to_hyphen!(&stringify!($subc_subc)[stringify!($subc).len()+1..]), subc_subc_usages.join(" ")));
|
||||
}
|
||||
help.push_str(&subcommands_wrapper.fill(
|
||||
format!(
|
||||
"parity [options] {} {} {}\n",
|
||||
underscore_to_hyphen!(&stringify!($subc)[4..]),
|
||||
underscore_to_hyphen!(&stringify!($subc_subc)[stringify!($subc).len()+1..]),
|
||||
subc_subc_usages.join(" ")
|
||||
).as_ref())
|
||||
);
|
||||
)*
|
||||
|
||||
// Print the subcommand on its own only if it has no subsubcommands
|
||||
@ -404,22 +425,30 @@ macro_rules! usage {
|
||||
)*
|
||||
];
|
||||
|
||||
if subc_usages.is_empty() {
|
||||
help.push_str(&format!("parity [options] {}\n", underscore_to_hyphen!(&stringify!($subc)[4..])));
|
||||
} else {
|
||||
help.push_str(&format!("parity [options] {} {}\n", underscore_to_hyphen!(&stringify!($subc)[4..]), subc_usages.join(" ")));
|
||||
}
|
||||
help.push_str(&subcommands_wrapper.fill(
|
||||
format!(
|
||||
"parity [options] {} {}\n",
|
||||
underscore_to_hyphen!(&stringify!($subc)[4..]),
|
||||
subc_usages.join(" ")
|
||||
).as_ref())
|
||||
);
|
||||
}
|
||||
}
|
||||
)*
|
||||
|
||||
help.push_str("\n");
|
||||
|
||||
// Arguments and flags
|
||||
let args_wrapper = Wrapper::new(term_width).initial_indent(TAB_TAB).subsequent_indent(TAB_TAB);
|
||||
$(
|
||||
help.push_str("\n");
|
||||
help.push_str($group_name); help.push_str(":\n");
|
||||
|
||||
$(
|
||||
help.push_str(&format!("\t{}\n\t\t{}\n", $flag_usage, $flag_help));
|
||||
help.push_str(&format!("{}{}\n{}\n",
|
||||
TAB, $flag_usage,
|
||||
args_wrapper.fill($flag_help)
|
||||
));
|
||||
help.push_str("\n");
|
||||
)*
|
||||
|
||||
$(
|
||||
@ -429,10 +458,24 @@ macro_rules! usage {
|
||||
if_option_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN {
|
||||
help.push_str(&format!("\t{}\n\t\t{} (default: {:?})\n", $arg_usage, $arg_help, {let x : inner_option_type!($($arg_type_tt)+)> = $arg_default; x}))
|
||||
help.push_str(&format!("{}{}\n{}\n",
|
||||
TAB, $arg_usage,
|
||||
args_wrapper.fill(format!(
|
||||
"{} (default: {:?})",
|
||||
$arg_help,
|
||||
{let x : inner_option_type!($($arg_type_tt)+)> = $arg_default; x}
|
||||
).as_ref())
|
||||
))
|
||||
}
|
||||
ELSE {
|
||||
help.push_str(&format!("\t{}\n\t\t{}{}\n", $arg_usage, $arg_help, $arg_default.map(|x: inner_option_type!($($arg_type_tt)+)| format!(" (default: {})",x)).unwrap_or("".to_owned())))
|
||||
help.push_str(&format!("{}{}\n{}\n",
|
||||
TAB, $arg_usage,
|
||||
args_wrapper.fill(format!(
|
||||
"{}{}",
|
||||
$arg_help,
|
||||
$arg_default.map(|x: inner_option_type!($($arg_type_tt)+)| format!(" (default: {})",x)).unwrap_or("".to_owned())
|
||||
).as_ref())
|
||||
))
|
||||
}
|
||||
)
|
||||
}
|
||||
@ -440,14 +483,27 @@ macro_rules! usage {
|
||||
if_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN {
|
||||
help.push_str(&format!("\t{}\n\t\t{} (default: {:?})\n", $arg_usage, $arg_help, {let x : $($arg_type_tt)+ = $arg_default; x}))
|
||||
help.push_str(&format!("{}{}\n{}\n", TAB, $arg_usage,
|
||||
args_wrapper.fill(format!(
|
||||
"{} (default: {:?})",
|
||||
$arg_help,
|
||||
{let x : $($arg_type_tt)+ = $arg_default; x}
|
||||
).as_ref())
|
||||
))
|
||||
}
|
||||
ELSE {
|
||||
help.push_str(&format!("\t{}\n\t\t{} (default: {})\n", $arg_usage, $arg_help, $arg_default))
|
||||
help.push_str(&format!("{}{}\n{}\n", TAB, $arg_usage,
|
||||
args_wrapper.fill(format!(
|
||||
"{} (default: {})",
|
||||
$arg_help,
|
||||
$arg_default
|
||||
).as_ref())
|
||||
))
|
||||
}
|
||||
)
|
||||
}
|
||||
);
|
||||
help.push_str("\n");
|
||||
)*
|
||||
|
||||
)*
|
||||
@ -503,36 +559,6 @@ macro_rules! usage {
|
||||
args
|
||||
}
|
||||
|
||||
pub fn hydrate_with_globals(self: &mut Self, matches: &ClapArgMatches) -> Result<(), ClapError> {
|
||||
$(
|
||||
$(
|
||||
self.$flag = self.$flag || matches.is_present(stringify!($flag));
|
||||
)*
|
||||
$(
|
||||
if let some @ Some(_) = return_if_parse_error!(if_option!(
|
||||
$($arg_type_tt)+,
|
||||
THEN {
|
||||
if_option_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN { values_t!(matches, stringify!($arg), inner_option_vec_type!($($arg_type_tt)+)) }
|
||||
ELSE { value_t!(matches, stringify!($arg), inner_option_type!($($arg_type_tt)+)) }
|
||||
)
|
||||
}
|
||||
ELSE {
|
||||
if_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN { values_t!(matches, stringify!($arg), inner_vec_type!($($arg_type_tt)+)) }
|
||||
ELSE { value_t!(matches, stringify!($arg), $($arg_type_tt)+) }
|
||||
)
|
||||
}
|
||||
)) {
|
||||
self.$arg = some;
|
||||
}
|
||||
)*
|
||||
)*
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(unused_variables)] // the submatches of arg-less subcommands aren't used
|
||||
pub fn parse<S: AsRef<str>>(command: &[S]) -> Result<Self, ClapError> {
|
||||
|
||||
@ -582,21 +608,31 @@ macro_rules! usage {
|
||||
let matches = App::new("Parity")
|
||||
.global_setting(AppSettings::VersionlessSubcommands)
|
||||
.global_setting(AppSettings::DisableHelpSubcommand)
|
||||
.max_term_width(MAX_TERM_WIDTH)
|
||||
.help(Args::print_help().as_ref())
|
||||
.args(&usages.iter().map(|u| Arg::from_usage(u).use_delimiter(false).allow_hyphen_values(true)).collect::<Vec<Arg>>())
|
||||
.args(&usages.iter().map(|u| {
|
||||
let mut arg = Arg::from_usage(u)
|
||||
.allow_hyphen_values(true) // Allow for example --allow-ips -10.0.0.0/8
|
||||
.global(true) // Argument doesn't have to come before the first subcommand
|
||||
.hidden(true); // Hide global arguments from the (subcommand) help messages generated by Clap
|
||||
|
||||
if arg.is_set(ArgSettings::Multiple) {
|
||||
arg = arg.require_delimiter(true); // Multiple values can only be separated by commas, not spaces (#7428)
|
||||
}
|
||||
|
||||
arg
|
||||
}).collect::<Vec<Arg>>())
|
||||
$(
|
||||
.subcommand(
|
||||
SubCommand::with_name(&underscore_to_hyphen!(&stringify!($subc)[4..]))
|
||||
.about($subc_help)
|
||||
.args(&subc_usages.get(stringify!($subc)).unwrap().iter().map(|u| Arg::from_usage(u).use_delimiter(false).allow_hyphen_values(true)).collect::<Vec<Arg>>())
|
||||
.args(&usages.iter().map(|u| Arg::from_usage(u).use_delimiter(false).allow_hyphen_values(true)).collect::<Vec<Arg>>()) // accept global arguments at this position
|
||||
$(
|
||||
.setting(AppSettings::SubcommandRequired) // prevent from running `parity account`
|
||||
.subcommand(
|
||||
SubCommand::with_name(&underscore_to_hyphen!(&stringify!($subc_subc)[stringify!($subc).len()+1..]))
|
||||
.about($subc_subc_help)
|
||||
.args(&subc_usages.get(stringify!($subc_subc)).unwrap().iter().map(|u| Arg::from_usage(u).use_delimiter(false).allow_hyphen_values(true)).collect::<Vec<Arg>>())
|
||||
.args(&usages.iter().map(|u| Arg::from_usage(u).use_delimiter(false).allow_hyphen_values(true)).collect::<Vec<Arg>>()) // accept global arguments at this position
|
||||
)
|
||||
)*
|
||||
)
|
||||
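
For readers unfamiliar with the clap 2.x settings used in the hunk above, here is a small standalone sketch (illustrative only, not part of the diff) of `require_delimiter` together with `values_t!`, using a hypothetical `--password` argument. With the delimiter required, comma-separated input yields multiple values while space-separated tokens are not swallowed as extra values, which is the behaviour the `#7428` comment above refers to.

```rust
#[macro_use]
extern crate clap; // assumes clap 2.x

use clap::{App, Arg};

fn main() {
    let matches = App::new("demo")
        .arg(Arg::with_name("password")
            .long("password")
            .takes_value(true)
            .multiple(true)
            // With require_delimiter(true), "a,b" parses as two values,
            // while a following bare token is no longer treated as a
            // second value (cf. the #7428 comment above).
            .require_delimiter(true))
        .get_matches_from(vec!["demo", "--password", "~/.safe/1,~/.safe/2"]);

    // values_t! converts the matched strings into a typed Vec,
    // mirroring what the usage! macro does for Vec-typed arguments.
    let passwords = values_t!(matches, "password", String).unwrap_or_else(|e| e.exit());
    assert_eq!(passwords, vec!["~/.safe/1".to_owned(), "~/.safe/2".to_owned()]);
}
```
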
@ -605,15 +641,39 @@ macro_rules! usage {
|
||||
|
||||
let mut raw_args : RawArgs = Default::default();
|
||||
|
||||
raw_args.hydrate_with_globals(&matches)?;
|
||||
// Globals
|
||||
$(
|
||||
$(
|
||||
raw_args.$flag = raw_args.$flag || matches.is_present(stringify!($flag));
|
||||
)*
|
||||
$(
|
||||
if let some @ Some(_) = return_if_parse_error!(if_option!(
|
||||
$($arg_type_tt)+,
|
||||
THEN {
|
||||
if_option_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN { values_t!(matches, stringify!($arg), inner_option_vec_type!($($arg_type_tt)+)) }
|
||||
ELSE { value_t!(matches, stringify!($arg), inner_option_type!($($arg_type_tt)+)) }
|
||||
)
|
||||
}
|
||||
ELSE {
|
||||
if_vec!(
|
||||
$($arg_type_tt)+,
|
||||
THEN { values_t!(matches, stringify!($arg), inner_vec_type!($($arg_type_tt)+)) }
|
||||
ELSE { value_t!(matches, stringify!($arg), $($arg_type_tt)+) }
|
||||
)
|
||||
}
|
||||
)) {
|
||||
raw_args.$arg = some;
|
||||
}
|
||||
)*
|
||||
)*
|
||||
|
||||
// Subcommands
|
||||
$(
|
||||
if let Some(submatches) = matches.subcommand_matches(&underscore_to_hyphen!(&stringify!($subc)[4..])) {
|
||||
raw_args.$subc = true;
|
||||
|
||||
// Globals
|
||||
raw_args.hydrate_with_globals(&submatches)?;
|
||||
// Subcommand flags
|
||||
$(
|
||||
raw_args.$subc_flag = submatches.is_present(&stringify!($subc_flag));
|
||||
@ -643,8 +703,6 @@ macro_rules! usage {
|
||||
if let Some(subsubmatches) = submatches.subcommand_matches(&underscore_to_hyphen!(&stringify!($subc_subc)[stringify!($subc).len()+1..])) {
|
||||
raw_args.$subc_subc = true;
|
||||
|
||||
// Globals
|
||||
raw_args.hydrate_with_globals(&subsubmatches)?;
|
||||
// Sub-subcommand flags
|
||||
$(
|
||||
raw_args.$subc_subc_flag = subsubmatches.is_present(&stringify!($subc_subc_flag));
|
||||
|
@ -1,3 +1,3 @@
Parity. Ethereum Client.
By Wood/Paronyan/Kotewicz/Drwięga/Volf et al.
Copyright 2015, 2016, 2017 Parity Technologies (UK) Ltd
Copyright 2015, 2016, 2017, 2018 Parity Technologies (UK) Ltd

@ -1,6 +1,6 @@
Parity
version {}
Copyright 2015, 2016, 2017 Parity Technologies (UK) Ltd
Copyright 2015, 2016, 2017, 2018 Parity Technologies (UK) Ltd
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.

@ -586,7 +586,12 @@ impl Configuration {
|
||||
let mut extra_embed = dev_ui.clone();
|
||||
match self.ui_hosts() {
|
||||
// In case host validation is disabled allow all frame ancestors
|
||||
None => extra_embed.push(("*".to_owned(), ui_port)),
|
||||
None => {
|
||||
// NOTE Chrome does not seem to support "*:<port>"
|
||||
// we use `http(s)://*:<port>` instead.
|
||||
extra_embed.push(("http://*".to_owned(), ui_port));
|
||||
extra_embed.push(("https://*".to_owned(), ui_port));
|
||||
},
|
||||
Some(hosts) => extra_embed.extend(hosts.into_iter().filter_map(|host| {
|
||||
let mut it = host.split(":");
|
||||
let host = it.next();
|
||||
|
@ -16,6 +16,8 @@
|
||||
|
||||
use std::fmt;
|
||||
use std::sync::{Arc, Weak};
|
||||
use std::time::{Duration, Instant};
|
||||
use std::thread;
|
||||
use std::net::{TcpListener};
|
||||
|
||||
use ctrlc::CtrlC;
|
||||
@ -171,8 +173,10 @@ impl ::local_store::NodeInfo for FullNodeInfo {
|
||||
}
|
||||
}
|
||||
|
||||
type LightClient = ::light::client::Client<::light_helpers::EpochFetch>;
|
||||
|
||||
// helper for light execution.
|
||||
fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<(bool, Option<String>), String> {
|
||||
fn execute_light_impl(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<((bool, Option<String>), Weak<LightClient>), String> {
|
||||
use light::client as light_client;
|
||||
use ethsync::{LightSyncParams, LightSync, ManageNetwork};
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
@ -237,8 +241,9 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
|
||||
let service = light_client::Service::start(config, &spec, fetch, &db_dirs.client_path(algorithm), cache.clone())
|
||||
.map_err(|e| format!("Error starting light client: {}", e))?;
|
||||
let client = service.client();
|
||||
let txq = Arc::new(RwLock::new(::light::transaction_queue::TransactionQueue::default()));
|
||||
let provider = ::light::provider::LightProvider::new(service.client().clone(), txq.clone());
|
||||
let provider = ::light::provider::LightProvider::new(client.clone(), txq.clone());
|
||||
|
||||
// start network.
|
||||
// set up bootnodes
|
||||
@ -275,7 +280,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
|
||||
// queue cull service.
|
||||
let queue_cull = Arc::new(::light_helpers::QueueCull {
|
||||
client: service.client().clone(),
|
||||
client: client.clone(),
|
||||
sync: light_sync.clone(),
|
||||
on_demand: on_demand.clone(),
|
||||
txq: txq.clone(),
|
||||
@ -299,7 +304,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
let signer_service = Arc::new(signer::new_service(&cmd.ws_conf, &cmd.ui_conf, &cmd.logger_config));
|
||||
let (node_health, dapps_deps) = {
|
||||
let contract_client = Arc::new(::dapps::LightRegistrar {
|
||||
client: service.client().clone(),
|
||||
client: client.clone(),
|
||||
sync: light_sync.clone(),
|
||||
on_demand: on_demand.clone(),
|
||||
});
|
||||
@ -342,7 +347,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
let dapps_service = dapps::service(&dapps_middleware);
|
||||
let deps_for_rpc_apis = Arc::new(rpc_apis::LightDependencies {
|
||||
signer_service: signer_service,
|
||||
client: service.client().clone(),
|
||||
client: client.clone(),
|
||||
sync: light_sync.clone(),
|
||||
net: light_sync.clone(),
|
||||
health: node_health,
|
||||
@ -382,7 +387,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
// the informant
|
||||
let informant = Arc::new(Informant::new(
|
||||
LightNodeInformantData {
|
||||
client: service.client().clone(),
|
||||
client: client.clone(),
|
||||
sync: light_sync.clone(),
|
||||
cache: cache,
|
||||
},
|
||||
@ -397,26 +402,13 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
|
||||
let res = wait_for_exit(None, None, can_restart);
|
||||
informant.shutdown();
|
||||
|
||||
Ok(res)
|
||||
// Create a weak reference to the client so that we can wait on shutdown until it is dropped
|
||||
let weak_client = Arc::downgrade(&client);
|
||||
|
||||
Ok((res, weak_client))
|
||||
}
|
||||
|
||||
pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<(bool, Option<String>), String> {
|
||||
if cmd.ui && cmd.dapps_conf.enabled {
|
||||
// Check if Parity is already running
|
||||
let addr = format!("{}:{}", cmd.ui_conf.interface, cmd.ui_conf.port);
|
||||
if !TcpListener::bind(&addr as &str).is_ok() {
|
||||
return open_ui(&cmd.ws_conf, &cmd.ui_conf, &cmd.logger_config).map(|_| (false, None));
|
||||
}
|
||||
}
|
||||
|
||||
// increase max number of open files
|
||||
raise_fd_limit();
|
||||
|
||||
// run as light client.
|
||||
if cmd.light {
|
||||
return execute_light(cmd, can_restart, logger);
|
||||
}
|
||||
|
||||
pub fn execute_impl(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<((bool, Option<String>), Weak<Client>), String> {
|
||||
// load spec
|
||||
let spec = cmd.spec.spec(&cmd.dirs.cache)?;
|
||||
|
||||
@ -855,6 +847,9 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
|
||||
open_dapp(&cmd.dapps_conf, &cmd.http_conf, &dapp)?;
|
||||
}
|
||||
|
||||
// Create a weak reference to the client so that we can wait on shutdown until it is dropped
|
||||
let weak_client = Arc::downgrade(&client);
|
||||
|
||||
// Handle exit
|
||||
let restart = wait_for_exit(Some(updater), Some(client), can_restart);
|
||||
|
||||
@ -868,7 +863,33 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
|
||||
// just Arc is dropping here, to allow other reference release in its default time
|
||||
drop(informant);
|
||||
|
||||
Ok(restart)
|
||||
Ok((restart, weak_client))
|
||||
}
|
||||
|
||||
pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<(bool, Option<String>), String> {
|
||||
if cmd.ui && cmd.dapps_conf.enabled {
|
||||
// Check if Parity is already running
|
||||
let addr = format!("{}:{}", cmd.ui_conf.interface, cmd.ui_conf.port);
|
||||
if !TcpListener::bind(&addr as &str).is_ok() {
|
||||
return open_ui(&cmd.ws_conf, &cmd.ui_conf, &cmd.logger_config).map(|_| (false, None));
|
||||
}
|
||||
}
|
||||
|
||||
// increase max number of open files
|
||||
raise_fd_limit();
|
||||
|
||||
fn wait<T>(res: Result<((bool, Option<String>), Weak<T>), String>) -> Result<(bool, Option<String>), String> {
|
||||
res.map(|(restart, weak_client)| {
|
||||
wait_for_drop(weak_client);
|
||||
restart
|
||||
})
|
||||
}
|
||||
|
||||
if cmd.light {
|
||||
wait(execute_light_impl(cmd, can_restart, logger))
|
||||
} else {
|
||||
wait(execute_impl(cmd, can_restart, logger))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
@ -1001,3 +1022,27 @@ fn wait_for_exit(
let _ = exit.1.wait(&mut l);
l.clone()
}

fn wait_for_drop<T>(w: Weak<T>) {
let sleep_duration = Duration::from_secs(1);
let warn_timeout = Duration::from_secs(60);
let max_timeout = Duration::from_secs(300);

let instant = Instant::now();
let mut warned = false;

while instant.elapsed() < max_timeout {
if w.upgrade().is_none() {
return;
}

if !warned && instant.elapsed() > warn_timeout {
warned = true;
warn!("Shutdown is taking longer than expected.");
}

thread::sleep(sleep_duration);
}

warn!("Shutdown timeout reached, exiting uncleanly.");
}

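
The shutdown-wait pattern above relies on `Arc::downgrade`/`Weak::upgrade`: once the last strong reference to the client has been dropped, `upgrade()` returns `None` and the loop ends. A minimal standalone sketch of the same idea (illustrative only, with simplified timings and `println!` in place of the `warn!` logging macro):

```rust
use std::sync::{Arc, Weak};
use std::thread;
use std::time::{Duration, Instant};

fn wait_for_drop<T>(w: Weak<T>, max_timeout: Duration) {
    let start = Instant::now();
    while start.elapsed() < max_timeout {
        // upgrade() fails once the last strong Arc has been dropped,
        // i.e. the resource has actually been released.
        if w.upgrade().is_none() {
            return;
        }
        thread::sleep(Duration::from_millis(50));
    }
    println!("timeout reached; giving up on a clean shutdown");
}

fn main() {
    let client = Arc::new(String::from("client"));
    let weak = Arc::downgrade(&client);

    // Drop the only strong reference from another thread after a short delay.
    let handle = thread::spawn(move || {
        thread::sleep(Duration::from_millis(100));
        drop(client);
    });

    wait_for_drop(weak, Duration::from_secs(2));
    handle.join().unwrap();
}
```
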
@ -147,19 +147,30 @@ impl LightFetch {
|
||||
Err(e) => return Box::new(future::err(e)),
|
||||
};
|
||||
|
||||
let maybe_future = self.sync.with_context(move |ctx| {
|
||||
Box::new(self.on_demand.request_raw(ctx, reqs)
|
||||
.expect("all back-references known to be valid; qed")
|
||||
.map(|res| extract_header(&res, header_ref)
|
||||
.expect("these responses correspond to requests that header_ref belongs to. \
|
||||
therefore it will not fail; qed"))
|
||||
.map_err(errors::on_demand_cancel))
|
||||
});
|
||||
|
||||
match maybe_future {
|
||||
Some(recv) => recv,
|
||||
None => Box::new(future::err(errors::network_disabled()))
|
||||
}
|
||||
self.send_requests(reqs, |res|
|
||||
extract_header(&res, header_ref)
|
||||
.expect("these responses correspond to requests that header_ref belongs to \
|
||||
therefore it will not fail; qed")
|
||||
)
|
||||
}
|
||||
|
||||
/// Helper for getting contract code at a given block.
|
||||
pub fn code(&self, address: Address, id: BlockId) -> BoxFuture<Vec<u8>> {
|
||||
let mut reqs = Vec::new();
|
||||
let header_ref = match self.make_header_requests(id, &mut reqs) {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Box::new(future::err(e)),
|
||||
};
|
||||
|
||||
reqs.push(request::Account { header: header_ref.clone(), address: address }.into());
|
||||
let account_idx = reqs.len() - 1;
|
||||
reqs.push(request::Code { header: header_ref, code_hash: Field::back_ref(account_idx, 0) }.into());
|
||||
|
||||
self.send_requests(reqs, |mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Code(code)) => code,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Helper for getting account info at a given block.
|
||||
@ -173,20 +184,10 @@ impl LightFetch {
|
||||
|
||||
reqs.push(request::Account { header: header_ref, address: address }.into());
|
||||
|
||||
let maybe_future = self.sync.with_context(move |ctx| {
|
||||
Box::new(self.on_demand.request_raw(ctx, reqs)
|
||||
.expect("all back-references known to be valid; qed")
|
||||
.map(|mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Account(acc)) => acc,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
.map_err(errors::on_demand_cancel))
|
||||
});
|
||||
|
||||
match maybe_future {
|
||||
Some(recv) => recv,
|
||||
None => Box::new(future::err(errors::network_disabled()))
|
||||
}
|
||||
self.send_requests(reqs, |mut res|match res.pop() {
|
||||
Some(OnDemandResponse::Account(acc)) => acc,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Helper for getting proved execution.
|
||||
@ -277,20 +278,10 @@ impl LightFetch {
|
||||
|
||||
reqs.push(request::Body(header_ref).into());
|
||||
|
||||
let maybe_future = self.sync.with_context(move |ctx| {
|
||||
Box::new(self.on_demand.request_raw(ctx, reqs)
|
||||
.expect(NO_INVALID_BACK_REFS)
|
||||
.map(|mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Body(b)) => b,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
.map_err(errors::on_demand_cancel))
|
||||
});
|
||||
|
||||
match maybe_future {
|
||||
Some(recv) => recv,
|
||||
None => Box::new(future::err(errors::network_disabled()))
|
||||
}
|
||||
self.send_requests(reqs, |mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Body(b)) => b,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the block receipts. Fails on unknown block ID.
|
||||
@ -303,20 +294,10 @@ impl LightFetch {
|
||||
|
||||
reqs.push(request::BlockReceipts(header_ref).into());
|
||||
|
||||
let maybe_future = self.sync.with_context(move |ctx| {
|
||||
Box::new(self.on_demand.request_raw(ctx, reqs)
|
||||
.expect(NO_INVALID_BACK_REFS)
|
||||
.map(|mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Receipts(b)) => b,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
.map_err(errors::on_demand_cancel))
|
||||
});
|
||||
|
||||
match maybe_future {
|
||||
Some(recv) => recv,
|
||||
None => Box::new(future::err(errors::network_disabled()))
|
||||
}
|
||||
self.send_requests(reqs, |mut res| match res.pop() {
|
||||
Some(OnDemandResponse::Receipts(b)) => b,
|
||||
_ => panic!("responses correspond directly with requests in amount and type; qed"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get transaction logs
|
||||
@ -433,6 +414,23 @@ impl LightFetch {
|
||||
Either::B(extract_transaction)
|
||||
}))
|
||||
}
|
||||
|
||||
fn send_requests<T, F>(&self, reqs: Vec<OnDemandRequest>, parse_response: F) -> BoxFuture<T> where
|
||||
F: FnOnce(Vec<OnDemandResponse>) -> T + Send + 'static,
|
||||
T: Send + 'static,
|
||||
{
|
||||
let maybe_future = self.sync.with_context(move |ctx| {
|
||||
Box::new(self.on_demand.request_raw(ctx, reqs)
|
||||
.expect(NO_INVALID_BACK_REFS)
|
||||
.map(parse_response)
|
||||
.map_err(errors::on_demand_cancel))
|
||||
});
|
||||
|
||||
match maybe_future {
|
||||
Some(recv) => recv,
|
||||
None => Box::new(future::err(errors::network_disabled()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -349,8 +349,8 @@ impl<T: LightChainClient + 'static> Eth for EthClient<T> {
|
||||
}))
|
||||
}
|
||||
|
||||
fn code_at(&self, _address: RpcH160, _num: Trailing<BlockNumber>) -> BoxFuture<Bytes> {
|
||||
Box::new(future::err(errors::unimplemented(None)))
|
||||
fn code_at(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<Bytes> {
|
||||
Box::new(self.fetcher().code(address.into(), num.unwrap_or_default().into()).map(Into::into))
|
||||
}
|
||||
|
||||
fn send_raw_transaction(&self, raw: Bytes) -> Result<RpcH256> {
|
||||
|
@ -208,7 +208,12 @@ impl Parity for ParityClient {
|
||||
}

fn registry_address(&self) -> Result<Option<H160>> {
Err(errors::light_unimplemented(None))
let reg = self.light_dispatch.client.engine().params().registrar;
if reg == Default::default() {
Ok(None)
} else {
Ok(Some(reg.into()))
}
}

fn rpc_settings(&self) -> Result<RpcSettings> {

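
The new light-client implementation above treats the default (all-zero) registrar address as "not configured". A standalone sketch of that idiom, using a plain 20-byte array in place of the real `H160` type (illustrative only):

```rust
type Address = [u8; 20];

// An all-zero address is the chain-spec default, meaning no registrar is set.
fn registry_address(registrar: Address) -> Option<Address> {
    if registrar == [0u8; 20] {
        None
    } else {
        Some(registrar)
    }
}

fn main() {
    assert_eq!(registry_address([0u8; 20]), None);

    let mut some_contract = [0u8; 20];
    some_contract[19] = 0x42;
    assert_eq!(registry_address(some_contract), Some(some_contract));
}
```
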
@ -95,7 +95,7 @@ impl ParityAccounts for ParityAccountsClient {
|
||||
let store = self.account_provider()?;
|
||||
|
||||
store.import_presale(json.as_bytes(), &pass)
|
||||
.or_else(|_| store.import_wallet(json.as_bytes(), &pass))
|
||||
.or_else(|_| store.import_wallet(json.as_bytes(), &pass, true))
|
||||
.map(Into::into)
|
||||
.map_err(|e| errors::account("Could not create account.", e))
|
||||
}
|
||||
|
@ -480,7 +480,7 @@ fn should_export_account() {
|
||||
// given
|
||||
let tester = setup();
|
||||
let wallet = r#"{"id":"6a186c80-7797-cff2-bc2e-7c1d6a6cc76e","version":3,"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"a1c6ff99070f8032ca1c4e8add006373"},"ciphertext":"df27e3db64aa18d984b6439443f73660643c2d119a6f0fa2fa9a6456fc802d75","kdf":"pbkdf2","kdfparams":{"c":10240,"dklen":32,"prf":"hmac-sha256","salt":"ddc325335cda5567a1719313e73b4842511f3e4a837c9658eeb78e51ebe8c815"},"mac":"3dc888ae79cbb226ff9c455669f6cf2d79be72120f2298f6cb0d444fddc0aa3d"},"address":"0042e5d2a662eeaca8a7e828c174f98f35d8925b","name":"parity-export-test","meta":"{\"passwordHint\":\"parity-export-test\",\"timestamp\":1490017814987}"}"#;
|
||||
tester.accounts.import_wallet(wallet.as_bytes(), "parity-export-test").unwrap();
|
||||
tester.accounts.import_wallet(wallet.as_bytes(), "parity-export-test", false).unwrap();
|
||||
let accounts = tester.accounts.accounts().unwrap();
|
||||
assert_eq!(accounts.len(), 1);
|
||||
|
||||
@ -501,6 +501,26 @@ fn should_export_account() {
|
||||
assert_eq!(result, Some(response.into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_import_wallet() {
|
||||
let tester = setup();
|
||||
|
||||
let id = "6a186c80-7797-cff2-bc2e-7c1d6a6cc76e";
|
||||
let request = r#"{"jsonrpc":"2.0","method":"parity_newAccountFromWallet","params":["{\"id\":\"<ID>\",\"version\":3,\"crypto\":{\"cipher\":\"aes-128-ctr\",\"cipherparams\":{\"iv\":\"478736fb55872c1baf01b27b1998c90b\"},\"ciphertext\":\"fe5a63cc0055d7b0b3b57886f930ad9b63f48950d1348145d95996c41e05f4e0\",\"kdf\":\"pbkdf2\",\"kdfparams\":{\"c\":10240,\"dklen\":32,\"prf\":\"hmac-sha256\",\"salt\":\"658436d6738a19731149a98744e5cf02c8d5aa1f8e80c1a43cc9351c70a984e4\"},\"mac\":\"c7384b26ecf25539d942030230062af9b69de5766cbcc4690bffce1536644631\"},\"address\":\"00bac56a8a27232baa044c03f43bf3648c961735\",\"name\":\"hello world\",\"meta\":\"{}\"}", "himom"],"id":1}"#;
|
||||
let request = request.replace("<ID>", id);
|
||||
let response = r#"{"jsonrpc":"2.0","result":"0x00bac56a8a27232baa044c03f43bf3648c961735","id":1}"#;
|
||||
|
||||
let res = tester.io.handle_request_sync(&request).unwrap();
|
||||
|
||||
assert_eq!(res, response);
|
||||
|
||||
let account_meta = tester.accounts.account_meta("0x00bac56a8a27232baa044c03f43bf3648c961735".into()).unwrap();
|
||||
let account_uuid: String = account_meta.uuid.unwrap().into();
|
||||
|
||||
// the RPC should import the account with a new id
|
||||
assert!(account_uuid != id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_sign_message() {
|
||||
let tester = setup();
|
||||
|
@ -1,9 +1,12 @@
|
||||
#!/bin/bash
|
||||
set -e # fail on any error
|
||||
set -u # treat unset variables as error
|
||||
|
||||
cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
git clone https://github.com/paritytech/parity-import-tests
|
||||
cp target/release/parity parity-import-tests/aura/parity
|
||||
cd parity-import-tests/aura
|
||||
echo "Start Aura test"
|
||||
parity import blocks.rlp --chain chain.json
|
||||
parity restore snap --chain chain.json
|
||||
./parity import blocks.rlp --chain chain.json
|
||||
./parity restore snap --chain chain.json
|
||||
echo "Aura test complete"
|
||||
|
@ -12,42 +12,21 @@
|
||||
### Running coverage
|
||||
|
||||
set -x
|
||||
|
||||
KCOV=${1:-kcov}
|
||||
|
||||
if ! type $KCOV > /dev/null; then
|
||||
echo "Install kcov first (details inside this file). Aborting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
RUSTFLAGS="-C link-dead-code" cargo test --all --exclude parity-ipfs-api --exclude evmjit --no-run || exit $?
|
||||
|
||||
|
||||
RUSTFLAGS="-C link-dead-code" cargo test --all --exclude evmjit --no-run || exit $?
|
||||
KCOV_TARGET="target/cov"
|
||||
KCOV_FLAGS="--verify"
|
||||
EXCLUDE="/usr/lib,\
|
||||
/usr/include,\
|
||||
$HOME/.cargo,\
|
||||
$HOME/.multirust,\
|
||||
rocksdb,\
|
||||
secp256k1
|
||||
"
|
||||
|
||||
rm -rf $KCOV_TARGET
|
||||
EXCLUDE="/usr/lib,/usr/include,$HOME/.cargo,$HOME/.multirust,rocksdb,secp256k1"
|
||||
mkdir -p $KCOV_TARGET
|
||||
echo "Cover RUST"
|
||||
for FILE in `find target/debug/deps ! -name "*.*"`
|
||||
do
|
||||
$KCOV --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET $FILE
|
||||
done
|
||||
|
||||
$KCOV --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET target/debug/parity-*
|
||||
do
|
||||
timeout --signal=SIGKILL 5m kcov --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET $FILE
|
||||
done
|
||||
timeout --signal=SIGKILL 5m kcov --exclude-pattern $EXCLUDE $KCOV_FLAGS $KCOV_TARGET target/debug/parity-*
|
||||
echo "Cover JS"
|
||||
cd js
|
||||
npm install&&npm run test:coverage
|
||||
cd ..
|
||||
codecov
|
||||
bash <(curl -s https://codecov.io/bash)&&
|
||||
echo "Uploaded code coverage"
|
||||
|
||||
exit 0
|
||||
|
350
scripts/gitlab-build.sh
Executable file
@ -0,0 +1,350 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e # fail on any error
|
||||
set -u # treat unset variables as error
|
||||
#ARGUMENTS: 1. BUILD_PLATFORM (target for binaries) 2. PLATFORM (target for cargo) 3. ARC (architecture) 4. & 5. CC & CXX flags 6. binary identifier
|
||||
BUILD_PLATFORM=$1
|
||||
PLATFORM=$2
|
||||
ARC=$3
|
||||
CC=$4
|
||||
CXX=$5
|
||||
IDENT=$6
|
||||
VER="$(grep -m 1 "version =" Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")"
|
||||
S3WIN=""
|
||||
echo "--------------------"
|
||||
echo "Build for platform: " $BUILD_PLATFORM
|
||||
echo "Build identifier: " $IDENT
|
||||
echo "Cargo target: " $PLATFORM
|
||||
echo "CC&CXX flags: " $CC ", " $CXX
|
||||
echo "Architecture: " $ARC
|
||||
echo "Libssl version: " $LIBSSL
|
||||
echo "Parity version: " $VER
|
||||
echo "Branch: " $CI_BUILD_REF_NAME
|
||||
echo "--------------------"
|
||||
|
||||
echo "Rhash version:"
|
||||
# NOTE for md5 and sha256 we want to display filename as well
|
||||
# hence we use --* instead of -p *
|
||||
MD5_BIN="rhash --md5"
|
||||
SHA256_BIN="rhash --sha256"
|
||||
# NOTE For SHA3 we need only hash (hence -p)
|
||||
SHA3_BIN="rhash -p %{sha3-256}"
|
||||
|
||||
set_env () {
|
||||
echo "Set ENVIROMENT"
|
||||
export HOST_CC=gcc
|
||||
export HOST_CXX=g++
|
||||
rm -rf .cargo
|
||||
mkdir -p .cargo
|
||||
echo "[target.$PLATFORM]" >> .cargo/config
|
||||
echo "linker= \"$CC\"" >> .cargo/config
|
||||
cat .cargo/config
|
||||
}
|
||||
set_env_win () {
|
||||
set PLATFORM=x86_64-pc-windows-msvc
|
||||
set INCLUDE="C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt"
|
||||
set LIB="C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64"
|
||||
set RUST_BACKTRACE=1
|
||||
#export RUSTFLAGS=$RUSTFLAGS
|
||||
rustup default stable-x86_64-pc-windows-msvc
|
||||
echo "MsBuild.exe windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release" > msbuild.cmd
|
||||
echo "@ signtool sign /f "\%"1 /p "\%"2 /tr http://timestamp.comodoca.com /du https://parity.io "\%"3" > sign.cmd
|
||||
}
|
||||
build () {
|
||||
echo "Build parity:"
|
||||
cargo build --target $PLATFORM --features final --release
|
||||
echo "Build evmbin:"
|
||||
cargo build --target $PLATFORM --release -p evmbin
|
||||
echo "Build ethstore-cli:"
|
||||
cargo build --target $PLATFORM --release -p ethstore-cli
|
||||
echo "Build ethkey-cli:"
|
||||
cargo build --target $PLATFORM --release -p ethkey-cli
|
||||
}
|
||||
strip_binaries () {
|
||||
echo "Strip binaries:"
|
||||
$STRIP_BIN -v target/$PLATFORM/release/parity
|
||||
$STRIP_BIN -v target/$PLATFORM/release/parity-evm
|
||||
$STRIP_BIN -v target/$PLATFORM/release/ethstore
|
||||
$STRIP_BIN -v target/$PLATFORM/release/ethkey;
|
||||
}
|
||||
calculate_checksums () {
|
||||
echo "Checksum calculation:"
|
||||
rhash --version
|
||||
rm -rf *.md5
|
||||
rm -rf *.sha256
|
||||
|
||||
export SHA3="$($SHA3_BIN target/$PLATFORM/release/parity$S3WIN)"
|
||||
# NOTE rhash 1.3.1 doesnt support keccak, workaround
|
||||
if [ "$SHA3" == "%{sha3-256}" ]; then
|
||||
export SHA3="$(target/$PLATFORM/release/parity$S3WIN tools hash target/$PLATFORM/release/parity$S3WIN)"
|
||||
fi
|
||||
|
||||
echo "Parity file SHA3: $SHA3"
|
||||
$MD5_BIN target/$PLATFORM/release/parity$S3WIN > parity$S3WIN.md5
|
||||
$SHA256_BIN target/$PLATFORM/release/parity$S3WIN > parity$S3WIN.sha256
|
||||
$MD5_BIN target/$PLATFORM/release/parity-evm$S3WIN > parity-evm$S3WIN.md5
|
||||
$SHA256_BIN target/$PLATFORM/release/parity-evm$S3WIN > parity-evm$S3WIN.sha256
|
||||
$MD5_BIN target/$PLATFORM/release/ethstore$S3WIN > ethstore$S3WIN.md5
|
||||
$SHA256_BIN target/$PLATFORM/release/ethstore$S3WIN > ethstore$S3WIN.sha256
|
||||
$MD5_BIN target/$PLATFORM/release/ethkey$S3WIN > ethkey$S3WIN.md5
|
||||
$SHA256_BIN target/$PLATFORM/release/ethkey$S3WIN > ethkey$S3WIN.sha256
|
||||
}
|
||||
make_deb () {
|
||||
rm -rf deb
|
||||
echo "create DEBIAN files"
|
||||
mkdir -p deb/usr/bin/
|
||||
mkdir -p deb/DEBIAN
|
||||
echo "create copyright, docs, compat"
|
||||
cp LICENSE deb/DEBIAN/copyright
|
||||
echo "https://github.com/paritytech/parity/wiki" >> deb/DEBIAN/docs
|
||||
echo "8" >> deb/DEBIAN/compat
|
||||
echo "create control file"
|
||||
control=deb/DEBIAN/control
|
||||
echo "Package: parity" >> $control
|
||||
echo "Version: $VER" >> $control
|
||||
echo "Source: parity" >> $control
|
||||
echo "Section: science" >> $control
|
||||
echo "Priority: extra" >> $control
|
||||
echo "Maintainer: Parity Technologies <devops@parity.io>" >> $control
|
||||
echo "Build-Depends: debhelper (>=9)" >> $control
|
||||
echo "Standards-Version: 3.9.5" >> $control
|
||||
echo "Homepage: https://parity.io" >> $control
|
||||
echo "Vcs-Git: git://github.com/paritytech/parity.git" >> $control
|
||||
echo "Vcs-Browser: https://github.com/paritytech/parity" >> $control
|
||||
echo "Architecture: $ARC" >> $control
|
||||
echo "Depends: $LIBSSL" >> $control
|
||||
echo "Description: Ethereum network client by Parity Technologies" >> $control
|
||||
size=`du deb/|awk 'END {print $1}'`
|
||||
echo "Installed-Size: $size" >> $control
|
||||
echo "build .deb package"
|
||||
cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
dpkg-deb -b deb "parity_"$VER"_"$IDENT"_"$ARC".deb"
|
||||
$MD5_BIN "parity_"$VER"_"$IDENT"_"$ARC".deb" > "parity_"$VER"_"$IDENT"_"$ARC".deb.md5"
|
||||
$SHA256_BIN "parity_"$VER"_"$IDENT"_"$ARC".deb" > "parity_"$VER"_"$IDENT"_"$ARC".deb.sha256"
|
||||
}
|
||||
make_rpm () {
|
||||
rm -rf /install
|
||||
mkdir -p /install/usr/bin
|
||||
cp target/$PLATFORM/release/parity /install/usr/bin
|
||||
cp target/$PLATFORM/release/parity-evm /install/usr/bin/parity-evm
|
||||
cp target/$PLATFORM/release/ethstore /install/usr/bin/ethstore
|
||||
cp target/$PLATFORM/release/ethkey /install/usr/bin/ethkey
|
||||
|
||||
rm -rf "parity-"$VER"-1."$ARC".rpm" || true
|
||||
fpm -s dir -t rpm -n parity -v $VER --epoch 1 --license GPLv3 -d openssl --provides parity --url https://parity.io --vendor "Parity Technologies" -a x86_64 -m "<devops@parity.io>" --description "Ethereum network client by Parity Technologies" -C /install/
|
||||
cp "parity-"$VER"-1."$ARC".rpm" "parity_"$VER"_"$IDENT"_"$ARC".rpm"
|
||||
$MD5_BIN "parity_"$VER"_"$IDENT"_"$ARC".rpm" > "parity_"$VER"_"$IDENT"_"$ARC".rpm.md5"
|
||||
$SHA256_BIN "parity_"$VER"_"$IDENT"_"$ARC".rpm" > "parity_"$VER"_"$IDENT"_"$ARC".rpm.sha256"
|
||||
}
|
||||
make_pkg () {
|
||||
echo "make PKG"
|
||||
cp target/$PLATFORM/release/parity target/release/parity
|
||||
cp target/$PLATFORM/release/parity-evm target/release/parity-evm
|
||||
cp target/$PLATFORM/release/ethstore target/release/ethstore
|
||||
cp target/$PLATFORM/release/ethkey target/release/ethkey
|
||||
cd mac
|
||||
xcodebuild -configuration Release
|
||||
cd ..
|
||||
packagesbuild -v mac/Parity.pkgproj
|
||||
productsign --sign 'Developer ID Installer: PARITY TECHNOLOGIES LIMITED (P2PX3JU8FT)' target/release/Parity\ Ethereum.pkg target/release/Parity\ Ethereum-signed.pkg
|
||||
mv target/release/Parity\ Ethereum-signed.pkg "parity_"$VER"_"$IDENT"_"$ARC".pkg"
|
||||
$MD5_BIN "parity_"$VER"_"$IDENT"_"$ARC"."$EXT >> "parity_"$VER"_"$IDENT"_"$ARC".pkg.md5"
|
||||
$SHA256_BIN "parity_"$VER"_"$IDENT"_"$ARC"."$EXT >> "parity_"$VER"_"$IDENT"_"$ARC".pkg.sha256"
|
||||
}
|
||||
sign_exe () {
|
||||
./sign.cmd $keyfile $certpass "target/$PLATFORM/release/parity.exe"
|
||||
}
|
||||
make_exe () {
|
||||
./msbuild.cmd
|
||||
./sign.cmd $keyfile $certpass windows/ptray/x64/release/ptray.exe
|
||||
cd nsis
|
||||
curl -sL --url "https://github.com/paritytech/win-build/raw/master/vc_redist.x64.exe" -o vc_redist.x64.exe
|
||||
echo "makensis.exe installer.nsi" > nsis.cmd
|
||||
./nsis.cmd
|
||||
cd ..
|
||||
cp nsis/installer.exe "parity_"$VER"_"$IDENT"_"$ARC"."$EXT
|
||||
./sign.cmd $keyfile $certpass "parity_"$VER"_"$IDENT"_"$ARC"."$EXT
|
||||
$MD5_BIN "parity_"$VER"_"$IDENT"_"$ARC"."$EXT -p %h > "parity_"$VER"_"$IDENT"_"$ARC"."$EXT".md5"
|
||||
$SHA256_BIN "parity_"$VER"_"$IDENT"_"$ARC"."$EXT -p %h > "parity_"$VER"_"$IDENT"_"$ARC"."$EXT".sha256"
|
||||
}
|
||||
push_binaries () {
|
||||
echo "Push binaries to AWS S3"
|
||||
aws configure set aws_access_key_id $s3_key
|
||||
aws configure set aws_secret_access_key $s3_secret
|
||||
if [[ "$CI_BUILD_REF_NAME" = "master" || "$CI_BUILD_REF_NAME" = "beta" || "$CI_BUILD_REF_NAME" = "stable" || "$CI_BUILD_REF_NAME" = "nightly" ]];
|
||||
then
|
||||
export S3_BUCKET=builds-parity-published;
|
||||
else
|
||||
export S3_BUCKET=builds-parity;
|
||||
fi
|
||||
aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$BUILD_PLATFORM
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity$S3WIN --body target/$PLATFORM/release/parity$S3WIN
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity$S3WIN.md5 --body parity$S3WIN.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity$S3WIN.sha256 --body parity$S3WIN.sha256
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity-evm$S3WIN --body target/$PLATFORM/release/parity-evm$S3WIN
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity-evm$S3WIN.md5 --body parity-evm$S3WIN.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/parity-evm$S3WIN.sha256 --body parity-evm$S3WIN.sha256
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethstore$S3WIN --body target/$PLATFORM/release/ethstore$S3WIN
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethstore$S3WIN.md5 --body ethstore$S3WIN.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethstore$S3WIN.sha256 --body ethstore$S3WIN.sha256
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethkey$S3WIN --body target/$PLATFORM/release/ethkey$S3WIN
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethkey$S3WIN.md5 --body ethkey$S3WIN.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/ethkey$S3WIN.sha256 --body ethkey$S3WIN.sha256
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/"parity_"$VER"_"$IDENT"_"$ARC"."$EXT --body "parity_"$VER"_"$IDENT"_"$ARC"."$EXT
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/"parity_"$VER"_"$IDENT"_"$ARC"."$EXT".md5" --body "parity_"$VER"_"$IDENT"_"$ARC"."$EXT".md5"
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$BUILD_PLATFORM/"parity_"$VER"_"$IDENT"_"$ARC"."$EXT".sha256" --body "parity_"$VER"_"$IDENT"_"$ARC"."$EXT".sha256"
|
||||
}
|
||||
make_archive () {
|
||||
echo "add artifacts to archive"
|
||||
rm -rf parity.zip
|
||||
zip -r parity.zip target/$PLATFORM/release/parity$S3WIN target/$PLATFORM/release/parity-evm$S3WIN target/$PLATFORM/release/ethstore$S3WIN target/$PLATFORM/release/ethkey$S3WIN parity$S3WIN.md5 parity-evm$S3WIN.md5 ethstore$S3WIN.md5 ethkey$S3WIN.md5 parity$S3WIN.sha256 parity-evm$S3WIN.sha256 ethstore$S3WIN.sha256 ethkey$S3WIN.sha256
|
||||
}
|
||||
|
||||
updater_push_release () {
|
||||
echo "push release"
|
||||
|
||||
DATA="commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity$S3WIN&secret=$RELEASES_SECRET"
|
||||
# Mainnet
|
||||
source scripts/safe_curl.sh $DATA "http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$BUILD_PLATFORM"
|
||||
# Kovan
|
||||
source scripts/safe_curl.sh $DATA "http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$BUILD_PLATFORM"
|
||||
}
|
||||
|
||||
case $BUILD_PLATFORM in
|
||||
x86_64-unknown-linux-gnu)
|
||||
#set strip bin
|
||||
STRIP_BIN="strip"
|
||||
#package extention
|
||||
EXT="deb"
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
updater_push_release
|
||||
;;
|
||||
x86_64-unknown-debian-gnu)
|
||||
STRIP_BIN="strip"
|
||||
EXT="deb"
|
||||
LIBSSL="libssl1.1 (>=1.1.0)"
|
||||
echo "Use libssl1.1 (>=1.1.0) for Debian builds"
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
x86_64-unknown-centos-gnu)
|
||||
STRIP_BIN="strip"
|
||||
EXT="rpm"
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_rpm
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
i686-unknown-linux-gnu)
|
||||
STRIP_BIN="strip"
|
||||
EXT="deb"
|
||||
set_env
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
armv7-unknown-linux-gnueabihf)
|
||||
STRIP_BIN="arm-linux-gnueabihf-strip"
|
||||
EXT="deb"
|
||||
set_env
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
arm-unknown-linux-gnueabihf)
|
||||
STRIP_BIN="arm-linux-gnueabihf-strip"
|
||||
EXT="deb"
|
||||
set_env
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
aarch64-unknown-linux-gnu)
|
||||
STRIP_BIN="aarch64-linux-gnu-strip"
|
||||
EXT="deb"
|
||||
set_env
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_deb
|
||||
make_archive
|
||||
push_binaries
|
||||
;;
|
||||
x86_64-apple-darwin)
|
||||
STRIP_BIN="strip"
|
||||
PLATFORM="x86_64-apple-darwin"
|
||||
EXT="pkg"
|
||||
build
|
||||
strip_binaries
|
||||
calculate_checksums
|
||||
make_pkg
|
||||
make_archive
|
||||
push_binaries
|
||||
updater_push_release
|
||||
;;
|
||||
x86_64-unknown-snap-gnu)
|
||||
ARC="amd64"
|
||||
EXT="snap"
|
||||
apt install -y expect zip rhash
|
||||
snapcraft clean
|
||||
echo "Prepare snapcraft.yaml for build on Gitlab CI in Docker image"
|
||||
sed -i 's/git/'"$VER"'/g' snap/snapcraft.yaml
|
||||
if [[ "$CI_BUILD_REF_NAME" = "beta" || "$VER" == *1.9* ]];
|
||||
then
|
||||
sed -i -e 's/grade: devel/grade: stable/' snap/snapcraft.yaml;
|
||||
fi
|
||||
mv -f snap/snapcraft.yaml snapcraft.yaml
|
||||
snapcraft -d
|
||||
snapcraft_login=$(expect -c "
|
||||
spawn snapcraft login
|
||||
expect \"Email:\"
|
||||
send \"$SNAP_EMAIL\n\"
|
||||
expect \"Password:\"
|
||||
send \"$SNAP_PASS\n\"
|
||||
expect \"\$\"
|
||||
")
|
||||
echo "$snapcraft_login"
|
||||
snapcraft push "parity_"$VER"_amd64.snap"
|
||||
snapcraft status parity
|
||||
snapcraft logout
|
||||
$MD5_BIN "parity_"$VER"_amd64.snap" > "parity_"$VER"_amd64.snap.md5"
|
||||
$SHA256_BIN "parity_"$VER"_amd64.snap" > "parity_"$VER"_amd64.snap.sha256"
|
||||
echo "add artifacts to archive"
|
||||
rm -rf parity.zip
|
||||
zip -r parity.zip "parity_"$VER"_amd64.snap" "parity_"$VER"_amd64.snap.md5" "parity_"$VER"_amd64.snap.sha256"
|
||||
;;
|
||||
x86_64-pc-windows-msvc)
|
||||
set_env_win
|
||||
EXT="exe"
|
||||
S3WIN=".exe"
|
||||
build
|
||||
sign_exe
|
||||
calculate_checksums
|
||||
make_exe
|
||||
make_archive
|
||||
push_binaries
|
||||
updater_push_release
|
||||
esac
|
12
scripts/gitlab-push-release.sh
Executable file
@ -0,0 +1,12 @@
#!/bin/bash

set -e # fail on any error
set -u # treat unset variables as error

DATA="secret=$RELEASES_SECRET"

echo "Pushing release to Mainnet"
./scripts/safe_curl.sh $DATA "http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF"

echo "Pushing release to Kovan"
./scripts/safe_curl.sh $DATA "http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF"
101
scripts/gitlab-test.sh
Executable file
@ -0,0 +1,101 @@
|
||||
#!/bin/bash
|
||||
#ARGUMENT test for RUST, JS, COVERAGE or JS_RELEASE
|
||||
set -e # fail on any error
|
||||
set -u # treat unset variables as error
|
||||
|
||||
if [[ "$CI_COMMIT_REF_NAME" = "beta" || "$CI_COMMIT_REF_NAME" = "stable" ]]; then
|
||||
export GIT_COMPARE=$CI_COMMIT_REF_NAME;
|
||||
else
|
||||
export GIT_COMPARE=master;
|
||||
fi
|
||||
export JS_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep ^js/ | wc -l)"
|
||||
export JS_OLD_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep ^js-old/ | wc -l)"
|
||||
export RUST_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^test.sh -e ^windows/ -e ^scripts/ -e ^mac/ -e ^nsis/ | wc -l)"
|
||||
|
||||
echo "RUST_FILES_MODIFIED: $RUST_FILES_MODIFIED"
|
||||
echo "JS_FILES_MODIFIED: $JS_FILES_MODIFIED"
|
||||
echo "JS_OLD_FILES_MODIFIED: $JS_OLD_FILES_MODIFIED"
|
||||
TEST_SWITCH=$1
|
||||
rust_test () {
|
||||
git submodule update --init --recursive
|
||||
rustup show
|
||||
if [[ "${RUST_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping Rust tests since no Rust files modified.";
|
||||
else ./test.sh || exit $?;
|
||||
fi
|
||||
# if [[ "$CI_COMMIT_REF_NAME" == "nightly" ]];
|
||||
# ### @TODO re-enable fail after https://github.com/paritytech/parity-import-tests/issues/3
|
||||
# then sh scripts/aura-test.sh; # || exit $?;
|
||||
# fi
|
||||
}
|
||||
js_test () {
|
||||
git submodule update --init --recursive
|
||||
if [[ "${JS_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS deps install since no JS files modified.";
|
||||
else ./js/scripts/install-deps.sh;
|
||||
fi
|
||||
if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS (old) deps install since no JS files modified.";
|
||||
else ./js-old/scripts/install-deps.sh;
|
||||
fi
|
||||
if [[ "${JS_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS lint since no JS files modified.";
|
||||
else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh;
|
||||
fi
|
||||
if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS (old) lint since no JS files modified.";
|
||||
else ./js-old/scripts/lint.sh && ./js-old/scripts/test.sh && ./js-old/scripts/build.sh;
|
||||
fi
|
||||
}
|
||||
js_release () {
|
||||
rustup default stable
|
||||
if [[ "${JS_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS deps install since no JS files modified.";
|
||||
else echo "install JS deps---------------"&&./js/scripts/install-deps.sh&&echo "done----------------";
|
||||
fi
|
||||
if [[ "${JS_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS rebuild since no JS files modified.";
|
||||
else echo "build JS--------------"&&./js/scripts/build.sh&&echo "Puch JS precompiled-----------------"&&./js/scripts/push-precompiled.sh&&echo "done----------------";
|
||||
fi
|
||||
if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS (old) deps install since no JS files modified.";
|
||||
else echo "install JS_OLD deps---------------"&&./js-old/scripts/install-deps.sh&&echo "done----------------";
|
||||
fi
|
||||
if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping JS (old) rebuild since no JS files modified.";
|
||||
else echo "build JS--------------"&&./js-old/scripts/build.sh&&echo "Puch JS precompiled-----------------"&&./js-old/scripts/push-precompiled.sh&&echo "done----------------";
|
||||
fi
|
||||
if [[ "${JS_FILES_MODIFIED}" == "0" ]] && [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
|
||||
then echo "Skipping Cargo update since no JS files modified.";
|
||||
else echo "push cargo---------"&&./js/scripts/push-cargo.sh&&echo "done----------------";
|
||||
fi
|
||||
}
|
||||
coverage_test () {
|
||||
git submodule update --init --recursive
|
||||
rm -rf target/*
|
||||
rm -rf js/.coverage
|
||||
scripts/cov.sh
|
||||
}
|
||||
case $TEST_SWITCH in
|
||||
stable )
|
||||
rustup default stable
|
||||
rust_test
|
||||
;;
|
||||
beta)
|
||||
rustup default beta
|
||||
rust_test
|
||||
;;
|
||||
nightly)
|
||||
rustup default nightly
|
||||
rust_test
|
||||
;;
|
||||
js-test)
|
||||
js_test
|
||||
;;
|
||||
js-release)
|
||||
js_release
|
||||
;;
|
||||
test-coverage)
|
||||
coverage_test
|
||||
;;
|
||||
esac
|
20
scripts/safe_curl.sh
Executable file
@ -0,0 +1,20 @@
#!/bin/bash

set -eu

DATA=$1
ADDRESS=$2

CODE=$(curl -o out.txt -w '%{http_code}' --data $DATA $ADDRESS)
cat out.txt && rm out.txt
echo "\n"

if [[ $CODE -eq 200 ]]; then
echo 'Pushed to updater service.';
elif [[ $CODE -eq 202 ]]; then
echo 'Updater service ignored request.';
else
echo 'Unable to push info to updater service.';
exit 2
fi

@ -1,22 +0,0 @@
|
||||
name: parity
|
||||
version: master
|
||||
summary: Fast, light, robust Ethereum implementation
|
||||
description: |
|
||||
Parity's goal is to be the fastest, lightest, and most secure Ethereum
|
||||
client. We are developing Parity using the sophisticated and cutting-edge
|
||||
Rust programming language. Parity is licensed under the GPLv3, and can be
|
||||
used for all your Ethereum needs.
|
||||
|
||||
grade: devel
|
||||
confinement: strict
|
||||
|
||||
apps:
|
||||
parity:
|
||||
command: parity
|
||||
plugs: [network, network-bind]
|
||||
|
||||
parts:
|
||||
parity:
|
||||
source: ..
|
||||
plugin: rust
|
||||
build-packages: [g++, libudev-dev, libssl-dev, make, pkg-config]
|
@ -4,5 +4,5 @@ Encoding=UTF-8
|
||||
Name=parity
|
||||
Comment=Fast, light, robust Ethereum implementation
|
||||
Exec=parity
|
||||
Icon=${SNAP}/meta/gui/icon.png
|
||||
Icon=/usr/share/pixmaps/icon.png
|
||||
Terminal=true
|
||||
|
@ -13,9 +13,20 @@ confinement: strict
|
||||
apps:
|
||||
parity:
|
||||
command: parity
|
||||
plugs: [network, network-bind, mount-observe, x11, unity7]
|
||||
plugs: [home, network, network-bind, mount-observe, x11, unity7, desktop, desktop-legacy, wayland]
|
||||
desktop: usr/share/applications/parity.desktop
|
||||
|
||||
icon: snap/gui/icon.png
|
||||
|
||||
parts:
|
||||
desktop-icon:
|
||||
source: ./snap
|
||||
plugin: nil
|
||||
prepare: |
|
||||
mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/applications
|
||||
mkdir -p $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps
|
||||
cp -v gui/parity.desktop $SNAPCRAFT_PART_INSTALL/usr/share/applications/
|
||||
cp -v gui/icon.png $SNAPCRAFT_PART_INSTALL/usr/share/pixmaps/
|
||||
parity:
|
||||
source: .
|
||||
plugin: rust
|
||||
|
@ -170,7 +170,18 @@ pub struct AttachedProtocol {
|
||||
}
|
||||
|
||||
impl AttachedProtocol {
|
||||
fn register(&self, _network: &NetworkService) {}
|
||||
fn register(&self, network: &NetworkService) {
|
||||
let res = network.register_protocol(
|
||||
self.handler.clone(),
|
||||
self.protocol_id,
|
||||
self.packet_count,
|
||||
self.versions
|
||||
);
|
||||
|
||||
if let Err(e) = res {
|
||||
warn!(target: "sync", "Error attaching protocol {:?}: {:?}", self.protocol_id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// EthSync initialization parameters.
|
||||
|
@ -522,6 +522,10 @@ impl BlockDownloader {
|
||||
trace!(target: "sync", "Unknown new block parent, restarting sync");
|
||||
break;
|
||||
},
|
||||
Err(BlockImportError::Block(BlockError::TemporarilyInvalid(_))) => {
|
||||
debug!(target: "sync", "Block temporarily invalid, restarting sync");
|
||||
break;
|
||||
},
|
||||
Err(e) => {
|
||||
debug!(target: "sync", "Bad block {:?} : {:?}", h, e);
|
||||
bad = true;
|
||||
|
@ -35,6 +35,7 @@
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::mem;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Instant, Duration};
|
||||
|
||||
use ethcore::encoded;
|
||||
use light::client::{AsLightClient, LightChainClient};
|
||||
@ -58,6 +59,13 @@ mod sync_round;
#[cfg(test)]
mod tests;

// Base number of milliseconds for the header request timeout.
const REQ_TIMEOUT_MILLISECS_BASE: u64 = 7000;
// Additional number of milliseconds for each requested header.
// If we request N headers, then the timeout will be:
// REQ_TIMEOUT_MILLISECS_BASE + N * REQ_TIMEOUT_MILLISECS_PER_HEADER
const REQ_TIMEOUT_MILLISECS_PER_HEADER: u64 = 10;

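
A quick worked example of the timeout formula above, using the constants just defined: requesting 128 headers gives 7000 + 128 × 10 = 8280 ms. The helper function below is illustrative only; it mirrors the `timeout_ms` computation in `maintain_sync` further down in this diff.

```rust
use std::time::Duration;

const REQ_TIMEOUT_MILLISECS_BASE: u64 = 7000;
const REQ_TIMEOUT_MILLISECS_PER_HEADER: u64 = 10;

// The timeout grows linearly with the number of requested headers.
fn header_request_timeout(num_headers: u64) -> Duration {
    Duration::from_millis(REQ_TIMEOUT_MILLISECS_BASE + num_headers * REQ_TIMEOUT_MILLISECS_PER_HEADER)
}

fn main() {
    // e.g. a full round of 128 headers: 7000 + 128 * 10 = 8280 ms
    assert_eq!(header_request_timeout(128), Duration::from_millis(8280));
}
```
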
/// Peer chain info.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct ChainInfo {
|
||||
@ -225,12 +233,18 @@ pub struct LightSync<L: AsLightClient> {
	start_block_number: u64,
	best_seen: Mutex<Option<ChainInfo>>, // best seen block on the network.
	peers: RwLock<HashMap<PeerId, Mutex<Peer>>>, // peers which are relevant to synchronization.
	pending_reqs: Mutex<HashSet<ReqId>>, // requests from this handler.
	pending_reqs: Mutex<HashMap<ReqId, PendingReq>>, // requests from this handler
	client: Arc<L>,
	rng: Mutex<OsRng>,
	state: Mutex<SyncState>,
}

#[derive(Debug, Clone)]
struct PendingReq {
	started: Instant,
	timeout: Duration,
}

impl<L: AsLightClient + Send + Sync> Handler for LightSync<L> {
	fn on_connect(
		&self,
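The `PendingReq` struct above is what turns the plain `HashSet<ReqId>` into a map: every in-flight request now remembers when it was dispatched and how long it may run. A hedged sketch of how such a map is typically used (the `ReqId` alias here is a stand-in, not the real type):

```rust
use std::collections::HashMap;
use std::time::{Duration, Instant};

// Hypothetical stand-in for the light client's request identifier.
type ReqId = u64;

#[derive(Debug, Clone)]
struct PendingReq {
    started: Instant,
    timeout: Duration,
}

fn main() {
    let mut pending: HashMap<ReqId, PendingReq> = HashMap::new();

    // Record a request when it is dispatched.
    pending.insert(1, PendingReq { started: Instant::now(), timeout: Duration::from_millis(7640) });

    // A response removes the entry; `None` means the request was unknown or already timed out.
    if pending.remove(&1).is_none() {
        println!("ignoring response for unknown request");
    }

    // Anything left past its deadline is treated as unfulfilled.
    let expired: Vec<ReqId> = pending.iter()
        .filter(|(_, info)| info.started.elapsed() >= info.timeout)
        .map(|(id, _)| *id)
        .collect();
    for id in expired {
        pending.remove(&id);
    }
}
```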
@ -353,7 +367,7 @@ impl<L: AsLightClient + Send + Sync> Handler for LightSync<L> {
			return
		}

		if !self.pending_reqs.lock().remove(&req_id) {
		if self.pending_reqs.lock().remove(&req_id).is_none() {
			return
		}

@ -412,7 +426,7 @@ impl<L: AsLightClient> LightSync<L> {
			*state = SyncState::AncestorSearch(AncestorSearch::begin(chain_info.best_block_number));
		}

	// handles request dispatch, block import, and state machine transitions.
	// handles request dispatch, block import, state machine transitions, and timeouts.
	fn maintain_sync(&self, ctx: &BasicContext) {
		const DRAIN_AMOUNT: usize = 128;

@ -502,6 +516,32 @@ impl<L: AsLightClient> LightSync<L> {
			}
		}

		// handle requests timeouts
		{
			let mut pending_reqs = self.pending_reqs.lock();
			let mut unfulfilled = Vec::new();
			for (req_id, info) in pending_reqs.iter() {
				if info.started.elapsed() >= info.timeout {
					debug!(target: "sync", "{} timed out", req_id);
					unfulfilled.push(req_id.clone());
				}
			}

			if !unfulfilled.is_empty() {
				for unfulfilled in unfulfilled.iter() {
					pending_reqs.remove(unfulfilled);
				}
				drop(pending_reqs);

				*state = match mem::replace(&mut *state, SyncState::Idle) {
					SyncState::Idle => SyncState::Idle,
					SyncState::AncestorSearch(search) =>
						SyncState::AncestorSearch(search.requests_abandoned(&unfulfilled)),
					SyncState::Rounds(round) => SyncState::Rounds(round.requests_abandoned(&unfulfilled)),
				};
			}
		}

		// allow dispatching of requests.
		{
			let peers = self.peers.read();
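One detail worth calling out in the block above is the `*state = match mem::replace(&mut *state, SyncState::Idle)` idiom: the old state is moved out (leaving `Idle` behind) so the by-value `requests_abandoned` transitions can consume it without cloning. A toy illustration of the same pattern, with made-up types:

```rust
use std::mem;

// Toy stand-ins; the real SyncState/AncestorSearch/SyncRound types live in the light sync module.
#[derive(Debug)]
enum State {
    Idle,
    Busy(Round),
}

#[derive(Debug)]
struct Round(u32);

impl Round {
    // Consumes the round, mirroring `requests_abandoned(self, ...)` in the diff.
    fn abandoned(self) -> Round {
        Round(self.0 + 1)
    }
}

fn main() {
    let mut state = State::Busy(Round(0));

    // `mem::replace` moves the old state out (leaving Idle behind) so the
    // by-value transition method can be called without cloning.
    state = match mem::replace(&mut state, State::Idle) {
        State::Idle => State::Idle,
        State::Busy(round) => State::Busy(round.abandoned()),
    };

    println!("{:?}", state);
}
```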
@ -535,7 +575,12 @@ impl<L: AsLightClient> LightSync<L> {
			if requested_from.contains(peer) { continue }
			match ctx.request_from(*peer, request.clone()) {
				Ok(id) => {
					self.pending_reqs.lock().insert(id.clone());
					let timeout_ms = REQ_TIMEOUT_MILLISECS_BASE +
						req.max * REQ_TIMEOUT_MILLISECS_PER_HEADER;
					self.pending_reqs.lock().insert(id.clone(), PendingReq {
						started: Instant::now(),
						timeout: Duration::from_millis(timeout_ms),
					});
					requested_from.insert(peer.clone());

					return Some(id)
@ -571,7 +616,7 @@ impl<L: AsLightClient> LightSync<L> {
			start_block_number: client.as_light_client().chain_info().best_block_number,
			best_seen: Mutex::new(None),
			peers: RwLock::new(HashMap::new()),
			pending_reqs: Mutex::new(HashSet::new()),
			pending_reqs: Mutex::new(HashMap::new()),
			client: client,
			rng: Mutex::new(OsRng::new()?),
			state: Mutex::new(SyncState::Idle),

@ -179,7 +179,7 @@ impl Fetcher {
		};

		trace!(target: "sync", "Received response for subchain ({} -> {})",
			request.subchain_parent.0 + 1, request.subchain_end.0);
			request.subchain_parent.0, request.subchain_end.0);

		let headers = ctx.data();

@ -241,6 +241,8 @@ impl Fetcher {
	}

	fn requests_abandoned(mut self, abandoned: &[ReqId]) -> SyncRound {
		trace!(target: "sync", "Abandonned requests {:?}", abandoned);

		for abandoned in abandoned {
			match self.pending.remove(abandoned) {
				None => {},
@ -258,12 +260,14 @@ impl Fetcher {
		while let Some(pending_req) = self.requests.pop() {
			match dispatcher(pending_req.headers_request.clone()) {
				Some(req_id) => {
					trace!(target: "sync", "Assigned request for subchain ({} -> {})",
						pending_req.subchain_parent.0 + 1, pending_req.subchain_end.0);
					trace!(target: "sync", "Assigned request {} for subchain ({} -> {})",
						req_id, pending_req.subchain_parent.0, pending_req.subchain_end.0);

					self.pending.insert(req_id, pending_req);
				}
				None => {
					trace!(target: "sync", "Failed to assign request for subchain ({} -> {})",
						pending_req.subchain_parent.0, pending_req.subchain_end.0);
					self.requests.push(pending_req);
					break;
				}

@ -32,7 +32,7 @@ use std::cmp;
use std::collections::HashMap;
use std::marker::PhantomData;
use std::path::{PathBuf, Path};
use std::{mem, fs, io};
use std::{fs, io, mem, result};

use parking_lot::{Mutex, MutexGuard, RwLock};
use rocksdb::{
@ -257,7 +257,25 @@ pub struct Database {
	flushing_lock: Mutex<bool>,
}

#[inline]
fn check_for_corruption<T, P: AsRef<Path>>(path: P, res: result::Result<T, String>) -> result::Result<T, String> {
	if let Err(ref s) = res {
		if s.starts_with("Corruption:") {
			warn!("DB corrupted: {}. Repair will be triggered on next restart", s);
			let _ = fs::File::create(path.as_ref().join(Database::CORRUPTION_FILE_NAME));
		}
	}

	res
}

fn is_corrupted(s: &str) -> bool {
	s.starts_with("Corruption:") || s.starts_with("Invalid argument: You have to open all column families")
}

impl Database {
	const CORRUPTION_FILE_NAME: &'static str = "CORRUPTED";

	/// Open database with default settings.
	pub fn open_default(path: &str) -> Result<Database> {
		Database::open(&DatabaseConfig::default(), path)
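The helpers above implement a marker-file scheme: when an operation fails with a RocksDB "Corruption:" error, a CORRUPTED file is dropped next to the database, and the next open runs a repair before the marker is cleared. A simplified, self-contained sketch of that protocol (here `repair_database` is only a placeholder for the actual `DB::repair` call):

```rust
use std::fs;
use std::path::Path;

const CORRUPTION_FILE_NAME: &'static str = "CORRUPTED";

// Drop the marker next to the database once corruption has been observed.
fn mark_corrupted(db_path: &Path) {
    let _ = fs::File::create(db_path.join(CORRUPTION_FILE_NAME));
}

// On open: if the marker exists, repair first, then clear the flag.
fn repair_if_marked(db_path: &Path) -> std::io::Result<()> {
    let marker = db_path.join(CORRUPTION_FILE_NAME);
    if marker.exists() {
        repair_database(db_path);   // placeholder for the RocksDB repair step
        fs::remove_file(marker)?;   // only clear the flag once repair has run
    }
    Ok(())
}

fn repair_database(_db_path: &Path) {
    // In the real code this is DB::repair(&opts, path).
}

fn main() -> std::io::Result<()> {
    let path = Path::new("/tmp/example-db");
    fs::create_dir_all(path)?;
    mark_corrupted(path);
    repair_if_marked(path)
}
```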
@ -287,6 +305,14 @@ impl Database {
|
||||
block_opts.set_cache(cache);
|
||||
}
|
||||
|
||||
// attempt database repair if it has been previously marked as corrupted
|
||||
let db_corrupted = Path::new(path).join(Database::CORRUPTION_FILE_NAME);
|
||||
if db_corrupted.exists() {
|
||||
warn!("DB has been previously marked as corrupted, attempting repair");
|
||||
DB::repair(&opts, path)?;
|
||||
fs::remove_file(db_corrupted)?;
|
||||
}
|
||||
|
||||
let columns = config.columns.unwrap_or(0) as usize;
|
||||
|
||||
let mut cf_options = Vec::with_capacity(columns);
|
||||
@ -306,12 +332,11 @@ impl Database {
|
||||
|
||||
let mut cfs: Vec<Column> = Vec::new();
|
||||
let db = match config.columns {
|
||||
Some(columns) => {
|
||||
Some(_) => {
|
||||
match DB::open_cf(&opts, path, &cfnames, &cf_options) {
|
||||
Ok(db) => {
|
||||
cfs = cfnames.iter().map(|n| db.cf_handle(n)
|
||||
.expect("rocksdb opens a cf_handle for each cfname; qed")).collect();
|
||||
assert!(cfs.len() == columns as usize);
|
||||
Ok(db)
|
||||
}
|
||||
Err(_) => {
|
||||
@ -321,7 +346,7 @@ impl Database {
|
||||
cfs = cfnames.iter().enumerate().map(|(i, n)| db.create_cf(n, &cf_options[i])).collect::<::std::result::Result<_, _>>()?;
|
||||
Ok(db)
|
||||
},
|
||||
err @ Err(_) => err,
|
||||
err => err,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -331,14 +356,18 @@ impl Database {
|
||||
|
||||
let db = match db {
|
||||
Ok(db) => db,
|
||||
Err(ref s) if s.starts_with("Corruption:") => {
|
||||
info!("{}", s);
|
||||
info!("Attempting DB repair for {}", path);
|
||||
Err(ref s) if is_corrupted(s) => {
|
||||
warn!("DB corrupted: {}, attempting repair", s);
|
||||
DB::repair(&opts, path)?;
|
||||
|
||||
match cfnames.is_empty() {
|
||||
true => DB::open(&opts, path)?,
|
||||
false => DB::open_cf(&opts, path, &cfnames, &cf_options)?
|
||||
false => {
|
||||
let db = DB::open_cf(&opts, path, &cfnames, &cf_options)?;
|
||||
cfs = cfnames.iter().map(|n| db.cf_handle(n)
|
||||
.expect("rocksdb opens a cf_handle for each cfname; qed")).collect();
|
||||
db
|
||||
},
|
||||
}
|
||||
},
|
||||
Err(s) => { return Err(s.into()); }
|
||||
@ -425,7 +454,11 @@ impl Database {
|
||||
}
|
||||
}
|
||||
}
|
||||
db.write_opt(batch, &self.write_opts)?;
|
||||
|
||||
check_for_corruption(
|
||||
&self.path,
|
||||
db.write_opt(batch, &self.write_opts))?;
|
||||
|
||||
for column in self.flushing.write().iter_mut() {
|
||||
column.clear();
|
||||
column.shrink_to_fit();
|
||||
@ -471,7 +504,10 @@ impl Database {
|
||||
},
|
||||
}
|
||||
}
|
||||
db.write_opt(batch, &self.write_opts).map_err(Into::into)
|
||||
|
||||
check_for_corruption(
|
||||
&self.path,
|
||||
db.write_opt(batch, &self.write_opts)).map_err(Into::into)
|
||||
},
|
||||
None => Err("Database is closed".into())
|
||||
}
|
||||
|
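Both write paths in the hunks above now route the `write_opt` result through `check_for_corruption`, so corruption detected during a write also schedules a repair on the next start instead of only failing the call. A stripped-down illustration of the wrapper idea (string matching only, no RocksDB involved):

```rust
use std::fs;
use std::path::Path;

// Simplified restatement of the helper for illustration: inspect an error string,
// drop a marker file when it looks like corruption, and pass the result through.
fn check_for_corruption<T, P: AsRef<Path>>(path: P, res: Result<T, String>) -> Result<T, String> {
    if let Err(ref s) = res {
        if s.starts_with("Corruption:") {
            let _ = fs::File::create(path.as_ref().join("CORRUPTED"));
        }
    }
    res
}

fn main() {
    // A failing "write" is routed through the check so the marker gets created.
    let res: Result<(), String> = check_for_corruption("/tmp", Err("Corruption: bad block".into()));
    assert!(res.is_err());
}
```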
@ -31,7 +31,9 @@ ethcore-logger = { path ="../../logger" }
|
||||
ipnetwork = "0.12.6"
keccak-hash = { path = "../hash" }
snappy = { git = "https://github.com/paritytech/rust-snappy" }
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
error-chain = { version = "0.11", default-features = false }

[dev-dependencies]

@ -719,7 +719,7 @@ impl Host {
|
||||
let address = {
|
||||
let mut nodes = self.nodes.write();
|
||||
if let Some(node) = nodes.get_mut(id) {
|
||||
node.last_attempted = Some(::time::now());
|
||||
node.attempts += 1;
|
||||
node.endpoint.address
|
||||
}
|
||||
else {
|
||||
@ -738,6 +738,7 @@ impl Host {
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Err(e) = self.create_connection(socket, Some(id), io) {
|
||||
debug!(target: "network", "Can't create connection: {:?}", e);
|
||||
}
|
||||
@ -1281,4 +1282,3 @@ fn host_client_url() {
|
||||
let host: Host = Host::new(config, Arc::new(NetworkStats::new()), None).unwrap();
|
||||
assert!(host.local_url().starts_with("enode://101b3ef5a4ea7a1c7928e24c4c75fd053c235d7b80c22ae5c03d145d0ac7396e2a4ffff9adee3133a7b05044a5cee08115fd65145e5165d646bde371010d803c@"));
|
||||
}
|
||||
|
||||
|
@ -80,14 +80,16 @@ extern crate path;
|
||||
extern crate ethcore_logger;
|
||||
extern crate ipnetwork;
|
||||
extern crate keccak_hash as hash;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
extern crate snappy;
|
||||
|
||||
#[macro_use]
|
||||
extern crate error_chain;
|
||||
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate ethcore_devtools as devtools;
|
||||
@ -213,4 +215,3 @@ pub enum AllowIP {
|
||||
/// Block all addresses
|
||||
None,
|
||||
}
|
||||
|
||||
|
@ -14,25 +14,20 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::mem;
|
||||
use std::slice::from_raw_parts;
|
||||
use std::net::{SocketAddr, ToSocketAddrs, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::str::{FromStr};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::path::{PathBuf};
|
||||
use std::fmt;
|
||||
use std::fs;
|
||||
use std::io::{Read, Write};
|
||||
use bigint::hash::*;
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::net::{SocketAddr, ToSocketAddrs, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::{fs, mem, slice};
|
||||
use bigint::hash::H512;
|
||||
use rlp::*;
|
||||
use time::Tm;
|
||||
use error::{Error, ErrorKind};
|
||||
use {AllowIP, IpFilter};
|
||||
use discovery::{TableUpdates, NodeEntry};
|
||||
use ip_utils::*;
|
||||
use serde_json::Value;
|
||||
use serde_json;
|
||||
|
||||
/// Node public key
|
||||
pub type NodeId = H512;
|
||||
@ -80,7 +75,7 @@ impl NodeEndpoint {
|
||||
4 => Ok(SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(addr_bytes[0], addr_bytes[1], addr_bytes[2], addr_bytes[3]), tcp_port))),
|
||||
16 => unsafe {
|
||||
let o: *const u16 = mem::transmute(addr_bytes.as_ptr());
|
||||
let o = from_raw_parts(o, 8);
|
||||
let o = slice::from_raw_parts(o, 8);
|
||||
Ok(SocketAddr::V6(SocketAddrV6::new(Ipv6Addr::new(o[0], o[1], o[2], o[3], o[4], o[5], o[6], o[7]), tcp_port, 0, 0)))
|
||||
},
|
||||
_ => Err(DecoderError::RlpInconsistentLengthAndData)
|
||||
@ -95,7 +90,7 @@ impl NodeEndpoint {
|
||||
}
|
||||
SocketAddr::V6(a) => unsafe {
|
||||
let o: *const u8 = mem::transmute(a.ip().segments().as_ptr());
|
||||
rlp.append(&from_raw_parts(o, 16));
|
||||
rlp.append(&slice::from_raw_parts(o, 16));
|
||||
}
|
||||
};
|
||||
rlp.append(&self.udp_port);
|
||||
@ -143,18 +138,30 @@ pub struct Node {
|
||||
	pub id: NodeId,
	pub endpoint: NodeEndpoint,
	pub peer_type: PeerType,
	pub attempts: u32,
	pub failures: u32,
	pub last_attempted: Option<Tm>,
}

const DEFAULT_FAILURE_PERCENTAGE: usize = 50;

impl Node {
	pub fn new(id: NodeId, endpoint: NodeEndpoint) -> Node {
		Node {
			id: id,
			endpoint: endpoint,
			peer_type: PeerType::Optional,
			attempts: 0,
			failures: 0,
			last_attempted: None,
		}
	}

	/// Returns the node's failure percentage (0..100) in buckets of 5%. If there are 0 connection attempts for this
	/// node the default failure percentage is returned (50%).
	pub fn failure_percentage(&self) -> usize {
		if self.attempts == 0 {
			DEFAULT_FAILURE_PERCENTAGE
		} else {
			(self.failures * 100 / self.attempts / 5 * 5) as usize
		}
	}
}
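The integer arithmetic above buckets the failure ratio into 5% steps: `failures * 100 / attempts` is computed first, then rounded down by the `/ 5 * 5` pair. For example, 1 failure in 3 attempts gives 33, which lands in the 30% bucket; with no attempts at all the default 50% is reported. A small standalone check of those values:

```rust
// Reproduction of the bucketing arithmetic above, for a few sample inputs.
fn failure_percentage(failures: u32, attempts: u32) -> usize {
    const DEFAULT_FAILURE_PERCENTAGE: usize = 50;
    if attempts == 0 {
        DEFAULT_FAILURE_PERCENTAGE
    } else {
        (failures * 100 / attempts / 5 * 5) as usize
    }
}

fn main() {
    assert_eq!(failure_percentage(0, 0), 50);  // no attempts yet: default bucket
    assert_eq!(failure_percentage(2, 2), 100); // every attempt failed
    assert_eq!(failure_percentage(1, 3), 30);  // 33% rounds down to the 30% bucket
    assert_eq!(failure_percentage(0, 1), 0);   // clean record
}
```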
@ -184,7 +191,7 @@ impl FromStr for Node {
|
||||
id: id,
|
||||
endpoint: endpoint,
|
||||
peer_type: PeerType::Optional,
|
||||
last_attempted: None,
|
||||
attempts: 0,
|
||||
failures: 0,
|
||||
})
|
||||
}
|
||||
@ -203,6 +210,9 @@ impl Hash for Node {
|
||||
}
|
||||
}
|
||||
|
||||
const MAX_NODES: usize = 1024;
|
||||
const NODES_FILE: &str = "nodes.json";
|
||||
|
||||
/// Node table backed by disk file.
|
||||
pub struct NodeTable {
|
||||
nodes: HashMap<NodeId, Node>,
|
||||
@ -221,23 +231,37 @@ impl NodeTable {
|
||||
|
||||
/// Add a node to table
|
||||
pub fn add_node(&mut self, mut node: Node) {
|
||||
// preserve failure counter
|
||||
let failures = self.nodes.get(&node.id).map_or(0, |n| n.failures);
|
||||
// preserve attempts and failure counter
|
||||
let (attempts, failures) =
|
||||
self.nodes.get(&node.id).map_or((0, 0), |n| (n.attempts, n.failures));
|
||||
|
||||
node.attempts = attempts;
|
||||
node.failures = failures;
|
||||
|
||||
self.nodes.insert(node.id.clone(), node);
|
||||
}
|
||||
|
||||
/// Returns node ids sorted by number of failures
|
||||
/// Returns node ids sorted by failure percentage, for nodes with the same failure percentage the absolute number of
|
||||
/// failures is considered.
|
||||
pub fn nodes(&self, filter: IpFilter) -> Vec<NodeId> {
|
||||
let mut refs: Vec<&Node> = self.nodes.values().filter(|n| !self.useless_nodes.contains(&n.id) && n.endpoint.is_allowed(&filter)).collect();
|
||||
refs.sort_by(|a, b| a.failures.cmp(&b.failures));
|
||||
refs.iter().map(|n| n.id.clone()).collect()
|
||||
let mut refs: Vec<&Node> = self.nodes.values()
|
||||
.filter(|n| !self.useless_nodes.contains(&n.id))
|
||||
.filter(|n| n.endpoint.is_allowed(&filter))
|
||||
.collect();
|
||||
refs.sort_by(|a, b| {
|
||||
a.failure_percentage().cmp(&b.failure_percentage())
|
||||
.then_with(|| a.failures.cmp(&b.failures))
|
||||
.then_with(|| b.attempts.cmp(&a.attempts)) // we use reverse ordering for number of attempts
|
||||
});
|
||||
refs.into_iter().map(|n| n.id).collect()
|
||||
}
|
||||
|
||||
/// Unordered list of all entries
|
||||
pub fn unordered_entries(&self) -> Vec<NodeEntry> {
|
||||
// preserve failure counter
|
||||
self.nodes.values().map(|n| NodeEntry { endpoint: n.endpoint.clone(), id: n.id.clone() }).collect()
|
||||
self.nodes.values().map(|n| NodeEntry {
|
||||
endpoint: n.endpoint.clone(),
|
||||
id: n.id.clone(),
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Get particular node
|
||||
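The new ordering introduced for `nodes()` above sorts by bucketed failure percentage first, breaks ties by absolute failure count, and finally prefers nodes with more attempts (reverse order), so better-known peers come first. A hedged sketch of the same chained comparison over toy records:

```rust
// Toy records standing in for node-table entries; only the sort keys matter here.
#[derive(Debug)]
struct Entry {
    failure_percentage: usize,
    failures: u32,
    attempts: u32,
}

fn main() {
    let mut entries = vec![
        Entry { failure_percentage: 50, failures: 0, attempts: 0 },
        Entry { failure_percentage: 0, failures: 0, attempts: 1 },
        Entry { failure_percentage: 30, failures: 1, attempts: 3 },
        Entry { failure_percentage: 100, failures: 2, attempts: 2 },
    ];

    // Same chained comparison as the diff: percentage, then failures,
    // then attempts in reverse so better-known peers sort first.
    entries.sort_by(|a, b| {
        a.failure_percentage.cmp(&b.failure_percentage)
            .then_with(|| a.failures.cmp(&b.failures))
            .then_with(|| b.attempts.cmp(&a.attempts))
    });

    for e in &entries {
        println!("{:?}", e);
    }
}
```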
@ -270,7 +294,7 @@ impl NodeTable {
|
||||
}
|
||||
}
|
||||
|
||||
/// Mark as useless, no furter attempts to connect until next call to `clear_useless`.
|
||||
/// Mark as useless, no further attempts to connect until next call to `clear_useless`.
|
||||
pub fn mark_as_useless(&mut self, id: &NodeId) {
|
||||
self.useless_nodes.insert(id.clone());
|
||||
}
|
||||
@ -282,77 +306,62 @@ impl NodeTable {
|
||||
|
||||
/// Save the nodes.json file.
|
||||
pub fn save(&self) {
|
||||
if let Some(ref path) = self.path {
|
||||
let mut path_buf = PathBuf::from(path);
|
||||
if let Err(e) = fs::create_dir_all(path_buf.as_path()) {
|
||||
warn!("Error creating node table directory: {:?}", e);
|
||||
return;
|
||||
};
|
||||
path_buf.push("nodes.json");
|
||||
let mut json = String::new();
|
||||
json.push_str("{\n");
|
||||
json.push_str("\"nodes\": [\n");
|
||||
let node_ids = self.nodes(IpFilter::default());
|
||||
for i in 0 .. node_ids.len() {
|
||||
let node = self.nodes.get(&node_ids[i]).expect("self.nodes() only returns node IDs from self.nodes");
|
||||
json.push_str(&format!("\t{{ \"url\": \"{}\", \"failures\": {} }}{}\n", node, node.failures, if i == node_ids.len() - 1 {""} else {","}))
|
||||
}
|
||||
json.push_str("]\n");
|
||||
json.push_str("}");
|
||||
let mut file = match fs::File::create(path_buf.as_path()) {
|
||||
Ok(file) => file,
|
||||
Err(e) => {
|
||||
warn!("Error creating node table file: {:?}", e);
|
||||
return;
|
||||
let mut path = match self.path {
|
||||
Some(ref path) => PathBuf::from(path),
|
||||
None => return,
|
||||
};
|
||||
if let Err(e) = fs::create_dir_all(&path) {
|
||||
warn!("Error creating node table directory: {:?}", e);
|
||||
return;
|
||||
}
|
||||
path.push(NODES_FILE);
|
||||
let node_ids = self.nodes(IpFilter::default());
|
||||
let nodes = node_ids.into_iter()
|
||||
.map(|id| self.nodes.get(&id).expect("self.nodes() only returns node IDs from self.nodes"))
|
||||
.take(MAX_NODES)
|
||||
.map(|node| node.clone())
|
||||
.map(Into::into)
|
||||
.collect();
|
||||
let table = json::NodeTable { nodes };
|
||||
|
||||
match fs::File::create(&path) {
|
||||
Ok(file) => {
|
||||
if let Err(e) = serde_json::to_writer_pretty(file, &table) {
|
||||
warn!("Error writing node table file: {:?}", e);
|
||||
}
|
||||
};
|
||||
if let Err(e) = file.write(&json.into_bytes()) {
|
||||
warn!("Error writing node table file: {:?}", e);
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("Error creating node table file: {:?}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load(path: Option<String>) -> HashMap<NodeId, Node> {
|
||||
let mut nodes: HashMap<NodeId, Node> = HashMap::new();
|
||||
if let Some(path) = path {
|
||||
let mut path_buf = PathBuf::from(path);
|
||||
path_buf.push("nodes.json");
|
||||
let mut file = match fs::File::open(path_buf.as_path()) {
|
||||
Ok(file) => file,
|
||||
Err(e) => {
|
||||
debug!("Error opening node table file: {:?}", e);
|
||||
return nodes;
|
||||
}
|
||||
};
|
||||
let mut buf = String::new();
|
||||
match file.read_to_string(&mut buf) {
|
||||
Ok(_) => {},
|
||||
Err(e) => {
|
||||
warn!("Error reading node table file: {:?}", e);
|
||||
return nodes;
|
||||
}
|
||||
}
|
||||
let json: Value = match ::serde_json::from_str(&buf) {
|
||||
Ok(json) => json,
|
||||
Err(e) => {
|
||||
warn!("Error parsing node table file: {:?}", e);
|
||||
return nodes;
|
||||
}
|
||||
};
|
||||
if let Some(list) = json.as_object().and_then(|o| o.get("nodes")).and_then(|n| n.as_array()) {
|
||||
for n in list.iter().filter_map(|n| n.as_object()) {
|
||||
if let Some(url) = n.get("url").and_then(|u| u.as_str()) {
|
||||
if let Ok(mut node) = Node::from_str(url) {
|
||||
if let Some(failures) = n.get("failures").and_then(|f| f.as_u64()) {
|
||||
node.failures = failures as u32;
|
||||
}
|
||||
nodes.insert(node.id.clone(), node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let path = match path {
|
||||
Some(path) => PathBuf::from(path).join(NODES_FILE),
|
||||
None => return Default::default(),
|
||||
};
|
||||
|
||||
let file = match fs::File::open(&path) {
|
||||
Ok(file) => file,
|
||||
Err(e) => {
|
||||
debug!("Error opening node table file: {:?}", e);
|
||||
return Default::default();
|
||||
},
|
||||
};
|
||||
let res: Result<json::NodeTable, _> = serde_json::from_reader(file);
|
||||
match res {
|
||||
Ok(table) => {
|
||||
table.nodes.into_iter()
|
||||
.filter_map(|n| n.into_node())
|
||||
.map(|n| (n.id.clone(), n))
|
||||
.collect()
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("Error reading node table file: {:?}", e);
|
||||
Default::default()
|
||||
},
|
||||
}
|
||||
nodes
|
||||
}
|
||||
}
|
||||
|
||||
@ -364,13 +373,51 @@ impl Drop for NodeTable {
|
||||
|
||||
/// Check if node url is valid
|
||||
pub fn validate_node_url(url: &str) -> Option<Error> {
|
||||
use std::str::FromStr;
|
||||
match Node::from_str(url) {
|
||||
Ok(_) => None,
|
||||
Err(e) => Some(e)
|
||||
}
|
||||
}
|
||||
|
||||
mod json {
|
||||
use super::*;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct NodeTable {
|
||||
pub nodes: Vec<Node>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct Node {
|
||||
pub url: String,
|
||||
pub attempts: u32,
|
||||
pub failures: u32,
|
||||
}
|
||||
|
||||
impl Node {
|
||||
pub fn into_node(self) -> Option<super::Node> {
|
||||
match super::Node::from_str(&self.url) {
|
||||
Ok(mut node) => {
|
||||
node.attempts = self.attempts;
|
||||
node.failures = self.failures;
|
||||
Some(node)
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a super::Node> for Node {
|
||||
fn from(node: &'a super::Node) -> Self {
|
||||
Node {
|
||||
url: format!("{}", node),
|
||||
attempts: node.attempts,
|
||||
failures: node.failures,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
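The `json` module above replaces the hand-built string writer with serde-derived types, so the on-disk nodes.json is produced by serde_json. A rough sketch of what serializing such a table looks like (field names follow the structs above; the URL value is made up):

```rust
// Sketch only; assumes serde, serde_derive and serde_json as in the updated Cargo.toml.
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;

#[derive(Serialize, Deserialize)]
struct NodeTable {
    nodes: Vec<Node>,
}

#[derive(Serialize, Deserialize)]
struct Node {
    url: String,
    attempts: u32,
    failures: u32,
}

fn main() {
    let table = NodeTable {
        nodes: vec![Node { url: "enode://example@127.0.0.1:30303".into(), attempts: 3, failures: 1 }],
    };
    // Pretty-printed output along the lines of:
    // { "nodes": [ { "url": "enode://example@127.0.0.1:30303", "attempts": 3, "failures": 1 } ] }
    println!("{}", serde_json::to_string_pretty(&table).unwrap());
}
```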
@ -408,26 +455,42 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn table_failure_order() {
|
||||
fn table_failure_percentage_order() {
|
||||
let node1 = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
|
||||
let node2 = Node::from_str("enode://b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
|
||||
let node3 = Node::from_str("enode://c979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
|
||||
let node4 = Node::from_str("enode://d979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
|
||||
let id1 = H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
|
||||
let id2 = H512::from_str("b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
|
||||
let id3 = H512::from_str("c979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
|
||||
let id4 = H512::from_str("d979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
|
||||
let mut table = NodeTable::new(None);
|
||||
table.add_node(node3);
|
||||
|
||||
table.add_node(node1);
|
||||
table.add_node(node2);
|
||||
table.add_node(node3);
|
||||
table.add_node(node4);
|
||||
|
||||
// node 1 - failure percentage 100%
|
||||
table.get_mut(&id1).unwrap().attempts = 2;
|
||||
table.note_failure(&id1);
|
||||
table.note_failure(&id1);
|
||||
|
||||
// node2 - failure percentage 33%
|
||||
table.get_mut(&id2).unwrap().attempts = 3;
|
||||
table.note_failure(&id2);
|
||||
|
||||
// node3 - failure percentage 0%
|
||||
table.get_mut(&id3).unwrap().attempts = 1;
|
||||
|
||||
// node4 - failure percentage 50% (default when no attempts)
|
||||
|
||||
let r = table.nodes(IpFilter::default());
|
||||
|
||||
assert_eq!(r[0][..], id3[..]);
|
||||
assert_eq!(r[1][..], id2[..]);
|
||||
assert_eq!(r[2][..], id1[..]);
|
||||
assert_eq!(r[2][..], id4[..]);
|
||||
assert_eq!(r[3][..], id1[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -441,6 +504,9 @@ mod tests {
|
||||
let mut table = NodeTable::new(Some(tempdir.path().to_str().unwrap().to_owned()));
|
||||
table.add_node(node1);
|
||||
table.add_node(node2);
|
||||
|
||||
table.get_mut(&id1).unwrap().attempts = 1;
|
||||
table.get_mut(&id2).unwrap().attempts = 1;
|
||||
table.note_failure(&id2);
|
||||
}
|
||||
|
||||
|
@ -1,10 +1,26 @@
# NOTE This file is used by the auto-updater service.
# Make sure to update the service if it's moved or the structure is changed.
[package]
name = "parity-version"
# NOTE: this value is used for Parity version string.
version = "1.9.0"
version = "1.9.3"
authors = ["Parity Technologies <admin@parity.io>"]
build = "build.rs"

[package.metadata]
# This versions track. Should be changed to `stable` or `beta` when on respective branches.
# Used by auto-updater and for Parity version string.
track = "beta"

# Indicates a critical release in this track (i.e. consensus issue)
critical = false

# Latest supported fork blocks for various networks. Used ONLY by auto-updater.
[package.metadata.forks]
foundation = 4370000
ropsten = 10
kovan = 5067000

[dependencies]
ethcore-bytes = { path = "../bytes" }
rlp = { path = "../rlp" }
||||
@ -13,6 +29,7 @@ target_info = "0.1"
[build-dependencies]
vergen = "0.1"
rustc_version = "0.1.0"
toml = "0.4"

[features]
final = []

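Cargo itself ignores the `[package.metadata]` table; it is read back by the auto-updater and, with the `toml` crate added to `[build-dependencies]` above, can also be parsed from Cargo.toml at build time. A hedged sketch of pulling the track out of such a manifest (assumes only the `toml` crate; this is not the actual build script):

```rust
// Hedged sketch: reading the release track from a metadata table like the one above.
extern crate toml;

fn main() {
    let manifest = r#"
        [package.metadata]
        track = "beta"
        critical = false
    "#;

    let value: toml::Value = manifest.parse().expect("valid TOML");
    let track = value.as_table()
        .and_then(|t| t.get("package"))
        .and_then(|v| v.as_table())
        .and_then(|t| t.get("metadata"))
        .and_then(|v| v.as_table())
        .and_then(|t| t.get("track"))
        .and_then(|v| v.as_str())
        .unwrap_or("unknown");

    println!("release track: {}", track);
}
```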
@ -28,7 +28,7 @@ include!(concat!(env!("OUT_DIR"), "/version.rs"));
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));

#[cfg(feature = "final")]
const THIS_TRACK: &'static str = "nightly";
const THIS_TRACK: &'static str = "beta";
// ^^^ should be reset to "stable" or "beta" according to the release branch.

#[cfg(not(feature = "final"))]
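The pair of `cfg` attributes above selects which `THIS_TRACK` constant gets compiled, driven by the `final` Cargo feature declared in the manifest. A generic illustration of the pattern (the non-final value below is a placeholder, since that line falls outside the hunk):

```rust
// Generic illustration: a Cargo feature selects which constant is compiled in.
#[cfg(feature = "final")]
const THIS_TRACK: &'static str = "beta";

#[cfg(not(feature = "final"))]
const THIS_TRACK: &'static str = "unknown"; // placeholder; the real non-final value is outside this hunk

fn main() {
    println!("track: {}", THIS_TRACK);
}
```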