Compare commits
618 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6c50f9007a | ||
|
|
78acefd35c | ||
|
|
c3a53e7545 | ||
|
|
b06fa73cff | ||
|
|
2caf71fdde | ||
|
|
4bfabb6752 | ||
|
|
9b54d3f2d3 | ||
|
|
db86dec404 | ||
|
|
d2fa3e3236 | ||
|
|
590362b23c | ||
|
|
5e2258c6de | ||
|
|
f65003c630 | ||
|
|
ceb23638fc | ||
|
|
24ab53245b | ||
|
|
1952d05d9c | ||
|
|
98cccd5b26 | ||
|
|
fd07b608bd | ||
|
|
944ace3b61 | ||
|
|
c697e40e84 | ||
|
|
b322af35bc | ||
|
|
987d26ac46 | ||
|
|
143d4840c7 | ||
|
|
fae7cf6b41 | ||
|
|
d8c1372b7c | ||
|
|
28821142e6 | ||
|
|
791d944a53 | ||
|
|
9e2b3308d4 | ||
|
|
680cc9c0c3 | ||
|
|
346c80ef11 | ||
|
|
efb310f251 | ||
|
|
1ede2fbed2 | ||
|
|
31ef44d718 | ||
|
|
19fc9fcc58 | ||
|
|
835dcaec2a | ||
|
|
2c93b0fdf7 | ||
|
|
e322fd824b | ||
|
|
366320fb8d | ||
|
|
29dc3ca2dd | ||
|
|
5f590a0df5 | ||
|
|
e3d32a4abb | ||
|
|
9c7359e343 | ||
|
|
628f0878b1 | ||
|
|
a835c13e6b | ||
|
|
abcfcacf95 | ||
|
|
9abc6bd7d1 | ||
|
|
5e775f3e27 | ||
|
|
1eab85afc6 | ||
|
|
662416f05b | ||
|
|
00b5d30d33 | ||
|
|
2d051e4f79 | ||
|
|
a257827f27 | ||
|
|
c3727266e1 | ||
|
|
364bf48cef | ||
|
|
849e574c2c | ||
|
|
e93872e7bb | ||
|
|
61c3e1a2d6 | ||
|
|
54bae9a0f2 | ||
|
|
42dd3addea | ||
|
|
c74c8c1ac1 | ||
|
|
4e39c318aa | ||
|
|
03b1e9d8ba | ||
|
|
fdaedc8c00 | ||
|
|
d60fe51553 | ||
|
|
c7621b9bbf | ||
|
|
0ec35d9ac5 | ||
|
|
ef702b77dc | ||
|
|
a8cf4efc91 | ||
|
|
aeb43b6af0 | ||
|
|
dcdde49d91 | ||
|
|
b49c44a198 | ||
|
|
561e843207 | ||
|
|
6ad5d559ca | ||
|
|
91db3535f8 | ||
|
|
baa1223736 | ||
|
|
a2d5edb8f5 | ||
|
|
454b4518f2 | ||
|
|
303036cab0 | ||
|
|
b7e9152cc2 | ||
|
|
b1b5ffff95 | ||
|
|
067cbe78d2 | ||
|
|
1211c1f10c | ||
|
|
148ec3731c | ||
|
|
1b6588cb27 | ||
|
|
0e4a06d078 | ||
|
|
d470773fec | ||
|
|
8f0eb3e192 | ||
|
|
48c7e4ab8c | ||
|
|
8362bc7f2d | ||
|
|
284fc65c70 | ||
|
|
9882902f31 | ||
|
|
789c85561e | ||
|
|
18dff68278 | ||
|
|
d9a92c2bea | ||
|
|
2145388103 | ||
|
|
77d00e3dab | ||
|
|
aa6909ff99 | ||
|
|
0085d6a47b | ||
|
|
7472bc3570 | ||
|
|
3e7b775961 | ||
|
|
edbbd42d80 | ||
|
|
e51e54eeeb | ||
|
|
fee68a5331 | ||
|
|
761ba164ee | ||
|
|
301190a16f | ||
|
|
99143c5872 | ||
|
|
1d2319207a | ||
|
|
0b666f605f | ||
|
|
7feb82c242 | ||
|
|
34c65f60ef | ||
|
|
df19b8df69 | ||
|
|
1fec0b1833 | ||
|
|
bfff19ca9f | ||
|
|
a101a23ba3 | ||
|
|
4f86f5b76d | ||
|
|
7029dda87c | ||
|
|
0c86357187 | ||
|
|
0bb845a05c | ||
|
|
f238c97c6f | ||
|
|
3a9b3b4729 | ||
|
|
2bfb510554 | ||
|
|
604cdde71d | ||
|
|
690f32c298 | ||
|
|
504b2de4a8 | ||
|
|
e3fa460374 | ||
|
|
3c4fa5583a | ||
|
|
025c5aebc9 | ||
|
|
6279ff32f5 | ||
|
|
cc681c623f | ||
|
|
007629464a | ||
|
|
e5a7a674f9 | ||
|
|
7234a7c630 | ||
|
|
4e8853c9f7 | ||
|
|
dbd1976fc1 | ||
|
|
1601030081 | ||
|
|
8d1964bc3b | ||
|
|
65ca2f9a07 | ||
|
|
de0d12d73c | ||
|
|
ca6d5660c1 | ||
|
|
b010fb5004 | ||
|
|
360ecd3728 | ||
|
|
59365b0133 | ||
|
|
1b45870af8 | ||
|
|
07c63c14f3 | ||
|
|
77a2c77c45 | ||
|
|
4e9d439f39 | ||
|
|
6431459bcf | ||
|
|
6a1eea1b3d | ||
|
|
d8094e0629 | ||
|
|
77933aa8a1 | ||
|
|
7dc281ff2f | ||
|
|
c4c2c77a40 | ||
|
|
223467c816 | ||
|
|
bae6a5eeec | ||
|
|
3f520b864b | ||
|
|
c7ea25227a | ||
|
|
fb65732076 | ||
|
|
e8b418ca03 | ||
|
|
8961d987a9 | ||
|
|
492da38d67 | ||
|
|
6956d218b5 | ||
|
|
b7c2a30d7d | ||
|
|
4260910db6 | ||
|
|
1b1548f539 | ||
|
|
0092417b85 | ||
|
|
ec1a892f44 | ||
|
|
5e83ff8835 | ||
|
|
3091552504 | ||
|
|
fb38c20c16 | ||
|
|
f8d256dafa | ||
|
|
0a5c9b0465 | ||
|
|
41c3b05a20 | ||
|
|
931a7a2210 | ||
|
|
fee056a251 | ||
|
|
9a086face4 | ||
|
|
561e8b42a8 | ||
|
|
3a60d723d8 | ||
|
|
7cc43893d8 | ||
|
|
61daa5f3e7 | ||
|
|
a56c5e6ba8 | ||
|
|
cc759530fe | ||
|
|
bc167a211b | ||
|
|
d8af9f4e7b | ||
|
|
59d946bf00 | ||
|
|
6840df4c09 | ||
|
|
04e36456bd | ||
|
|
5c08698fa0 | ||
|
|
0e43ce6e07 | ||
|
|
70be064aa5 | ||
|
|
f576926fa2 | ||
|
|
b74065a471 | ||
|
|
aab1cf020c | ||
|
|
5f025cc1b8 | ||
|
|
339f63a61a | ||
|
|
1b7c90e9e1 | ||
|
|
965e8ae74e | ||
|
|
684b142711 | ||
|
|
4dc7d3dc45 | ||
|
|
2b39c43e81 | ||
|
|
9e0d2c15cd | ||
|
|
65ca9afce2 | ||
|
|
99e981abcf | ||
|
|
8a21cde188 | ||
|
|
f38d34919b | ||
|
|
9196c7268a | ||
|
|
1b6d840ada | ||
|
|
25b35ebddd | ||
|
|
b602fb4a5e | ||
|
|
54bd7d26dc | ||
|
|
d69dd17d3c | ||
|
|
dd7a60c7a8 | ||
|
|
ade37be25b | ||
|
|
79659bdc76 | ||
|
|
06ff866e9d | ||
|
|
e3fc3ccada | ||
|
|
75b6a31e87 | ||
|
|
e9abcb2f6d | ||
|
|
8fda8e2071 | ||
|
|
03e039b13d | ||
|
|
dcea17f8c6 | ||
|
|
f1a050366f | ||
|
|
ee14a3fb31 | ||
|
|
375668bc40 | ||
|
|
246b5282e5 | ||
|
|
8f434cd02c | ||
|
|
ffced4e17f | ||
|
|
7d1c7a0474 | ||
|
|
ad39446e87 | ||
|
|
7e3c081007 | ||
|
|
78c264769e | ||
|
|
b6a1e29d11 | ||
|
|
4e66d3ca21 | ||
|
|
8b5ce1a772 | ||
|
|
118081c45a | ||
|
|
14d00a7f72 | ||
|
|
899538ae25 | ||
|
|
e5bbabb2ba | ||
|
|
be745f711f | ||
|
|
a62238c19d | ||
|
|
7462a69583 | ||
|
|
4d0e05a8c1 | ||
|
|
2b11448b45 | ||
|
|
0a71ee95af | ||
|
|
c49beccadc | ||
|
|
3497eab26c | ||
|
|
236b6f1c3e | ||
|
|
2b39af1f09 | ||
|
|
eed0e8b03a | ||
|
|
ccd6ad52cc | ||
|
|
c623e5f232 | ||
|
|
dccdedc001 | ||
|
|
d1930ac708 | ||
|
|
a688370c9d | ||
|
|
2df61d0a8c | ||
|
|
3c60f99def | ||
|
|
e54784bfeb | ||
|
|
308f36a532 | ||
|
|
06e97d107b | ||
|
|
7ac3dd0889 | ||
|
|
ffde22e711 | ||
|
|
e69e8254db | ||
|
|
372779915f | ||
|
|
e00de28556 | ||
|
|
b731ccea18 | ||
|
|
0309462b36 | ||
|
|
b4cd7ca49a | ||
|
|
7b8af30590 | ||
|
|
1d95fe481f | ||
|
|
2faa28ce9b | ||
|
|
0b5285c282 | ||
|
|
ffae847b48 | ||
|
|
6b8c9cac23 | ||
|
|
6d98f6fc9a | ||
|
|
cf366bdb29 | ||
|
|
56f46edab8 | ||
|
|
47f7366a5c | ||
|
|
e9cc4c848b | ||
|
|
d31b7d8968 | ||
|
|
ba3b2712a1 | ||
|
|
94f717a255 | ||
|
|
7849fff41e | ||
|
|
f0e8abb07b | ||
|
|
e04d58f647 | ||
|
|
abfa8e95a2 | ||
|
|
e120c75d17 | ||
|
|
0e088d783d | ||
|
|
6b5ad69c22 | ||
|
|
f7e15f2450 | ||
|
|
8a420d6580 | ||
|
|
88200a1193 | ||
|
|
b47e76a1a5 | ||
|
|
4e1fefc8ea | ||
|
|
da91a07906 | ||
|
|
c1c7d7a3e2 | ||
|
|
36d046f3ca | ||
|
|
d3c3e0b164 | ||
|
|
53401d6962 | ||
|
|
e3666457cf | ||
|
|
50495c6898 | ||
|
|
d520aa2633 | ||
|
|
a3eceb0c6c | ||
|
|
100d1c7bf6 | ||
|
|
e390e6b0af | ||
|
|
96e9a73a1b | ||
|
|
b8e6799d67 | ||
|
|
e7cc0477fb | ||
|
|
17fa7edbbf | ||
|
|
c6b3faca64 | ||
|
|
8ead8061ee | ||
|
|
bf68c87a08 | ||
|
|
d90ec3d192 | ||
|
|
322da07010 | ||
|
|
8019d222a7 | ||
|
|
f9a08e285c | ||
|
|
abecd80f54 | ||
|
|
3226e1ca33 | ||
|
|
752dad6829 | ||
|
|
4f69ee0437 | ||
|
|
e0c2995f78 | ||
|
|
622632616c | ||
|
|
4ac95b44e2 | ||
|
|
9cb4c99e5b | ||
|
|
301a4720b7 | ||
|
|
15c3233376 | ||
|
|
faf0428b60 | ||
|
|
dd3b6d49ee | ||
|
|
7246c9c74b | ||
|
|
5395f13528 | ||
|
|
9083923f27 | ||
|
|
5d6c53c9bd | ||
|
|
b6024adf85 | ||
|
|
5b6d47aeef | ||
|
|
c79ecee094 | ||
|
|
b953f9b66a | ||
|
|
5c0f9f1c40 | ||
|
|
871a9c063e | ||
|
|
2ff3dff6ea | ||
|
|
4bda7bf175 | ||
|
|
7f3e718851 | ||
|
|
5dac15fa85 | ||
|
|
6e8f97667a | ||
|
|
d41dd13918 | ||
|
|
b4d3f78d67 | ||
|
|
9ae2ed39ec | ||
|
|
6c00dc71c1 | ||
|
|
6f03c2bfd1 | ||
|
|
98f86d553a | ||
|
|
10914c8346 | ||
|
|
73644adf69 | ||
|
|
f3e7d7c725 | ||
|
|
2985561012 | ||
|
|
e8841ae9e2 | ||
|
|
b68375d462 | ||
|
|
a247d5b2dc | ||
|
|
f79159a69c | ||
|
|
4ccc82be92 | ||
|
|
9e4c122cf3 | ||
|
|
407c8c3fb9 | ||
|
|
6ab0fc4e14 | ||
|
|
5ed14c1064 | ||
|
|
2c0a1b652c | ||
|
|
010be34660 | ||
|
|
cd0c77dc5c | ||
|
|
444233bd1b | ||
|
|
7de4a3a2df | ||
|
|
2129edaae9 | ||
|
|
209e8eff35 | ||
|
|
32d2cd74d4 | ||
|
|
4cb610d9ae | ||
|
|
697d17ae9b | ||
|
|
fefc756870 | ||
|
|
febf774b4e | ||
|
|
a02db13278 | ||
|
|
f19911660d | ||
|
|
d5252ddf64 | ||
|
|
67a07adb0b | ||
|
|
5ff771b125 | ||
|
|
a34bea1dad | ||
|
|
1e269c94a6 | ||
|
|
60f6a3fed3 | ||
|
|
b9c1d0bd18 | ||
|
|
4ab607d4de | ||
|
|
d115006597 | ||
|
|
10068cee72 | ||
|
|
b5b6e3dd2a | ||
|
|
604ea5d684 | ||
|
|
146feea4a6 | ||
|
|
65482c5e9d | ||
|
|
bfd238e000 | ||
|
|
ecd880c8e7 | ||
|
|
30be0972b9 | ||
|
|
b21932687a | ||
|
|
e128285a77 | ||
|
|
7d17d77254 | ||
|
|
d6eb053826 | ||
|
|
cc95edf4dc | ||
|
|
a8a11e56db | ||
|
|
45087599ef | ||
|
|
9b34c77f98 | ||
|
|
33ba5b63f3 | ||
|
|
d209100a60 | ||
|
|
78b3d71745 | ||
|
|
e93466c897 | ||
|
|
72fa6a79a2 | ||
|
|
e7a7bd6695 | ||
|
|
80db43442e | ||
|
|
f2929f3b19 | ||
|
|
afbda7baa8 | ||
|
|
872e5537bb | ||
|
|
a20892e5e6 | ||
|
|
67ccfa1da1 | ||
|
|
bdeea80032 | ||
|
|
455ecf700c | ||
|
|
82f90085ee | ||
|
|
66ab102111 | ||
|
|
2adc25eec7 | ||
|
|
ee07bf29ce | ||
|
|
e43b1084c3 | ||
|
|
dd91121017 | ||
|
|
f157461ee1 | ||
|
|
35bfbc39f8 | ||
|
|
7ddfd2f030 | ||
|
|
f72196f1bb | ||
|
|
63f8cc3503 | ||
|
|
62153b1ff0 | ||
|
|
655ed93ee8 | ||
|
|
ae9f35668e | ||
|
|
d30e47a50e | ||
|
|
6b3f5c977a | ||
|
|
0abf2abc81 | ||
|
|
b0f9d73f6a | ||
|
|
30434325a2 | ||
|
|
01a02a8985 | ||
|
|
01ea968d4f | ||
|
|
2e840bc89c | ||
|
|
0c7c34e609 | ||
|
|
8de1e92b99 | ||
|
|
47c058a337 | ||
|
|
191f409741 | ||
|
|
b24053f4df | ||
|
|
7f85c0ce2a | ||
|
|
af8c5bb31a | ||
|
|
a2c05123fd | ||
|
|
5b0eeb75ef | ||
|
|
d82c2c8cc6 | ||
|
|
c1108da6e9 | ||
|
|
017430e803 | ||
|
|
b7006034b1 | ||
|
|
6e5a563c06 | ||
|
|
c4025622de | ||
|
|
a6891e9f9f | ||
|
|
bfbeda4ede | ||
|
|
3c7ba5045d | ||
|
|
46126b5fb6 | ||
|
|
141c2fd34a | ||
|
|
2ca4adb62c | ||
|
|
81b57ceddb | ||
|
|
fe6bdc870c | ||
|
|
8826fdb03a | ||
|
|
e8095826bf | ||
|
|
9374e31072 | ||
|
|
1a3f3ffdbf | ||
|
|
45043c3f52 | ||
|
|
f2d12aebef | ||
|
|
3fdb912db6 | ||
|
|
5086dc3024 | ||
|
|
204a63a00e | ||
|
|
06862c7dee | ||
|
|
003eef982b | ||
|
|
bb043ba2fd | ||
|
|
94efa3ac19 | ||
|
|
9c5ef1f776 | ||
|
|
32fafd7a24 | ||
|
|
88cc4cd17a | ||
|
|
48f28fe29c | ||
|
|
2b02651bbf | ||
|
|
c4989ddc44 | ||
|
|
eecd823d32 | ||
|
|
fbb9affadb | ||
|
|
9f0bfa01ca | ||
|
|
2bd5c3dba7 | ||
|
|
f212ae6322 | ||
|
|
b5f1524e78 | ||
|
|
ad30a6899b | ||
|
|
e234b7fdbf | ||
|
|
e84f308264 | ||
|
|
1f3f91136c | ||
|
|
b1935340c8 | ||
|
|
671ed1b9db | ||
|
|
af1fbb39ca | ||
|
|
53c3b772dd | ||
|
|
73128039a6 | ||
|
|
16f39487ba | ||
|
|
ab6153853b | ||
|
|
7a810def28 | ||
|
|
9902714fb4 | ||
|
|
0209c6e0ff | ||
|
|
d53028d0a8 | ||
|
|
f245d66c8a | ||
|
|
98e0618750 | ||
|
|
2d9a15324d | ||
|
|
c345bc3d85 | ||
|
|
8238fb37f3 | ||
|
|
70ef33f6fe | ||
|
|
18fbf3c174 | ||
|
|
1be3e5ec5e | ||
|
|
7c05a906d0 | ||
|
|
b13cd0d484 | ||
|
|
c466def1e8 | ||
|
|
5eb8cea6e7 | ||
|
|
7d348e2260 | ||
|
|
dcfb8c1a10 | ||
|
|
417a037ac5 | ||
|
|
689993a592 | ||
|
|
4938dfd971 | ||
|
|
eb895fbb31 | ||
|
|
2e9df2c39d | ||
|
|
ee1dfb5605 | ||
|
|
55bef6ec2f | ||
|
|
9e30d85fdc | ||
|
|
aa09cb266d | ||
|
|
fb68b0924a | ||
|
|
985b0fbf7f | ||
|
|
58fec9181f | ||
|
|
0b46cce95f | ||
|
|
45f2b82411 | ||
|
|
8b3ec61151 | ||
|
|
c522f564a9 | ||
|
|
78fab4e471 | ||
|
|
2d6a1160d5 | ||
|
|
5180919e52 | ||
|
|
5fb32229f9 | ||
|
|
079b24175c | ||
|
|
b31b067743 | ||
|
|
e33f9c1d11 | ||
|
|
0fa9c93f6f | ||
|
|
abe7518424 | ||
|
|
17e6d2d51c | ||
|
|
6ae93cf14e | ||
|
|
c72c020182 | ||
|
|
c369bba03b | ||
|
|
3605593d37 | ||
|
|
e3a8780da7 | ||
|
|
2d0d7150cc | ||
|
|
ae3dfe9327 | ||
|
|
db0d20e17b | ||
|
|
0ec917e980 | ||
|
|
99f4bc76d7 | ||
|
|
5fb9652af5 | ||
|
|
023e5b4b90 | ||
|
|
797e7d98d3 | ||
|
|
96ff69e7ef | ||
|
|
e7f82cf62e | ||
|
|
c94c799347 | ||
|
|
b9ad093d06 | ||
|
|
a35db9f454 | ||
|
|
9a9c4f6ad6 | ||
|
|
80b9e931f5 | ||
|
|
02d462e263 | ||
|
|
10efc7e2d3 | ||
|
|
b640df8fbb | ||
|
|
6345b54034 | ||
|
|
7208b9b525 | ||
|
|
ad1835e87c | ||
|
|
4c32177ef3 | ||
|
|
253bc333c7 | ||
|
|
9ad0ff960e | ||
|
|
7664ff5acd | ||
|
|
7d35f994d2 | ||
|
|
2cc1c92901 | ||
|
|
5080cc3c9e | ||
|
|
e5042d0d38 | ||
|
|
81de7e1075 | ||
|
|
d9101b9559 | ||
|
|
49a55886d3 | ||
|
|
298ea1d748 | ||
|
|
d157930f2c | ||
|
|
88145752ff | ||
|
|
319a01b34f | ||
|
|
d51958dbf5 | ||
|
|
801fcf3271 | ||
|
|
df144a6fcb | ||
|
|
410ded5d45 | ||
|
|
d994d7a10c | ||
|
|
649552e0f0 | ||
|
|
69561da15f | ||
|
|
04b641ff5f | ||
|
|
fefeb50cae | ||
|
|
d93a5d3da1 | ||
|
|
360f1fa34f | ||
|
|
c7af702270 | ||
|
|
fec4ccbbb8 | ||
|
|
30f2057bdf | ||
|
|
bb893daf7d | ||
|
|
4fc16a9eec | ||
|
|
895350e77c | ||
|
|
b5cbe34a2a | ||
|
|
ea14290f52 | ||
|
|
c647963911 | ||
|
|
99075ad22a | ||
|
|
a4fa6a3ac7 | ||
|
|
3308c40440 | ||
|
|
69d45f8ce4 | ||
|
|
5cc40d4525 | ||
|
|
18582d7b65 | ||
|
|
b975efa2bb | ||
|
|
4fea18d945 | ||
|
|
0f37261dbf | ||
|
|
3fb8a85f83 | ||
|
|
664dcc063a | ||
|
|
debbfc117a | ||
|
|
9c3bd83d3b | ||
|
|
e0b15116a5 | ||
|
|
451ef7f09b | ||
|
|
2d52c7b42f | ||
|
|
d4175ca6c6 | ||
|
|
0ad0c7882f | ||
|
|
9c1030878e | ||
|
|
f1dcdab75d |
@@ -9,7 +9,7 @@ trim_trailing_whitespace=true
|
||||
max_line_length=120
|
||||
insert_final_newline=true
|
||||
|
||||
[.travis.yml]
|
||||
[*.{yml,sh}]
|
||||
indent_style=space
|
||||
indent_size=2
|
||||
tab_width=8
|
||||
|
||||
12
.github/ISSUE_TEMPLATE.md
vendored
Normal file
12
.github/ISSUE_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
_Before filing a new issue, please **provide the following information**._
|
||||
|
||||
> I'm running:
|
||||
>
|
||||
> - **Parity version**: 0.0.0
|
||||
> - **Operating system**: Windows / MacOS / Linux
|
||||
> - **And installed**: via installer / homebrew / binaries / from source
|
||||
|
||||
_Your issue description goes here below. Try to include **actual** vs. **expected behavior** and **steps to reproduce** the issue._
|
||||
|
||||
---
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -15,6 +15,7 @@
|
||||
|
||||
# vim stuff
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# mac stuff
|
||||
.DS_Store
|
||||
|
||||
551
.gitlab-ci.yml
551
.gitlab-ci.yml
@@ -4,14 +4,15 @@ stages:
|
||||
- push-release
|
||||
- build
|
||||
variables:
|
||||
GIT_DEPTH: "3"
|
||||
SIMPLECOV: "true"
|
||||
RUST_BACKTRACE: "1"
|
||||
RUSTFLAGS: ""
|
||||
CARGOFLAGS: ""
|
||||
CI_SERVER_NAME: "GitLab CI"
|
||||
LIBSSL: "libssl1.0.0 (>=1.0.0)"
|
||||
cache:
|
||||
key: "$CI_BUILD_STAGE/$CI_BUILD_REF_NAME"
|
||||
key: "$CI_BUILD_STAGE-$CI_BUILD_REF_NAME"
|
||||
paths:
|
||||
- target
|
||||
untracked: true
|
||||
linux-stable:
|
||||
stage: build
|
||||
@@ -23,77 +24,14 @@ linux-stable:
|
||||
- triggers
|
||||
script:
|
||||
- rustup default stable
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh amd64
|
||||
- cp target/release/parity deb/usr/bin/parity
|
||||
- cp target/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
|
||||
- md5sum "parity_"$VER"_amd64.deb" > "parity_"$VER"_amd64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
# ARGUMENTS: 1. BUILD_PLATFORM (target for binaries) 2. PLATFORM (target for cargo) 3. ARC (architecture) 4. & 5. CC & CXX flags 6. binary identifier
|
||||
- scripts/gitlab-build.sh x86_64-unknown-linux-gnu x86_64-unknown-linux-gnu amd64 gcc g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- target/release/parity-evm
|
||||
- target/release/ethstore
|
||||
- target/release/ethkey
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-linux-gnu_parity"
|
||||
linux-snap:
|
||||
stage: build
|
||||
image: parity/snapcraft:gitlab-ci
|
||||
only:
|
||||
- snap
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
script:
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- cd snap
|
||||
- rm -rf *snap
|
||||
- sed -i 's/master/'"$VER"'/g' snapcraft.yaml
|
||||
- echo "Version:"$VER
|
||||
- snapcraft
|
||||
- ls
|
||||
- cp "parity_"$CI_BUILD"_REF_NAME_amd64.snap" "parity_"$VER"_amd64.snap"
|
||||
- md5sum "parity_"$VER"_amd64.snap" > "parity_"$VER"_amd64.snap.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.snap" --body "parity_"$VER"_amd64.snap"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.snap.md5" --body "parity_"$VER"_amd64.snap.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- scripts/parity_*_amd64.snap
|
||||
name: "stable-x86_64-unknown-snap-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-stable-debian:
|
||||
stage: build
|
||||
image: parity/rust-debian:gitlab-ci
|
||||
@@ -103,81 +41,14 @@ linux-stable-debian:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh amd64
|
||||
- cp target/release/parity deb/usr/bin/parity
|
||||
- cp target/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
|
||||
- md5sum "parity_"$VER"_amd64.deb" > "parity_"$VER"_amd64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/x86_64-unknown-debian-gnu
|
||||
- export LIBSSL="libssl1.1 (>=1.1.0)"
|
||||
- scripts/gitlab-build.sh x86_64-unknown-debian-gnu x86_64-unknown-linux-gnu amd64 gcc g++ debian
|
||||
tags:
|
||||
- rust
|
||||
- rust-debian
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-debian-gnu_parity"
|
||||
linux-beta:
|
||||
stage: build
|
||||
image: parity/rust:gitlab-ci
|
||||
only:
|
||||
- beta
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- rustup default beta
|
||||
- cargo build -j $(nproc) --release $CARGOFLAGS
|
||||
- strip target/release/parity
|
||||
tags:
|
||||
- rust
|
||||
- rust-beta
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
name: "beta-x86_64-unknown-linux-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-nightly:
|
||||
stage: build
|
||||
image: parity/rust:gitlab-ci
|
||||
only:
|
||||
- beta
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- rustup default nightly
|
||||
- cargo build -j $(nproc) --release $CARGOFLAGS
|
||||
- strip target/release/parity
|
||||
tags:
|
||||
- rust
|
||||
- rust-nightly
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
name: "nigthly-x86_64-unknown-linux-gnu_parity"
|
||||
allow_failure: true
|
||||
linux-centos:
|
||||
stage: build
|
||||
image: parity/rust-centos:gitlab-ci
|
||||
@@ -187,42 +58,12 @@ linux-centos:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CXX="g++"
|
||||
- export CC="gcc"
|
||||
- export PLATFORM=x86_64-unknown-centos-gnu
|
||||
- cargo build -j $(nproc) --release --features final $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --release -p evmbin
|
||||
- cargo build -j $(nproc) --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --release -p ethkey-cli
|
||||
- strip target/release/parity
|
||||
- strip target/release/parity-evm
|
||||
- strip target/release/ethstore
|
||||
- strip target/release/ethkey
|
||||
- md5sum target/release/parity > parity.md5
|
||||
- md5sum target/release/parity-evm > parity-evm.md5
|
||||
- md5sum target/release/ethstore > ethstore.md5
|
||||
- md5sum target/release/ethkey > ethkey.md5
|
||||
- export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity-evm --body target/release/parity-evm
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity-evm.md5 --body parity-evm.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethstore --body target/release/ethstore
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethstore.md5 --body ethstore.md5
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethkey --body target/release/ethkey
|
||||
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/ethkey.md5 --body ethkey.md5
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh x86_64-unknown-centos-gnu x86_64-unknown-linux-gnu x86_64 gcc g++ centos
|
||||
tags:
|
||||
- rust
|
||||
- rust-centos
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "x86_64-unknown-centos-gnu_parity"
|
||||
linux-i686:
|
||||
stage: build
|
||||
@@ -233,47 +74,13 @@ linux-i686:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export COMMIT=$(git rev-parse HEAD)
|
||||
- export PLATFORM=i686-unknown-linux-gnu
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- strip target/$PLATFORM/release/parity
|
||||
- strip target/$PLATFORM/release/parity-evm
|
||||
- strip target/$PLATFORM/release/ethstore
|
||||
- strip target/$PLATFORM/release/ethkey
|
||||
- strip target/$PLATFORM/release/parity
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- export SHA3=$(target/$PLATFORM/release/parity tools hash target/$PLATFORM/release/parity)
|
||||
- sh scripts/deb-build.sh i386
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_i386.deb"
|
||||
- md5sum "parity_"$VER"_i386.deb" > "parity_"$VER"_i386.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh i686-unknown-linux-gnu i686-unknown-linux-gnu i386 gcc g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-i686
|
||||
artifacts:
|
||||
paths:
|
||||
- target/i686-unknown-linux-gnu/release/parity
|
||||
- parity.zip
|
||||
name: "i686-unknown-linux-gnu"
|
||||
allow_failure: true
|
||||
linux-armv7:
|
||||
stage: build
|
||||
image: parity/rust-armv7:gitlab-ci
|
||||
@@ -283,55 +90,13 @@ linux-armv7:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=arm-linux-gnueabihf-gcc
|
||||
- export CXX=arm-linux-gnueabihf-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=armv7-unknown-linux-gnueabihf
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- export SHA3=$(target/$PLATFORM/release/parity tools hash target/$PLATFORM/release/parity)
|
||||
- sh scripts/deb-build.sh i386
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity-evm
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethstore
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh armhf
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
|
||||
- md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh armv7-unknown-linux-gnueabihf armv7-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/armv7-unknown-linux-gnueabihf/release/parity
|
||||
- parity.zip
|
||||
name: "armv7_unknown_linux_gnueabihf_parity"
|
||||
allow_failure: true
|
||||
linux-arm:
|
||||
stage: build
|
||||
image: parity/rust-arm:gitlab-ci
|
||||
@@ -341,52 +106,13 @@ linux-arm:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=arm-linux-gnueabihf-gcc
|
||||
- export CXX=arm-linux-gnueabihf-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=arm-unknown-linux-gnueabihf
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/parity-evm
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethstore
|
||||
- arm-linux-gnueabihf-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh armhf
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
|
||||
- md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh arm-unknown-linux-gnueabihf arm-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/arm-unknown-linux-gnueabihf/release/parity
|
||||
- parity.zip
|
||||
name: "arm-unknown-linux-gnueabihf_parity"
|
||||
allow_failure: true
|
||||
linux-aarch64:
|
||||
stage: build
|
||||
image: parity/rust-arm64:gitlab-ci
|
||||
@@ -396,50 +122,29 @@ linux-aarch64:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- export CC=aarch64-linux-gnu-gcc
|
||||
- export CXX=aarch64-linux-gnu-g++
|
||||
- export HOST_CC=gcc
|
||||
- export HOST_CXX=g++
|
||||
- export PLATFORM=aarch64-unknown-linux-gnu
|
||||
- rm -rf .cargo
|
||||
- mkdir -p .cargo
|
||||
- echo "[target.$PLATFORM]" >> .cargo/config
|
||||
- echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config
|
||||
- cat .cargo/config
|
||||
- cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p evmbin
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethstore-cli
|
||||
- cargo build -j $(nproc) --target $PLATFORM --release -p ethkey-cli
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/parity
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/parity-evm
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/ethstore
|
||||
- aarch64-linux-gnu-strip target/$PLATFORM/release/ethkey
|
||||
- export SHA3=$(rhash --sha3-256 target/$PLATFORM/release/parity -p %h)
|
||||
- md5sum target/$PLATFORM/release/parity > parity.md5
|
||||
- sh scripts/deb-build.sh arm64
|
||||
- cp target/$PLATFORM/release/parity deb/usr/bin/parity
|
||||
- cp target/$PLATFORM/release/parity-evm deb/usr/bin/parity-evm
|
||||
- cp target/$PLATFORM/release/ethstore deb/usr/bin/ethstore
|
||||
- cp target/$PLATFORM/release/ethkey deb/usr/bin/ethkey
|
||||
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
- dpkg-deb -b deb "parity_"$VER"_arm64.deb"
|
||||
- md5sum "parity_"$VER"_arm64.deb" > "parity_"$VER"_arm64.deb.md5"
|
||||
- aws configure set aws_access_key_id $s3_key
|
||||
- aws configure set aws_secret_access_key $s3_secret
|
||||
- if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb"
|
||||
- aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5"
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
- scripts/gitlab-build.sh aarch64-unknown-linux-gnu aarch64-unknown-linux-gnu arm64 aarch64-linux-gnu-gcc aarch64-linux-gnu-g++ ubuntu
|
||||
tags:
|
||||
- rust
|
||||
- rust-arm
|
||||
artifacts:
|
||||
paths:
|
||||
- target/aarch64-unknown-linux-gnu/release/parity
|
||||
- parity.zip
|
||||
name: "aarch64-unknown-linux-gnu_parity"
|
||||
linux-snap:
|
||||
stage: build
|
||||
image: snapcore/snapcraft:stable
|
||||
only:
|
||||
- stable
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
script:
|
||||
- scripts/gitlab-build.sh x86_64-unknown-snap-gnu x86_64-unknown-linux-gnu amd64 gcc g++ snap
|
||||
tags:
|
||||
- rust-stable
|
||||
artifacts:
|
||||
paths:
|
||||
- parity.zip
|
||||
name: "stable-x86_64-unknown-snap-gnu_parity"
|
||||
allow_failure: true
|
||||
darwin:
|
||||
stage: build
|
||||
@@ -448,45 +153,17 @@ darwin:
|
||||
- tags
|
||||
- stable
|
||||
- triggers
|
||||
script: |
|
||||
export COMMIT=$(git rev-parse HEAD)
|
||||
export PLATFORM=x86_64-apple-darwin
|
||||
rustup default stable
|
||||
cargo clean
|
||||
cargo build -j 8 --features final --release #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p ethstore-cli #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p ethkey-cli #$CARGOFLAGS
|
||||
cargo build -j 8 --release -p evmbin #$CARGOFLAGS
|
||||
rm -rf parity.md5
|
||||
md5sum target/release/parity > parity.md5
|
||||
export SHA3=$(target/release/parity tools hash target/release/parity)
|
||||
cd mac
|
||||
xcodebuild -configuration Release
|
||||
cd ..
|
||||
packagesbuild -v mac/Parity.pkgproj
|
||||
productsign --sign 'Developer ID Installer: PARITY TECHNOLOGIES LIMITED (P2PX3JU8FT)' target/release/Parity\ Ethereum.pkg target/release/Parity\ Ethereum-signed.pkg
|
||||
export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
|
||||
mv target/release/Parity\ Ethereum-signed.pkg "parity-"$VER"-macos-installer.pkg"
|
||||
md5sum "parity-"$VER"-macos-installer.pkg" >> "parity-"$VER"-macos-installer.pkg.md5"
|
||||
aws configure set aws_access_key_id $s3_key
|
||||
aws configure set aws_secret_access_key $s3_secret
|
||||
if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable|nightly)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi
|
||||
aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/release/parity
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-macos-installer.pkg" --body "parity-"$VER"-macos-installer.pkg"
|
||||
aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity-"$VER"-macos-installer.pkg.md5" --body "parity-"$VER"-macos-installer.pkg.md5"
|
||||
curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://update.parity.io:1338/push-build/$CI_BUILD_REF_NAME/$PLATFORM
|
||||
script:
|
||||
- scripts/gitlab-build.sh x86_64-apple-darwin x86_64-apple-darwin macos gcc g++ macos
|
||||
tags:
|
||||
- osx
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity
|
||||
- parity.zip
|
||||
name: "x86_64-apple-darwin_parity"
|
||||
windows:
|
||||
cache:
|
||||
key: "%CI_BUILD_STAGE%/%CI_BUILD_REF_NAME%"
|
||||
key: "%CI_BUILD_STAGE%-%CI_BUILD_REF_NAME%"
|
||||
untracked: true
|
||||
stage: build
|
||||
only:
|
||||
@@ -495,62 +172,12 @@ windows:
|
||||
- stable
|
||||
- triggers
|
||||
script:
|
||||
- set PLATFORM=x86_64-pc-windows-msvc
|
||||
- set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
|
||||
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
|
||||
- set RUST_BACKTRACE=1
|
||||
- set RUSTFLAGS=%RUSTFLAGS%
|
||||
- rustup default stable-x86_64-pc-windows-msvc
|
||||
- cargo clean
|
||||
- cargo build --features final --release #%CARGOFLAGS%
|
||||
- cargo build --release -p ethstore-cli #%CARGOFLAGS%
|
||||
- cargo build --release -p ethkey-cli #%CARGOFLAGS%
|
||||
- cargo build --release -p evmbin #%CARGOFLAGS%
|
||||
- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
|
||||
- target\release\parity.exe tools hash target\release\parity.exe > parity.sha3
|
||||
- set /P SHA3=<parity.sha3
|
||||
- curl -sL --url "https://github.com/paritytech/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
|
||||
- curl -sL --url "https://github.com/paritytech/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
|
||||
- msbuild windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release
|
||||
- signtool sign /f %keyfile% /p %certpass% windows\ptray\x64\release\ptray.exe
|
||||
- cd nsis
|
||||
- makensis.exe installer.nsi
|
||||
- copy installer.exe InstallParity.exe
|
||||
- signtool sign /f %keyfile% /p %certpass% InstallParity.exe
|
||||
- md5sums InstallParity.exe > InstallParity.exe.md5
|
||||
- zip win-installer.zip InstallParity.exe InstallParity.exe.md5
|
||||
- md5sums win-installer.zip > win-installer.zip.md5
|
||||
- cd ..\target\release\
|
||||
- md5sums parity.exe > parity.exe.md5
|
||||
- zip parity.zip parity.exe parity.md5
|
||||
- md5sums parity.zip > parity.zip.md5
|
||||
- cd ..\..
|
||||
- aws configure set aws_access_key_id %s3_key%
|
||||
- aws configure set aws_secret_access_key %s3_secret%
|
||||
- echo %CI_BUILD_REF_NAME%
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "master" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "beta" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "stable" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %CI_BUILD_REF_NAME% | findstr /R "nightly" >nul 2>&1 && set S3_BUCKET=builds-parity-published|| set S3_BUCKET=builds-parity
|
||||
- echo %S3_BUCKET%
|
||||
- aws s3 rm --recursive s3://%S3_BUCKET%/%CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe --body target\release\parity.exe
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe.md5 --body target\release\parity.exe.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip --body target\release\parity.zip
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip.md5 --body target\release\parity.zip.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe --body nsis\InstallParity.exe
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip
|
||||
- aws s3api put-object --bucket %S3_BUCKET% --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5
|
||||
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1337/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
|
||||
- curl --data "commit=%CI_BUILD_REF%&sha3=%SHA3%&filename=parity.exe&secret=%RELEASES_SECRET%" http://update.parity.io:1338/push-build/%CI_BUILD_REF_NAME%/%PLATFORM%
|
||||
- sh scripts/gitlab-build.sh x86_64-pc-windows-msvc x86_64-pc-windows-msvc installer "" "" windows
|
||||
tags:
|
||||
- rust-windows
|
||||
artifacts:
|
||||
paths:
|
||||
- target/release/parity.exe
|
||||
- target/release/parity.pdb
|
||||
- nsis/InstallParity.exe
|
||||
- parity.zip
|
||||
name: "x86_64-pc-windows-msvc_parity"
|
||||
docker-build:
|
||||
stage: build
|
||||
@@ -561,11 +188,9 @@ docker-build:
|
||||
- docker info
|
||||
script:
|
||||
- if [ "$CI_BUILD_REF_NAME" == "beta-release" ]; then DOCKER_TAG="latest"; else DOCKER_TAG=$CI_BUILD_REF_NAME; fi
|
||||
- docker login -u $Docker_Hub_User -p $Docker_Hub_Pass
|
||||
- sh scripts/docker-build.sh $DOCKER_TAG ethcore
|
||||
- docker logout
|
||||
- echo "Tag:" $DOCKER_TAG
|
||||
- docker login -u $Docker_Hub_User_Parity -p $Docker_Hub_Pass_Parity
|
||||
- sh scripts/docker-build.sh $DOCKER_TAG parity
|
||||
- scripts/docker-build.sh $DOCKER_TAG
|
||||
- docker logout
|
||||
tags:
|
||||
- docker
|
||||
@@ -574,64 +199,16 @@ test-coverage:
|
||||
only:
|
||||
- master
|
||||
script:
|
||||
- git submodule update --init --recursive
|
||||
- rm -rf target/*
|
||||
- rm -rf js/.coverage
|
||||
- scripts/cov.sh
|
||||
# - COVERAGE=$(grep -Po 'covered":.*?[^\\]"' target/cov/index.json | grep "[0-9]*\.[0-9]" -o)
|
||||
# - echo "Coverage:" $COVERAGE
|
||||
- scripts/gitlab-test.sh test-coverage
|
||||
tags:
|
||||
- kcov
|
||||
allow_failure: true
|
||||
test-darwin:
|
||||
stage: test
|
||||
only:
|
||||
- triggers
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
- export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep -v -e "^js/" -e ^\\. -e ^LICENSE -e ^README.md -e ^appveyor.yml -e ^test.sh -e ^windows/ -e ^scripts/ -e^mac/ -e ^nsis/ | wc -l)
|
||||
script:
|
||||
- export RUST_BACKTRACE=1
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
tags:
|
||||
- osx
|
||||
allow_failure: true
|
||||
test-windows:
|
||||
stage: test
|
||||
only:
|
||||
- triggers
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
script:
|
||||
- set RUST_BACKTRACE=1
|
||||
- echo cargo test --features json-tests -p rlp -p ethash -p ethcore -p ethcore-bigint -p parity-dapps -p parity-rpc -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity-rpc-client -p parity %CARGOFLAGS% --verbose --release
|
||||
tags:
|
||||
- rust-windows
|
||||
allow_failure: true
|
||||
test-rust-stable:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
- export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^appveyor.yml -e ^test.sh -e ^windows/ -e ^scripts/ -e^mac/ -e ^nsis/ | wc -l)
|
||||
script:
|
||||
- rustup show
|
||||
- export RUST_BACKTRACE=1
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- scripts/gitlab-test.sh stable
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
js-test:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
- export JS_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep ^js/ | wc -l)
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
|
||||
script:
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS lint since no JS files modified."; else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh; fi
|
||||
tags:
|
||||
- rust
|
||||
- rust-stable
|
||||
test-rust-beta:
|
||||
stage: test
|
||||
@@ -639,15 +216,9 @@ test-rust-beta:
|
||||
- triggers
|
||||
- master
|
||||
image: parity/rust:gitlab-ci
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
- export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^appveyor.yml -e ^test.sh -e ^windows/ -e ^scripts/ -e^mac/ -e ^nsis/ | wc -l)
|
||||
script:
|
||||
- rustup default beta
|
||||
- export RUST_BACKTRACE=1
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- scripts/gitlab-test.sh beta
|
||||
tags:
|
||||
- rust
|
||||
- rust-beta
|
||||
allow_failure: true
|
||||
test-rust-nightly:
|
||||
@@ -656,34 +227,30 @@ test-rust-nightly:
|
||||
- triggers
|
||||
- master
|
||||
image: parity/rust:gitlab-ci
|
||||
before_script:
|
||||
- git submodule update --init --recursive
|
||||
- export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^appveyor.yml -e ^test.sh -e ^windows/ -e ^scripts/ -e^mac/ -e ^nsis/ | wc -l)
|
||||
script:
|
||||
- rustup default nightly
|
||||
- export RUST_BACKTRACE=1
|
||||
- if [ $RUST_FILES_MODIFIED -eq 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
|
||||
- scripts/gitlab-test.sh nightly
|
||||
tags:
|
||||
- rust
|
||||
- rust-nightly
|
||||
allow_failure: true
|
||||
js-test:
|
||||
stage: test
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- scripts/gitlab-test.sh js-test
|
||||
tags:
|
||||
- rust-stable
|
||||
js-release:
|
||||
stage: js-build
|
||||
only:
|
||||
- master
|
||||
- beta
|
||||
- stable
|
||||
- beta
|
||||
- tags
|
||||
- triggers
|
||||
image: parity/rust:gitlab-ci
|
||||
before_script:
|
||||
- export JS_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep ^js/ | wc -l)
|
||||
- echo $JS_FILES_MODIFIED
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
|
||||
script:
|
||||
- rustup default stable
|
||||
- echo $JS_FILES_MODIFIED
|
||||
- if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS rebuild since no JS files modified."; else ./js/scripts/build.sh && ./js/scripts/release.sh; fi
|
||||
- scripts/gitlab-test.sh js-release
|
||||
tags:
|
||||
- javascript
|
||||
push-release:
|
||||
@@ -693,8 +260,6 @@ push-release:
|
||||
- triggers
|
||||
image: parity/rust:gitlab-ci
|
||||
script:
|
||||
- rustup default stable
|
||||
- curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1337/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
|
||||
- curl --data "secret=$RELEASES_SECRET" http://update.parity.io:1338/push-release/$CI_BUILD_REF_NAME/$CI_BUILD_REF
|
||||
- scripts/gitlab-push-release.sh
|
||||
tags:
|
||||
- curl
|
||||
|
||||
4560
CHANGELOG.md
4560
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
2959
Cargo.lock
generated
2959
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
23
Cargo.toml
23
Cargo.toml
@@ -1,7 +1,8 @@
|
||||
[package]
|
||||
description = "Parity Ethereum client"
|
||||
name = "parity"
|
||||
version = "1.7.0"
|
||||
# NOTE Make sure to update util/version/Cargo.toml as well
|
||||
version = "1.8.10"
|
||||
license = "GPL-3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
build = "build.rs"
|
||||
@@ -11,12 +12,16 @@ log = "0.3"
|
||||
env_logger = "0.4"
|
||||
rustc-hex = "1.0"
|
||||
docopt = "0.8"
|
||||
clap = "2"
|
||||
term_size = "0.3"
|
||||
textwrap = "0.9"
|
||||
time = "0.1"
|
||||
num_cpus = "1.2"
|
||||
number_prefix = "0.2"
|
||||
rpassword = "0.2.1"
|
||||
semver = "0.6"
|
||||
ansi_term = "0.9"
|
||||
parking_lot = "0.4"
|
||||
regex = "0.2"
|
||||
isatty = "0.1"
|
||||
toml = "0.4"
|
||||
@@ -29,10 +34,12 @@ futures-cpupool = "0.1"
|
||||
fdlimit = "0.1"
|
||||
ws2_32-sys = "0.2"
|
||||
ctrlc = { git = "https://github.com/paritytech/rust-ctrlc.git" }
|
||||
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
|
||||
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.8" }
|
||||
ethsync = { path = "sync" }
|
||||
ethcore = { path = "ethcore" }
|
||||
ethcore-util = { path = "util" }
|
||||
ethcore-bytes = { path = "util/bytes" }
|
||||
ethcore-bigint = { path = "util/bigint" }
|
||||
ethcore-io = { path = "util/io" }
|
||||
ethcore-devtools = { path = "devtools" }
|
||||
ethcore-ipc = { path = "ipc/rpc" }
|
||||
@@ -41,7 +48,10 @@ ethcore-ipc-hypervisor = { path = "ipc/hypervisor" }
|
||||
ethcore-light = { path = "ethcore/light" }
|
||||
ethcore-logger = { path = "logger" }
|
||||
ethcore-stratum = { path = "stratum" }
|
||||
ethcore-network = { path = "util/network" }
|
||||
node-filter = { path = "ethcore/node_filter" }
|
||||
ethkey = { path = "ethkey" }
|
||||
node-health = { path = "dapps/node-health" }
|
||||
rlp = { path = "util/rlp" }
|
||||
rpc-cli = { path = "rpc_cli" }
|
||||
parity-hash-fetch = { path = "hash-fetch" }
|
||||
@@ -51,7 +61,13 @@ parity-reactor = { path = "util/reactor" }
|
||||
parity-rpc = { path = "rpc" }
|
||||
parity-rpc-client = { path = "rpc_client" }
|
||||
parity-updater = { path = "updater" }
|
||||
parity-version = { path = "util/version" }
|
||||
parity-whisper = { path = "whisper" }
|
||||
path = { path = "util/path" }
|
||||
panic_hook = { path = "panic_hook" }
|
||||
hash = { path = "util/hash" }
|
||||
migration = { path = "util/migration" }
|
||||
kvdb-rocksdb = { path = "util/kvdb-rocksdb" }
|
||||
|
||||
parity-dapps = { path = "dapps", optional = true }
|
||||
clippy = { version = "0.0.103", optional = true}
|
||||
@@ -63,6 +79,7 @@ rustc_version = "0.2"
|
||||
[dev-dependencies]
|
||||
ethcore-ipc-tests = { path = "ipc/tests" }
|
||||
pretty_assertions = "0.1"
|
||||
ipnetwork = "0.12.6"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
winapi = "0.2"
|
||||
@@ -106,4 +123,4 @@ lto = false
|
||||
panic = "abort"
|
||||
|
||||
[workspace]
|
||||
members = ["ethstore/cli", "ethkey/cli", "evmbin"]
|
||||
members = ["ethstore/cli", "ethkey/cli", "evmbin", "whisper", "chainspec", "dapps/js-glue"]
|
||||
|
||||
52
README.md
52
README.md
@@ -1,59 +1,47 @@
|
||||
# [Parity](https://parity.io/parity.html)
|
||||
### Fast, light, and robust Ethereum implementation
|
||||
# [Parity](https://parity.io/) - fast, light, and robust Ethereum client
|
||||
|
||||
### [Download latest release](https://github.com/paritytech/parity/releases)
|
||||
[](https://gitlab.parity.io/parity/parity/commits/master)
|
||||
[](https://build.snapcraft.io/user/paritytech/parity)
|
||||
[](https://www.gnu.org/licenses/gpl-3.0.en.html)
|
||||
|
||||
[](https://gitlab.parity.io/parity/parity/commits/master) [![Coverage Status][coveralls-image]][coveralls-url] [![GPLv3][license-image]][license-url]
|
||||
- [Download the latest release here.](https://github.com/paritytech/parity/releases)
|
||||
|
||||
### Join the chat!
|
||||
|
||||
Parity [![Join the chat at https://gitter.im/ethcore/parity][gitter-image]][gitter-url] and
|
||||
parity.js [](https://gitter.im/ethcore/parity.js?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
[Internal Documentation][doc-url]
|
||||
|
||||
|
||||
Be sure to check out [our wiki][wiki-url] for more information.
|
||||
|
||||
[coveralls-image]: https://coveralls.io/repos/github/paritytech/parity/badge.svg?branch=master
|
||||
[coveralls-url]: https://coveralls.io/github/paritytech/parity?branch=master
|
||||
[gitter-image]: https://badges.gitter.im/Join%20Chat.svg
|
||||
[gitter-url]: https://gitter.im/ethcore/parity?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
|
||||
[license-image]: https://img.shields.io/badge/license-GPL%20v3-green.svg
|
||||
[license-url]: https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
[doc-url]: https://paritytech.github.io/parity/ethcore/index.html
|
||||
[wiki-url]: https://github.com/paritytech/parity/wiki
|
||||
Get in touch with us on Gitter:
|
||||
[](https://gitter.im/paritytech/parity)
|
||||
[](https://gitter.im/paritytech/parity.js)
|
||||
[](https://gitter.im/paritytech/parity/miners)
|
||||
[](https://gitter.im/paritytech/parity-poa)
|
||||
|
||||
Be sure to check out [our wiki](https://github.com/paritytech/parity/wiki) and the [internal documentation](https://paritytech.github.io/parity/ethcore/index.html) for more information.
|
||||
|
||||
----
|
||||
|
||||
|
||||
## About Parity
|
||||
|
||||
Parity's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity using the sophisticated and
|
||||
cutting-edge Rust programming language. Parity is licensed under the GPLv3, and can be used for all your Ethereum needs.
|
||||
Parity's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity using the sophisticated and cutting-edge Rust programming language. Parity is licensed under the GPLv3, and can be used for all your Ethereum needs.
|
||||
|
||||
Parity comes with a built-in wallet. To access [Parity Wallet](http://web3.site/) simply go to http://web3.site/ (if you don't have access to the internet, but still want to use the service, you can also use http://127.0.0.1:8180/). It includes various functionality allowing you to:
|
||||
|
||||
Parity comes with a built-in wallet. To access [Parity Wallet](http://web3.site/) simply go to http://web3.site/ (if you don't have access to the internet, but still want to use the service, you can also use http://127.0.0.1:8180/). It
|
||||
includes various functionality allowing you to:
|
||||
- create and manage your Ethereum accounts;
|
||||
- manage your Ether and any Ethereum tokens;
|
||||
- create and register your own tokens;
|
||||
- and much more.
|
||||
|
||||
By default, Parity will also run a JSONRPC server on `127.0.0.1:8545`. This is fully configurable and supports a number
|
||||
of RPC APIs.
|
||||
By default, Parity will also run a JSONRPC server on `127.0.0.1:8545`. This is fully configurable and supports a number of RPC APIs.
|
||||
|
||||
If you run into an issue while using parity, feel free to file one in this repository
|
||||
or hop on our [gitter chat room][gitter-url] to ask a question. We are glad to help!
|
||||
If you run into an issue while using parity, feel free to file one in this repository or hop on our [gitter chat room](https://gitter.im/paritytech/parity) to ask a question. We are glad to help!
|
||||
|
||||
Parity's current release is 1.6. You can download it at https://github.com/paritytech/parity/releases or follow the instructions
|
||||
below to build from source.
|
||||
**For security-critical issues**, please refer to the security policy outlined in `SECURITY.MD`.
|
||||
|
||||
Parity's current release is 1.7. You can download it at https://github.com/paritytech/parity/releases or follow the instructions below to build from source.
|
||||
|
||||
----
|
||||
|
||||
## Build dependencies
|
||||
|
||||
**Parity requires Rust version 1.18.0 to build**
|
||||
**Parity requires Rust version 1.19.0 to build**
|
||||
|
||||
We recommend installing Rust through [rustup](https://www.rustup.rs/). If you don't already have rustup, you can install it like this:
|
||||
|
||||
|
||||
54
SECURITY.md
Normal file
54
SECURITY.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# Security Policy
|
||||
|
||||
For security inquiries or vulnerability reports, please send a message to security@parity.io.
|
||||
|
||||
Please use a descriptive subject line so we can identify the report as such.
|
||||
|
||||
If you send a report, we will respond to the e-mail within 48 hours, and provide regular updates from that time onwards.
|
||||
|
||||
If you would like to encrypt your report, please use the PGP key provided below.
|
||||
It is also reproduced [on MIT's key server](https://pgp.mit.edu/pks/lookup?op=get&search=0x5D0F03018D07DE73)
|
||||
|
||||
```
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQENBFlyIAwBCACe0keNPjgYzZ1Oy/8t3zj/Qw9bHHqrzx7FWy8NbXnYBM19NqOZ
|
||||
DIP7Oe0DvCaf/uruBskCS0iVstHlEFQ2AYe0Ei0REt9lQdy61GylU/DEB3879IG+
|
||||
6FO0SnFeYeerv1/hFI2K6uv8v7PyyVDiiJSW0I1KIs2OBwJicTKmWxLAeQsRgx9G
|
||||
yRGalrVk4KP+6pWTA7k3DxmDZKZyfYV/Ej10NtuzmsemwDbv98HKeomp/kgFOfSy
|
||||
3AZjeCpctlsNqpjUuXa0/HudmH2WLxZ0fz8XeoRh8XM9UudNIecjrDqmAFrt/btQ
|
||||
/3guvlzhFCdhYPVGsUusKMECk/JG+Xx1/1ZjABEBAAG0LFBhcml0eSBTZWN1cml0
|
||||
eSBDb250YWN0IDxzZWN1cml0eUBwYXJpdHkuaW8+iQFUBBMBCAA+FiEE2uUVYCjP
|
||||
N6B8aTiDXQ8DAY0H3nMFAllyIAwCGwMFCQPCZwAFCwkIBwIGFQgJCgsCBBYCAwEC
|
||||
HgECF4AACgkQXQ8DAY0H3nM60wgAkS3A36Zc+upiaxU7tumcGv+an17j7gin0sif
|
||||
+0ELSjVfrXInM6ovai+NhUdcLkJ7tCrKS90fvlaELK5Sg9CXBWCTFccKN4A/B7ey
|
||||
rOg2NPXUecnyBB/XqQgKYH7ujYlOlqBDXMfz6z8Hj6WToxg9PPMGGomyMGh8AWxM
|
||||
3yRPFs5RKt0VKgN++5N00oly5Y8ri5pgCidDvCLYMGTVDHFKwkuc9w6BlWlu1R1e
|
||||
/hXFWUFAP1ffTAul3QwyKhjPn2iotCdxXjvt48KaU8DN4iL7aMBN/ZBKqGS7yRdF
|
||||
D/JbJyaaJ0ZRvFSTSXy/sWY3z1B5mtCPBxco8hqqNfRkCwuZ6LkBDQRZciAMAQgA
|
||||
8BP8xrwe12TOUTqL/Vrbxv/FLdhKh53J6TrPKvC2TEEKOrTNo5ahRq+XOS5E7G2N
|
||||
x3b+fq8gR9BzFcldAx0XWUtGs/Wv++ulaSNqTBxj13J3G3WGsUfMKxRgj//piCUD
|
||||
bCFLQfGZdKk0M1o9QkPVARwwmvCNiNB/l++xGqPtfc44H5jWj3GoGvL2MkShPzrN
|
||||
yN/bJ+m+R5gtFGdInqa5KXBuxxuW25eDKJ+LzjbgUgeC76wNcfOiQHTdMkcupjdO
|
||||
bbGFwo10hcbRAOcZEv6//Zrlmk/6nPxEd2hN20St2bSN0+FqfZ267mWEu3ejsgF8
|
||||
ArdCpv5h4fBvJyNwiTZwIQARAQABiQE8BBgBCAAmFiEE2uUVYCjPN6B8aTiDXQ8D
|
||||
AY0H3nMFAllyIAwCGwwFCQPCZwAACgkQXQ8DAY0H3nNisggAl4fqhRlA34wIb190
|
||||
sqXHVxiCuzPaqS6krE9xAa1+gncX485OtcJNqnjugHm2rFE48lv7oasviuPXuInE
|
||||
/OgVFnXYv9d/Xx2JUeDs+bFTLouCDRY2Unh7KJZasfqnMcCHWcxHx5FvRNZRssaB
|
||||
WTZVo6sizPurGUtbpYe4/OLFhadBqAE0EUmVRFEUMc1YTnu4eLaRBzoWN4d2UWwi
|
||||
LN25RSrVSke7LTSFbgn9ntQrQ2smXSR+cdNkkfRCjFcpUaecvFl9HwIqoyVbT4Ym
|
||||
0hbpbbX/cJdc91tKa+psa29uMeGL/cgL9fAu19yNFRyOTMxjZnvql1X/WE1pLmoP
|
||||
ETBD1Q==
|
||||
=K9Qw
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
```
|
||||
|
||||
Important Legal Information:
|
||||
|
||||
Your submission might be eligible for a bug bounty. The bug bounty program is an experimental and discretionary rewards program for the Parity community to reward those who are helping to improve the Parity software. Rewards are at the sole discretion of Parity Technologies Ltd..
|
||||
|
||||
We are not able to issue rewards to individuals who are on sanctions lists or who are in countries on sanctions lists (e.g. North Korea, Iran, etc).
|
||||
|
||||
You are responsible for all taxes. All rewards are subject to applicable law.
|
||||
|
||||
Finally, your testing must not violate any law or compromise any data that is not yours.
|
||||
9
chainspec/Cargo.toml
Normal file
9
chainspec/Cargo.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
[package]
|
||||
name = "chainspec"
|
||||
version = "0.1.0"
|
||||
authors = ["debris <marek.kotewicz@gmail.com>"]
|
||||
|
||||
[dependencies]
|
||||
ethjson = { path = "../json" }
|
||||
serde_json = "1.0"
|
||||
serde_ignored = "0.0.4"
|
||||
48
chainspec/src/main.rs
Normal file
48
chainspec/src/main.rs
Normal file
@@ -0,0 +1,48 @@
|
||||
extern crate serde_json;
|
||||
extern crate serde_ignored;
|
||||
extern crate ethjson;
|
||||
|
||||
use std::collections::BTreeSet;
|
||||
use std::{fs, env, process};
|
||||
use ethjson::spec::Spec;
|
||||
|
||||
fn quit(s: &str) -> ! {
|
||||
println!("{}", s);
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let mut args = env::args();
|
||||
if args.len() != 2 {
|
||||
quit("You need to specify chainspec.json\n\
|
||||
\n\
|
||||
./chainspec <chainspec.json>");
|
||||
}
|
||||
|
||||
let path = args.nth(1).expect("args.len() == 2; qed");
|
||||
let file = match fs::File::open(&path) {
|
||||
Ok(file) => file,
|
||||
Err(_) => quit(&format!("{} could not be opened", path)),
|
||||
};
|
||||
|
||||
let mut unused = BTreeSet::new();
|
||||
let mut deserializer = serde_json::Deserializer::from_reader(file);
|
||||
|
||||
let spec: Result<Spec, _> = serde_ignored::deserialize(&mut deserializer, |field| {
|
||||
unused.insert(field.to_string());
|
||||
});
|
||||
|
||||
if let Err(err) = spec {
|
||||
quit(&format!("{} {}", path, err.to_string()));
|
||||
}
|
||||
|
||||
if !unused.is_empty() {
|
||||
let err = unused.into_iter()
|
||||
.map(|field| format!("{} unexpected field `{}`", path, field))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
quit(&err);
|
||||
}
|
||||
|
||||
println!("{} is valid", path);
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
description = "Parity Dapps crate"
|
||||
name = "parity-dapps"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
license = "GPL-3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
@@ -9,37 +9,42 @@ authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[dependencies]
|
||||
base32 = "0.3"
|
||||
env_logger = "0.4"
|
||||
futures = "0.1"
|
||||
futures-cpupool = "0.1"
|
||||
linked-hash-map = "0.3"
|
||||
linked-hash-map = "0.5"
|
||||
log = "0.3"
|
||||
parity-dapps-glue = "1.7"
|
||||
mime = "0.2"
|
||||
mime_guess = "1.6.1"
|
||||
ntp = "0.2.0"
|
||||
parity-dapps-glue = "1.8"
|
||||
parking_lot = "0.4"
|
||||
mime_guess = "2.0.0-alpha.2"
|
||||
rand = "0.3"
|
||||
rustc-hex = "1.0"
|
||||
serde = "1.0"
|
||||
serde_derive = "1.0"
|
||||
serde_json = "1.0"
|
||||
time = "0.1.35"
|
||||
unicase = "1.3"
|
||||
url = "1.0"
|
||||
unicase = "1.4"
|
||||
zip = { version = "0.1", default-features = false }
|
||||
itertools = "0.5"
|
||||
|
||||
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
|
||||
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
|
||||
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.8" }
|
||||
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.8" }
|
||||
|
||||
ethcore-devtools = { path = "../devtools" }
|
||||
ethcore-util = { path = "../util" }
|
||||
ethcore-bigint = { path = "../util/bigint" }
|
||||
ethcore-bytes = { path = "../util/bytes" }
|
||||
fetch = { path = "../util/fetch" }
|
||||
node-health = { path = "./node-health" }
|
||||
parity-hash-fetch = { path = "../hash-fetch" }
|
||||
parity-reactor = { path = "../util/reactor" }
|
||||
parity-ui = { path = "./ui" }
|
||||
hash = { path = "../util/hash" }
|
||||
parity-version = { path = "../util/version" }
|
||||
|
||||
clippy = { version = "0.0.103", optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.4"
|
||||
ethcore-devtools = { path = "../devtools" }
|
||||
|
||||
[features]
|
||||
dev = ["clippy", "ethcore-util/dev"]
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
description = "Base Package for all Parity built-in dapps"
|
||||
name = "parity-dapps-glue"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
license = "GPL-3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
build = "build.rs"
|
||||
@@ -12,7 +12,7 @@ syntex = { version = "0.58", optional = true }
|
||||
|
||||
[dependencies]
|
||||
glob = { version = "0.2.11" }
|
||||
mime_guess = { version = "1.6.1" }
|
||||
mime_guess = { version = "2.0.0-alpha.2" }
|
||||
aster = { version = "0.41", default-features = false }
|
||||
quasi = { version = "0.32", default-features = false }
|
||||
quasi_macros = { version = "0.32", optional = true }
|
||||
|
||||
@@ -101,6 +101,7 @@ fn implement_webapp(cx: &ExtCtxt, builder: &aster::AstBuilder, item: &Item, push
|
||||
|
||||
let files_impl = quote_item!(cx,
|
||||
impl $type_name {
|
||||
#[allow(unused_mut)]
|
||||
fn files() -> ::std::collections::HashMap<&'static str, File> {
|
||||
let mut files = ::std::collections::HashMap::new();
|
||||
$statements
|
||||
|
||||
18
dapps/node-health/Cargo.toml
Normal file
18
dapps/node-health/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
name = "node-health"
|
||||
description = "Node's health status"
|
||||
version = "0.1.0"
|
||||
license = "GPL-3.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[dependencies]
|
||||
futures = "0.1"
|
||||
futures-cpupool = "0.1"
|
||||
log = "0.3"
|
||||
ntp = "0.2.0"
|
||||
parking_lot = "0.4"
|
||||
serde = "1.0"
|
||||
serde_derive = "1.0"
|
||||
time = "0.1.35"
|
||||
|
||||
parity-reactor = { path = "../../util/reactor" }
|
||||
122
dapps/node-health/src/health.rs
Normal file
122
dapps/node-health/src/health.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Reporting node's health.
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
use futures::Future;
|
||||
use futures::sync::oneshot;
|
||||
use types::{HealthInfo, HealthStatus, Health};
|
||||
use time::{TimeChecker, MAX_DRIFT};
|
||||
use parity_reactor::Remote;
|
||||
use parking_lot::Mutex;
|
||||
use {SyncStatus};
|
||||
|
||||
const TIMEOUT_SECS: u64 = 5;
|
||||
const PROOF: &str = "Only one closure is invoked.";
|
||||
|
||||
/// A struct enabling you to query for node's health.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NodeHealth {
|
||||
sync_status: Arc<SyncStatus>,
|
||||
time: TimeChecker,
|
||||
remote: Remote,
|
||||
}
|
||||
|
||||
impl NodeHealth {
|
||||
/// Creates new `NodeHealth`.
|
||||
pub fn new(sync_status: Arc<SyncStatus>, time: TimeChecker, remote: Remote) -> Self {
|
||||
NodeHealth { sync_status, time, remote, }
|
||||
}
|
||||
|
||||
/// Query latest health report.
|
||||
pub fn health(&self) -> Box<Future<Item = Health, Error = ()> + Send> {
|
||||
trace!(target: "dapps", "Checking node health.");
|
||||
// Check timediff
|
||||
let sync_status = self.sync_status.clone();
|
||||
let time = self.time.time_drift();
|
||||
let (tx, rx) = oneshot::channel();
|
||||
let tx = Arc::new(Mutex::new(Some(tx)));
|
||||
let tx2 = tx.clone();
|
||||
self.remote.spawn_with_timeout(
|
||||
move || time.then(move |result| {
|
||||
let _ = tx.lock().take().expect(PROOF).send(Ok(result));
|
||||
Ok(())
|
||||
}),
|
||||
time::Duration::from_secs(TIMEOUT_SECS),
|
||||
move || {
|
||||
let _ = tx2.lock().take().expect(PROOF).send(Err(()));
|
||||
},
|
||||
);
|
||||
|
||||
Box::new(rx.map_err(|err| {
|
||||
warn!(target: "dapps", "Health request cancelled: {:?}", err);
|
||||
}).and_then(move |time| {
|
||||
// Check peers
|
||||
let peers = {
|
||||
let (connected, max) = sync_status.peers();
|
||||
let (status, message) = match connected {
|
||||
0 => {
|
||||
(HealthStatus::Bad, "You are not connected to any peers. There is most likely some network issue. Fix connectivity.".into())
|
||||
},
|
||||
1 => (HealthStatus::NeedsAttention, "You are connected to only one peer. Your node might not be reliable. Check your network connection.".into()),
|
||||
_ => (HealthStatus::Ok, "".into()),
|
||||
};
|
||||
HealthInfo { status, message, details: (connected, max) }
|
||||
};
|
||||
|
||||
// Check sync
|
||||
let sync = {
|
||||
let is_syncing = sync_status.is_major_importing();
|
||||
let (status, message) = if is_syncing {
|
||||
(HealthStatus::NeedsAttention, "Your node is still syncing, the values you see might be outdated. Wait until it's fully synced.".into())
|
||||
} else {
|
||||
(HealthStatus::Ok, "".into())
|
||||
};
|
||||
HealthInfo { status, message, details: is_syncing }
|
||||
};
|
||||
|
||||
// Check time
|
||||
let time = {
|
||||
let (status, message, details) = match time {
|
||||
Ok(Ok(diff)) if diff < MAX_DRIFT && diff > -MAX_DRIFT => {
|
||||
(HealthStatus::Ok, "".into(), diff)
|
||||
},
|
||||
Ok(Ok(diff)) => {
|
||||
(HealthStatus::Bad, format!(
|
||||
"Your clock is not in sync. Detected difference is too big for the protocol to work: {}ms. Synchronize your clock.",
|
||||
diff,
|
||||
), diff)
|
||||
},
|
||||
Ok(Err(err)) => {
|
||||
(HealthStatus::NeedsAttention, format!(
|
||||
"Unable to reach time API: {}. Make sure that your clock is synchronized.",
|
||||
err,
|
||||
), 0)
|
||||
},
|
||||
Err(_) => {
|
||||
(HealthStatus::NeedsAttention, "Time API request timed out. Make sure that the clock is synchronized.".into(), 0)
|
||||
},
|
||||
};
|
||||
|
||||
HealthInfo { status, message, details, }
|
||||
};
|
||||
|
||||
Ok(Health { peers, sync, time})
|
||||
}))
|
||||
}
|
||||
}
|
||||
49
dapps/node-health/src/lib.rs
Normal file
49
dapps/node-health/src/lib.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Node Health status reporting.
|
||||
|
||||
#![warn(missing_docs)]
|
||||
|
||||
extern crate futures;
|
||||
extern crate futures_cpupool;
|
||||
extern crate ntp;
|
||||
extern crate time as time_crate;
|
||||
extern crate parity_reactor;
|
||||
extern crate parking_lot;
|
||||
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
mod health;
|
||||
mod time;
|
||||
mod types;
|
||||
|
||||
pub use futures_cpupool::CpuPool;
|
||||
pub use health::NodeHealth;
|
||||
pub use types::{Health, HealthInfo, HealthStatus};
|
||||
pub use time::{TimeChecker, Error};
|
||||
|
||||
/// Indicates sync status
|
||||
pub trait SyncStatus: ::std::fmt::Debug + Send + Sync {
|
||||
/// Returns true if there is a major sync happening.
|
||||
fn is_major_importing(&self) -> bool;
|
||||
|
||||
/// Returns number of connected and ideal peers.
|
||||
fn peers(&self) -> (usize, usize);
|
||||
}
|
||||
@@ -33,17 +33,22 @@
|
||||
|
||||
use std::io;
|
||||
use std::{fmt, mem, time};
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use futures::{self, Future, BoxFuture};
|
||||
use futures_cpupool::CpuPool;
|
||||
use std::sync::atomic::{self, AtomicUsize};
|
||||
use std::sync::Arc;
|
||||
|
||||
use futures::{self, Future};
|
||||
use futures::future::{self, IntoFuture};
|
||||
use futures_cpupool::{CpuPool, CpuFuture};
|
||||
use ntp;
|
||||
use time::{Duration, Timespec};
|
||||
use util::{Arc, RwLock};
|
||||
use parking_lot::RwLock;
|
||||
use time_crate::{Duration, Timespec};
|
||||
|
||||
/// Time checker error.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Error {
|
||||
/// No servers are currently available for a query.
|
||||
NoServersAvailable,
|
||||
/// There was an error when trying to reach the NTP server.
|
||||
Ntp(String),
|
||||
/// IO error when reading NTP response.
|
||||
@@ -55,6 +60,7 @@ impl fmt::Display for Error {
|
||||
use self::Error::*;
|
||||
|
||||
match *self {
|
||||
NoServersAvailable => write!(fmt, "No NTP servers available"),
|
||||
Ntp(ref err) => write!(fmt, "NTP error: {}", err),
|
||||
Io(ref err) => write!(fmt, "Connection Error: {}", err),
|
||||
}
|
||||
@@ -71,52 +77,125 @@ impl From<ntp::errors::Error> for Error {
|
||||
|
||||
/// NTP time drift checker.
|
||||
pub trait Ntp {
|
||||
/// Returned Future.
|
||||
type Future: IntoFuture<Item=Duration, Error=Error>;
|
||||
|
||||
/// Returns the current time drift.
|
||||
fn drift(&self) -> BoxFuture<Duration, Error>;
|
||||
fn drift(&self) -> Self::Future;
|
||||
}
|
||||
|
||||
const SERVER_MAX_POLL_INTERVAL_SECS: u64 = 60;
|
||||
#[derive(Debug)]
|
||||
struct Server {
|
||||
pub address: String,
|
||||
next_call: RwLock<time::Instant>,
|
||||
failures: AtomicUsize,
|
||||
}
|
||||
|
||||
impl Server {
|
||||
pub fn is_available(&self) -> bool {
|
||||
*self.next_call.read() < time::Instant::now()
|
||||
}
|
||||
|
||||
pub fn report_success(&self) {
|
||||
self.failures.store(0, atomic::Ordering::SeqCst);
|
||||
self.update_next_call(1)
|
||||
}
|
||||
|
||||
pub fn report_failure(&self) {
|
||||
let errors = self.failures.fetch_add(1, atomic::Ordering::SeqCst);
|
||||
self.update_next_call(1 << errors)
|
||||
}
|
||||
|
||||
fn update_next_call(&self, delay: usize) {
|
||||
*self.next_call.write() = time::Instant::now() + time::Duration::from_secs(delay as u64 * SERVER_MAX_POLL_INTERVAL_SECS);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: AsRef<str>> From<T> for Server {
|
||||
fn from(t: T) -> Self {
|
||||
Server {
|
||||
address: t.as_ref().to_owned(),
|
||||
next_call: RwLock::new(time::Instant::now()),
|
||||
failures: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// NTP client using the SNTP algorithm for calculating drift.
|
||||
#[derive(Clone)]
|
||||
pub struct SimpleNtp {
|
||||
address: Arc<String>,
|
||||
addresses: Vec<Arc<Server>>,
|
||||
pool: CpuPool,
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimpleNtp {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "Ntp {{ address: {} }}", self.address)
|
||||
f
|
||||
.debug_struct("SimpleNtp")
|
||||
.field("addresses", &self.addresses)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl SimpleNtp {
|
||||
fn new(address: &str, pool: CpuPool) -> SimpleNtp {
|
||||
fn new<T: AsRef<str>>(addresses: &[T], pool: CpuPool) -> SimpleNtp {
|
||||
SimpleNtp {
|
||||
address: Arc::new(address.to_owned()),
|
||||
addresses: addresses.iter().map(Server::from).map(Arc::new).collect(),
|
||||
pool: pool,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Ntp for SimpleNtp {
|
||||
fn drift(&self) -> BoxFuture<Duration, Error> {
|
||||
let address = self.address.clone();
|
||||
self.pool.spawn_fn(move || {
|
||||
let packet = ntp::request(&*address)?;
|
||||
let dest_time = ::time::now_utc().to_timespec();
|
||||
let orig_time = Timespec::from(packet.orig_time);
|
||||
let recv_time = Timespec::from(packet.recv_time);
|
||||
let transmit_time = Timespec::from(packet.transmit_time);
|
||||
type Future = future::Either<
|
||||
CpuFuture<Duration, Error>,
|
||||
future::FutureResult<Duration, Error>,
|
||||
>;
|
||||
|
||||
let drift = ((recv_time - orig_time) + (transmit_time - dest_time)) / 2;
|
||||
fn drift(&self) -> Self::Future {
|
||||
use self::future::Either::{A, B};
|
||||
|
||||
Ok(drift)
|
||||
}).boxed()
|
||||
let server = self.addresses.iter().find(|server| server.is_available());
|
||||
server.map(|server| {
|
||||
let server = server.clone();
|
||||
A(self.pool.spawn_fn(move || {
|
||||
debug!(target: "dapps", "Fetching time from {}.", server.address);
|
||||
|
||||
match ntp::request(&server.address) {
|
||||
Ok(packet) => {
|
||||
let dest_time = ::time_crate::now_utc().to_timespec();
|
||||
let orig_time = Timespec::from(packet.orig_time);
|
||||
let recv_time = Timespec::from(packet.recv_time);
|
||||
let transmit_time = Timespec::from(packet.transmit_time);
|
||||
|
||||
let drift = ((recv_time - orig_time) + (transmit_time - dest_time)) / 2;
|
||||
|
||||
server.report_success();
|
||||
Ok(drift)
|
||||
},
|
||||
Err(err) => {
|
||||
server.report_failure();
|
||||
Err(err.into())
|
||||
},
|
||||
}
|
||||
}))
|
||||
}).unwrap_or_else(|| B(future::err(Error::NoServersAvailable)))
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE In a positive scenario first results will be seen after:
|
||||
// MAX_RESULTS * UPDATE_TIMEOUT_INCOMPLETE_SECS seconds.
|
||||
const MAX_RESULTS: usize = 4;
|
||||
const UPDATE_TIMEOUT_OK_SECS: u64 = 30;
|
||||
const UPDATE_TIMEOUT_ERR_SECS: u64 = 2;
|
||||
const UPDATE_TIMEOUT_OK_SECS: u64 = 6 * 60 * 60;
|
||||
const UPDATE_TIMEOUT_WARN_SECS: u64 = 15 * 60;
|
||||
const UPDATE_TIMEOUT_ERR_SECS: u64 = 60;
|
||||
const UPDATE_TIMEOUT_INCOMPLETE_SECS: u64 = 10;
|
||||
|
||||
/// Maximal valid time drift.
|
||||
pub const MAX_DRIFT: i64 = 10_000;
|
||||
|
||||
type BoxFuture<A, B> = Box<Future<Item = A, Error = B> + Send>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// A time checker.
|
||||
@@ -127,13 +206,13 @@ pub struct TimeChecker<N: Ntp = SimpleNtp> {
|
||||
|
||||
impl TimeChecker<SimpleNtp> {
|
||||
/// Creates new time checker given the NTP server address.
|
||||
pub fn new(ntp_address: String, pool: CpuPool) -> Self {
|
||||
pub fn new<T: AsRef<str>>(ntp_addresses: &[T], pool: CpuPool) -> Self {
|
||||
let last_result = Arc::new(RwLock::new(
|
||||
// Assume everything is ok at the very beginning.
|
||||
(time::Instant::now(), vec![Ok(0)].into())
|
||||
));
|
||||
|
||||
let ntp = SimpleNtp::new(&ntp_address, pool);
|
||||
let ntp = SimpleNtp::new(ntp_addresses, pool);
|
||||
|
||||
TimeChecker {
|
||||
ntp,
|
||||
@@ -142,22 +221,34 @@ impl TimeChecker<SimpleNtp> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: Ntp> TimeChecker<N> {
|
||||
impl<N: Ntp> TimeChecker<N> where <N::Future as IntoFuture>::Future: Send + 'static {
|
||||
/// Updates the time
|
||||
pub fn update(&self) -> BoxFuture<i64, Error> {
|
||||
trace!(target: "dapps", "Updating time from NTP.");
|
||||
let last_result = self.last_result.clone();
|
||||
self.ntp.drift().then(move |res| {
|
||||
Box::new(self.ntp.drift().into_future().then(move |res| {
|
||||
let res = res.map(|d| d.num_milliseconds());
|
||||
|
||||
if let Err(Error::NoServersAvailable) = res {
|
||||
debug!(target: "dapps", "No NTP servers available. Selecting an older result.");
|
||||
return select_result(last_result.read().1.iter());
|
||||
}
|
||||
|
||||
// Update the results.
|
||||
let mut results = mem::replace(&mut last_result.write().1, VecDeque::new());
|
||||
let has_all_results = results.len() >= MAX_RESULTS;
|
||||
let valid_till = time::Instant::now() + time::Duration::from_secs(
|
||||
if res.is_ok() && results.len() == MAX_RESULTS {
|
||||
UPDATE_TIMEOUT_OK_SECS
|
||||
} else {
|
||||
UPDATE_TIMEOUT_ERR_SECS
|
||||
match res {
|
||||
Ok(time) if has_all_results && time < MAX_DRIFT => UPDATE_TIMEOUT_OK_SECS,
|
||||
Ok(_) if has_all_results => UPDATE_TIMEOUT_WARN_SECS,
|
||||
Err(_) if has_all_results => UPDATE_TIMEOUT_ERR_SECS,
|
||||
_ => UPDATE_TIMEOUT_INCOMPLETE_SECS,
|
||||
}
|
||||
);
|
||||
|
||||
trace!(target: "dapps", "New time drift received: {:?}", res);
|
||||
// Push the result.
|
||||
results.push_back(res.map(|d| d.num_milliseconds()));
|
||||
results.push_back(res);
|
||||
while results.len() > MAX_RESULTS {
|
||||
results.pop_front();
|
||||
}
|
||||
@@ -166,7 +257,7 @@ impl<N: Ntp> TimeChecker<N> {
|
||||
let res = select_result(results.iter());
|
||||
*last_result.write() = (valid_till, results);
|
||||
res
|
||||
}).boxed()
|
||||
}))
|
||||
}
|
||||
|
||||
/// Returns a current time drift or error if last request to NTP server failed.
|
||||
@@ -175,7 +266,7 @@ impl<N: Ntp> TimeChecker<N> {
|
||||
{
|
||||
let res = self.last_result.read();
|
||||
if res.0 > time::Instant::now() {
|
||||
return futures::done(select_result(res.1.iter())).boxed();
|
||||
return Box::new(futures::done(select_result(res.1.iter())));
|
||||
}
|
||||
}
|
||||
// or update and return result
|
||||
@@ -202,9 +293,9 @@ mod tests {
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::time::Instant;
|
||||
use time::Duration;
|
||||
use futures::{self, BoxFuture, Future};
|
||||
use futures::{future, Future};
|
||||
use super::{Ntp, TimeChecker, Error};
|
||||
use util::RwLock;
|
||||
use parking_lot::RwLock;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FakeNtp(RefCell<Vec<Duration>>, Cell<u64>);
|
||||
@@ -217,15 +308,17 @@ mod tests {
|
||||
}
|
||||
|
||||
impl Ntp for FakeNtp {
|
||||
fn drift(&self) -> BoxFuture<Duration, Error> {
|
||||
type Future = future::FutureResult<Duration, Error>;
|
||||
|
||||
fn drift(&self) -> Self::Future {
|
||||
self.1.set(self.1.get() + 1);
|
||||
futures::future::ok(self.0.borrow_mut().pop().expect("Unexpected call to drift().")).boxed()
|
||||
future::ok(self.0.borrow_mut().pop().expect("Unexpected call to drift()."))
|
||||
}
|
||||
}
|
||||
|
||||
fn time_checker() -> TimeChecker<FakeNtp> {
|
||||
let last_result = Arc::new(RwLock::new(
|
||||
(Instant::now(), vec![Err(Error::Ntp("NTP server unavailable.".into()))].into())
|
||||
(Instant::now(), vec![Err(Error::Ntp("NTP server unavailable".into()))].into())
|
||||
));
|
||||
|
||||
TimeChecker {
|
||||
57
dapps/node-health/src/types.rs
Normal file
57
dapps/node-health/src/types.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Base health types.
|
||||
|
||||
/// Health API endpoint status.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
pub enum HealthStatus {
|
||||
/// Everything's OK.
|
||||
#[serde(rename = "ok")]
|
||||
Ok,
|
||||
/// Node health need attention
|
||||
/// (the issue is not critical, but may need investigation)
|
||||
#[serde(rename = "needsAttention")]
|
||||
NeedsAttention,
|
||||
/// There is something bad detected with the node.
|
||||
#[serde(rename = "bad")]
|
||||
Bad,
|
||||
}
|
||||
|
||||
/// Represents a single check in node health.
|
||||
/// Cointains the status of that check and apropriate message and details.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct HealthInfo<T> {
|
||||
/// Check status.
|
||||
pub status: HealthStatus,
|
||||
/// Human-readable message.
|
||||
pub message: String,
|
||||
/// Technical details of the check.
|
||||
pub details: T,
|
||||
}
|
||||
|
||||
/// Node Health status.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Health {
|
||||
/// Status of peers.
|
||||
pub peers: HealthInfo<(usize, usize)>,
|
||||
/// Sync status.
|
||||
pub sync: HealthInfo<bool>,
|
||||
/// Time diff info.
|
||||
pub time: HealthInfo<i64>,
|
||||
}
|
||||
@@ -16,200 +16,82 @@
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::{server, net, Decoder, Encoder, Next, Control};
|
||||
use hyper::method::Method;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::{Method, StatusCode};
|
||||
|
||||
use api::{response, types};
|
||||
use api::time::TimeChecker;
|
||||
use api::response;
|
||||
use apps::fetcher::Fetcher;
|
||||
use handlers::{self, extract_url};
|
||||
use endpoint::{Endpoint, Handler, EndpointPath};
|
||||
use parity_reactor::Remote;
|
||||
use {SyncStatus};
|
||||
use endpoint::{Endpoint, Request, Response, EndpointPath};
|
||||
use futures::{future, Future};
|
||||
use node_health::{NodeHealth, HealthStatus};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct RestApi {
|
||||
fetcher: Arc<Fetcher>,
|
||||
sync_status: Arc<SyncStatus>,
|
||||
time: TimeChecker,
|
||||
remote: Remote,
|
||||
health: NodeHealth,
|
||||
}
|
||||
|
||||
impl Endpoint for RestApi {
|
||||
fn respond(&self, mut path: EndpointPath, req: Request) -> Response {
|
||||
if let Method::Options = *req.method() {
|
||||
return Box::new(future::ok(response::empty()));
|
||||
}
|
||||
|
||||
let endpoint = path.app_params.get(0).map(String::to_owned);
|
||||
let hash = path.app_params.get(1).map(String::to_owned);
|
||||
|
||||
// at this point path.app_id contains 'api', adjust it to the hash properly, otherwise
|
||||
// we will try and retrieve 'api' as the hash when doing the /api/content route
|
||||
if let Some(ref hash) = hash {
|
||||
path.app_id = hash.to_owned();
|
||||
}
|
||||
|
||||
trace!(target: "dapps", "Handling /api request: {:?}/{:?}", endpoint, hash);
|
||||
match endpoint.as_ref().map(String::as_str) {
|
||||
Some("ping") => Box::new(future::ok(response::ping(req))),
|
||||
Some("health") => self.health(),
|
||||
Some("content") => self.resolve_content(hash.as_ref().map(String::as_str), path, req),
|
||||
_ => Box::new(future::ok(response::not_found())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RestApi {
|
||||
pub fn new(
|
||||
fetcher: Arc<Fetcher>,
|
||||
sync_status: Arc<SyncStatus>,
|
||||
time: TimeChecker,
|
||||
remote: Remote,
|
||||
health: NodeHealth,
|
||||
) -> Box<Endpoint> {
|
||||
Box::new(RestApi {
|
||||
fetcher,
|
||||
sync_status,
|
||||
time,
|
||||
remote,
|
||||
health,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Endpoint for RestApi {
|
||||
fn to_async_handler(&self, path: EndpointPath, control: Control) -> Box<Handler> {
|
||||
Box::new(RestApiRouter::new((*self).clone(), path, control))
|
||||
}
|
||||
}
|
||||
|
||||
struct RestApiRouter {
|
||||
api: RestApi,
|
||||
path: Option<EndpointPath>,
|
||||
control: Option<Control>,
|
||||
handler: Box<Handler>,
|
||||
}
|
||||
|
||||
impl RestApiRouter {
|
||||
fn new(api: RestApi, path: EndpointPath, control: Control) -> Self {
|
||||
RestApiRouter {
|
||||
path: Some(path),
|
||||
control: Some(control),
|
||||
api: api,
|
||||
handler: Box::new(response::as_json_error(StatusCode::NotFound, &types::ApiError {
|
||||
code: "404".into(),
|
||||
title: "Not Found".into(),
|
||||
detail: "Resource you requested has not been found.".into(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_content(&self, hash: Option<&str>, path: EndpointPath, control: Control) -> Option<Box<Handler>> {
|
||||
fn resolve_content(&self, hash: Option<&str>, path: EndpointPath, req: Request) -> Response {
|
||||
trace!(target: "dapps", "Resolving content: {:?} from path: {:?}", hash, path);
|
||||
match hash {
|
||||
Some(hash) if self.api.fetcher.contains(hash) => {
|
||||
Some(self.api.fetcher.to_async_handler(path, control))
|
||||
Some(hash) if self.fetcher.contains(hash) => {
|
||||
self.fetcher.respond(path, req)
|
||||
},
|
||||
_ => None
|
||||
_ => Box::new(future::ok(response::not_found())),
|
||||
}
|
||||
}
|
||||
|
||||
fn health(&self, control: Control) -> Box<Handler> {
|
||||
use self::types::{HealthInfo, HealthStatus, Health};
|
||||
|
||||
trace!(target: "dapps", "Checking node health.");
|
||||
// Check timediff
|
||||
let sync_status = self.api.sync_status.clone();
|
||||
let map = move |time| {
|
||||
// Check peers
|
||||
let peers = {
|
||||
let (connected, max) = sync_status.peers();
|
||||
let (status, message) = match connected {
|
||||
0 => {
|
||||
(HealthStatus::Bad, "You are not connected to any peers. There is most likely some network issue. Fix connectivity.".into())
|
||||
},
|
||||
1 => (HealthStatus::NeedsAttention, "You are connected to only one peer. Your node might not be reliable. Check your network connection.".into()),
|
||||
_ => (HealthStatus::Ok, "".into()),
|
||||
};
|
||||
HealthInfo { status, message, details: (connected, max) }
|
||||
};
|
||||
|
||||
// Check sync
|
||||
let sync = {
|
||||
let is_syncing = sync_status.is_major_importing();
|
||||
let (status, message) = if is_syncing {
|
||||
(HealthStatus::NeedsAttention, "Your node is still syncing, the values you see might be outdated. Wait until it's fully synced.".into())
|
||||
} else {
|
||||
(HealthStatus::Ok, "".into())
|
||||
};
|
||||
HealthInfo { status, message, details: is_syncing }
|
||||
};
|
||||
|
||||
// Check time
|
||||
let time = {
|
||||
const MAX_DRIFT: i64 = 500;
|
||||
let (status, message, details) = match time {
|
||||
Ok(Ok(diff)) if diff < MAX_DRIFT && diff > -MAX_DRIFT => {
|
||||
(HealthStatus::Ok, "".into(), diff)
|
||||
},
|
||||
Ok(Ok(diff)) => {
|
||||
(HealthStatus::Bad, format!(
|
||||
"Your clock is not in sync. Detected difference is too big for the protocol to work: {}ms. Synchronize your clock.",
|
||||
diff,
|
||||
), diff)
|
||||
},
|
||||
Ok(Err(err)) => {
|
||||
(HealthStatus::NeedsAttention, format!(
|
||||
"Unable to reach time API: {}. Make sure that your clock is synchronized.",
|
||||
err,
|
||||
), 0)
|
||||
},
|
||||
Err(_) => {
|
||||
(HealthStatus::NeedsAttention, "Time API request timed out. Make sure that the clock is synchronized.".into(), 0)
|
||||
fn health(&self) -> Response {
|
||||
Box::new(self.health.health()
|
||||
.then(|health| {
|
||||
let status = match health {
|
||||
Ok(ref health) => {
|
||||
if [&health.peers.status, &health.sync.status].iter().any(|x| *x != &HealthStatus::Ok) {
|
||||
StatusCode::PreconditionFailed // HTTP 412
|
||||
} else {
|
||||
StatusCode::Ok // HTTP 200
|
||||
}
|
||||
},
|
||||
_ => StatusCode::ServiceUnavailable, // HTTP 503
|
||||
};
|
||||
|
||||
HealthInfo { status, message, details, }
|
||||
};
|
||||
|
||||
let status = if [&peers.status, &sync.status, &time.status].iter().any(|x| *x != &HealthStatus::Ok) {
|
||||
StatusCode::PreconditionFailed // HTTP 412
|
||||
} else {
|
||||
StatusCode::Ok // HTTP 200
|
||||
};
|
||||
|
||||
response::as_json(status, &Health { peers, sync, time })
|
||||
};
|
||||
|
||||
let time = self.api.time.time_drift();
|
||||
let remote = self.api.remote.clone();
|
||||
Box::new(handlers::AsyncHandler::new(time, map, remote, control))
|
||||
}
|
||||
}
|
||||
|
||||
impl server::Handler<net::HttpStream> for RestApiRouter {
|
||||
fn on_request(&mut self, request: server::Request<net::HttpStream>) -> Next {
|
||||
if let Method::Options = *request.method() {
|
||||
self.handler = response::empty();
|
||||
return Next::write();
|
||||
}
|
||||
|
||||
// TODO [ToDr] Consider using `path.app_params` instead
|
||||
let url = extract_url(&request);
|
||||
if url.is_none() {
|
||||
// Just return 404 if we can't parse URL
|
||||
return Next::write();
|
||||
}
|
||||
|
||||
let url = url.expect("Check for None early-exists above; qed");
|
||||
let mut path = self.path.take().expect("on_request called only once, and path is always defined in new; qed");
|
||||
let control = self.control.take().expect("on_request called only once, and control is always defined in new; qed");
|
||||
|
||||
let endpoint = url.path.get(1).map(|v| v.as_str());
|
||||
let hash = url.path.get(2).map(|v| v.as_str());
|
||||
// at this point path.app_id contains 'api', adjust it to the hash properly, otherwise
|
||||
// we will try and retrieve 'api' as the hash when doing the /api/content route
|
||||
if let Some(ref hash) = hash { path.app_id = hash.clone().to_owned() }
|
||||
|
||||
let handler = endpoint.and_then(|v| match v {
|
||||
"ping" => Some(response::ping()),
|
||||
"health" => Some(self.health(control)),
|
||||
"content" => self.resolve_content(hash, path, control),
|
||||
_ => None
|
||||
});
|
||||
|
||||
// Overwrite default
|
||||
if let Some(h) = handler {
|
||||
self.handler = h;
|
||||
}
|
||||
|
||||
self.handler.on_request(request)
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, decoder: &mut Decoder<net::HttpStream>) -> Next {
|
||||
self.handler.on_request_readable(decoder)
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
self.handler.on_response(res)
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<net::HttpStream>) -> Next {
|
||||
self.handler.on_response_writable(encoder)
|
||||
Ok(response::as_json(status, &health).into())
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,6 @@
|
||||
|
||||
mod api;
|
||||
mod response;
|
||||
mod time;
|
||||
mod types;
|
||||
|
||||
pub use self::api::RestApi;
|
||||
pub use self::time::TimeChecker;
|
||||
|
||||
@@ -16,27 +16,28 @@
|
||||
|
||||
use serde::Serialize;
|
||||
use serde_json;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::{self, mime, StatusCode};
|
||||
|
||||
use endpoint::Handler;
|
||||
use handlers::{ContentHandler, EchoHandler};
|
||||
|
||||
pub fn empty() -> Box<Handler> {
|
||||
Box::new(ContentHandler::ok("".into(), mime!(Text/Plain)))
|
||||
pub fn empty() -> hyper::Response {
|
||||
ContentHandler::ok("".into(), mime::TEXT_PLAIN).into()
|
||||
}
|
||||
|
||||
pub fn as_json<T: Serialize>(status: StatusCode, val: &T) -> ContentHandler {
|
||||
pub fn as_json<T: Serialize>(status: StatusCode, val: &T) -> hyper::Response {
|
||||
let json = serde_json::to_string(val)
|
||||
.expect("serialization to string is infallible; qed");
|
||||
ContentHandler::new(status, json, mime!(Application/Json))
|
||||
ContentHandler::new(status, json, mime::APPLICATION_JSON).into()
|
||||
}
|
||||
|
||||
pub fn as_json_error<T: Serialize>(status: StatusCode, val: &T) -> ContentHandler {
|
||||
let json = serde_json::to_string(val)
|
||||
.expect("serialization to string is infallible; qed");
|
||||
ContentHandler::new(status, json, mime!(Application/Json))
|
||||
pub fn ping(req: hyper::Request) -> hyper::Response {
|
||||
EchoHandler::new(req).into()
|
||||
}
|
||||
|
||||
pub fn ping() -> Box<Handler> {
|
||||
Box::new(EchoHandler::default())
|
||||
pub fn not_found() -> hyper::Response {
|
||||
as_json(StatusCode::NotFound, &::api::types::ApiError {
|
||||
code: "404".into(),
|
||||
title: "Not Found".into(),
|
||||
detail: "Resource you requested has not been found.".into(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -25,43 +25,3 @@ pub struct ApiError {
|
||||
/// More technical error details.
|
||||
pub detail: String,
|
||||
}
|
||||
|
||||
/// Health API endpoint status.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
pub enum HealthStatus {
|
||||
/// Everything's OK.
|
||||
#[serde(rename = "ok")]
|
||||
Ok,
|
||||
/// Node health need attention
|
||||
/// (the issue is not critical, but may need investigation)
|
||||
#[serde(rename = "needsAttention")]
|
||||
NeedsAttention,
|
||||
/// There is something bad detected with the node.
|
||||
#[serde(rename = "bad")]
|
||||
Bad
|
||||
}
|
||||
|
||||
/// Represents a single check in node health.
|
||||
/// Cointains the status of that check and apropriate message and details.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct HealthInfo<T> {
|
||||
/// Check status.
|
||||
pub status: HealthStatus,
|
||||
/// Human-readable message.
|
||||
pub message: String,
|
||||
/// Technical details of the check.
|
||||
pub details: T,
|
||||
}
|
||||
|
||||
/// Node Health status.
|
||||
#[derive(Debug, PartialEq, Serialize)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Health {
|
||||
/// Status of peers.
|
||||
pub peers: HealthInfo<(usize, usize)>,
|
||||
/// Sync status.
|
||||
pub sync: HealthInfo<bool>,
|
||||
/// Time diff info.
|
||||
pub time: HealthInfo<i64>,
|
||||
}
|
||||
|
||||
@@ -19,12 +19,12 @@
|
||||
use std::fs;
|
||||
|
||||
use linked_hash_map::LinkedHashMap;
|
||||
use page::LocalPageEndpoint;
|
||||
use page::local;
|
||||
use handlers::FetchControl;
|
||||
|
||||
pub enum ContentStatus {
|
||||
Fetching(FetchControl),
|
||||
Ready(LocalPageEndpoint),
|
||||
Ready(local::Dapp),
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
|
||||
@@ -18,16 +18,17 @@ use zip;
|
||||
use std::{fs, fmt};
|
||||
use std::io::{self, Read, Write};
|
||||
use std::path::PathBuf;
|
||||
use bigint::hash::H256;
|
||||
use fetch::{self, Mime};
|
||||
use util::H256;
|
||||
use futures_cpupool::CpuPool;
|
||||
use hash::keccak_buffer;
|
||||
|
||||
use util::sha3::sha3;
|
||||
use page::{LocalPageEndpoint, PageCache};
|
||||
use handlers::{ContentValidator, ValidatorResponse};
|
||||
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
|
||||
use handlers::{ContentValidator, ValidatorResponse};
|
||||
use page::{local, PageCache};
|
||||
use Embeddable;
|
||||
|
||||
type OnDone = Box<Fn(Option<LocalPageEndpoint>) + Send>;
|
||||
type OnDone = Box<Fn(Option<local::Dapp>) + Send>;
|
||||
|
||||
fn write_response_and_check_hash(
|
||||
id: &str,
|
||||
@@ -57,9 +58,9 @@ fn write_response_and_check_hash(
|
||||
file.flush()?;
|
||||
|
||||
// Validate hash
|
||||
// TODO [ToDr] calculate sha3 in-flight while reading the response
|
||||
// TODO [ToDr] calculate keccak in-flight while reading the response
|
||||
let mut file = io::BufReader::new(fs::File::open(&content_path)?);
|
||||
let hash = sha3(&mut file)?;
|
||||
let hash = keccak_buffer(&mut file)?;
|
||||
if id == hash {
|
||||
Ok((file.into_inner(), content_path))
|
||||
} else {
|
||||
@@ -75,15 +76,17 @@ pub struct Content {
|
||||
mime: Mime,
|
||||
content_path: PathBuf,
|
||||
on_done: OnDone,
|
||||
pool: CpuPool,
|
||||
}
|
||||
|
||||
impl Content {
|
||||
pub fn new(id: String, mime: Mime, content_path: PathBuf, on_done: OnDone) -> Self {
|
||||
pub fn new(id: String, mime: Mime, content_path: PathBuf, on_done: OnDone, pool: CpuPool) -> Self {
|
||||
Content {
|
||||
id: id,
|
||||
mime: mime,
|
||||
content_path: content_path,
|
||||
on_done: on_done,
|
||||
id,
|
||||
mime,
|
||||
content_path,
|
||||
on_done,
|
||||
pool,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -91,12 +94,15 @@ impl Content {
|
||||
impl ContentValidator for Content {
|
||||
type Error = ValidationError;
|
||||
|
||||
fn validate_and_install(&self, response: fetch::Response) -> Result<ValidatorResponse, ValidationError> {
|
||||
let validate = |content_path: PathBuf| {
|
||||
fn validate_and_install(self, response: fetch::Response) -> Result<ValidatorResponse, ValidationError> {
|
||||
let pool = self.pool;
|
||||
let id = self.id.clone();
|
||||
let mime = self.mime;
|
||||
let validate = move |content_path: PathBuf| {
|
||||
// Create dir
|
||||
let (_, content_path) = write_response_and_check_hash(self.id.as_str(), content_path.clone(), self.id.as_str(), response)?;
|
||||
let (_, content_path) = write_response_and_check_hash(&id, content_path, &id, response)?;
|
||||
|
||||
Ok(LocalPageEndpoint::single_file(content_path, self.mime.clone(), PageCache::Enabled))
|
||||
Ok(local::Dapp::single_file(pool, content_path, mime, PageCache::Enabled))
|
||||
};
|
||||
|
||||
// Prepare path for a file
|
||||
@@ -118,15 +124,17 @@ pub struct Dapp {
|
||||
dapps_path: PathBuf,
|
||||
on_done: OnDone,
|
||||
embeddable_on: Embeddable,
|
||||
pool: CpuPool,
|
||||
}
|
||||
|
||||
impl Dapp {
|
||||
pub fn new(id: String, dapps_path: PathBuf, on_done: OnDone, embeddable_on: Embeddable) -> Self {
|
||||
pub fn new(id: String, dapps_path: PathBuf, on_done: OnDone, embeddable_on: Embeddable, pool: CpuPool) -> Self {
|
||||
Dapp {
|
||||
id,
|
||||
dapps_path,
|
||||
on_done,
|
||||
embeddable_on,
|
||||
pool,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -158,16 +166,19 @@ impl Dapp {
|
||||
impl ContentValidator for Dapp {
|
||||
type Error = ValidationError;
|
||||
|
||||
fn validate_and_install(&self, response: fetch::Response) -> Result<ValidatorResponse, ValidationError> {
|
||||
let validate = |dapp_path: PathBuf| {
|
||||
let (file, zip_path) = write_response_and_check_hash(self.id.as_str(), dapp_path.clone(), &format!("{}.zip", self.id), response)?;
|
||||
fn validate_and_install(self, response: fetch::Response) -> Result<ValidatorResponse, ValidationError> {
|
||||
let id = self.id.clone();
|
||||
let pool = self.pool;
|
||||
let embeddable_on = self.embeddable_on;
|
||||
let validate = move |dapp_path: PathBuf| {
|
||||
let (file, zip_path) = write_response_and_check_hash(&id, dapp_path.clone(), &format!("{}.zip", id), response)?;
|
||||
trace!(target: "dapps", "Opening dapp bundle at {:?}", zip_path);
|
||||
// Unpack archive
|
||||
let mut zip = zip::ZipArchive::new(file)?;
|
||||
// First find manifest file
|
||||
let (mut manifest, manifest_dir) = Self::find_manifest(&mut zip)?;
|
||||
// Overwrite id to match hash
|
||||
manifest.id = self.id.clone();
|
||||
manifest.id = id;
|
||||
|
||||
// Unpack zip
|
||||
for i in 0..zip.len() {
|
||||
@@ -198,7 +209,7 @@ impl ContentValidator for Dapp {
|
||||
let mut manifest_file = fs::File::create(manifest_path)?;
|
||||
manifest_file.write_all(manifest_str.as_bytes())?;
|
||||
// Create endpoint
|
||||
let endpoint = LocalPageEndpoint::new(dapp_path, manifest.clone().into(), PageCache::Enabled, self.embeddable_on.clone());
|
||||
let endpoint = local::Dapp::new(pool, dapp_path, manifest.into(), PageCache::Enabled, embeddable_on);
|
||||
Ok(endpoint)
|
||||
};
|
||||
|
||||
|
||||
@@ -24,27 +24,25 @@ use std::{fs, env};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use rustc_hex::FromHex;
|
||||
use futures::{future, Future};
|
||||
use futures_cpupool::CpuPool;
|
||||
use fetch::{Client as FetchClient, Fetch};
|
||||
use hash_fetch::urlhint::{URLHintContract, URLHint, URLHintResult};
|
||||
use parity_reactor::Remote;
|
||||
|
||||
use hyper;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::StatusCode;
|
||||
|
||||
use {Embeddable, SyncStatus, random_filename};
|
||||
use util::Mutex;
|
||||
use page::LocalPageEndpoint;
|
||||
use parking_lot::Mutex;
|
||||
use page::local;
|
||||
use handlers::{ContentHandler, ContentFetcherHandler};
|
||||
use endpoint::{Endpoint, EndpointPath, Handler};
|
||||
use endpoint::{self, Endpoint, EndpointPath};
|
||||
use apps::cache::{ContentCache, ContentStatus};
|
||||
|
||||
/// Limit of cached dapps/content
|
||||
const MAX_CACHED_DAPPS: usize = 20;
|
||||
|
||||
pub trait Fetcher: Send + Sync + 'static {
|
||||
pub trait Fetcher: Endpoint + 'static {
|
||||
fn contains(&self, content_id: &str) -> bool;
|
||||
|
||||
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler>;
|
||||
}
|
||||
|
||||
pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + 'static = URLHintContract> {
|
||||
@@ -53,8 +51,8 @@ pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + 'static = URLHint
|
||||
cache: Arc<Mutex<ContentCache>>,
|
||||
sync: Arc<SyncStatus>,
|
||||
embeddable_on: Embeddable,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
pool: CpuPool,
|
||||
only_content: bool,
|
||||
}
|
||||
|
||||
@@ -66,24 +64,23 @@ impl<R: URLHint + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
|
||||
}
|
||||
|
||||
impl<R: URLHint + 'static, F: Fetch> ContentFetcher<F, R> {
|
||||
|
||||
pub fn new(
|
||||
resolver: R,
|
||||
sync_status: Arc<SyncStatus>,
|
||||
remote: Remote,
|
||||
sync: Arc<SyncStatus>,
|
||||
fetch: F,
|
||||
pool: CpuPool,
|
||||
) -> Self {
|
||||
let mut cache_path = env::temp_dir();
|
||||
cache_path.push(random_filename());
|
||||
|
||||
ContentFetcher {
|
||||
cache_path: cache_path,
|
||||
resolver: resolver,
|
||||
sync: sync_status,
|
||||
cache_path,
|
||||
resolver,
|
||||
sync,
|
||||
cache: Arc::new(Mutex::new(ContentCache::default())),
|
||||
embeddable_on: None,
|
||||
remote: remote,
|
||||
fetch: fetch,
|
||||
fetch,
|
||||
pool,
|
||||
only_content: true,
|
||||
}
|
||||
}
|
||||
@@ -98,24 +95,34 @@ impl<R: URLHint + 'static, F: Fetch> ContentFetcher<F, R> {
|
||||
self
|
||||
}
|
||||
|
||||
fn still_syncing(embeddable: Embeddable) -> Box<Handler> {
|
||||
Box::new(ContentHandler::error(
|
||||
fn not_found(embeddable: Embeddable) -> endpoint::Response {
|
||||
Box::new(future::ok(ContentHandler::error(
|
||||
StatusCode::NotFound,
|
||||
"Resource Not Found",
|
||||
"Requested resource was not found.",
|
||||
None,
|
||||
embeddable,
|
||||
).into()))
|
||||
}
|
||||
|
||||
fn still_syncing(embeddable: Embeddable) -> endpoint::Response {
|
||||
Box::new(future::ok(ContentHandler::error(
|
||||
StatusCode::ServiceUnavailable,
|
||||
"Sync In Progress",
|
||||
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
|
||||
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
|
||||
embeddable,
|
||||
))
|
||||
).into()))
|
||||
}
|
||||
|
||||
fn dapps_disabled(address: Embeddable) -> Box<Handler> {
|
||||
Box::new(ContentHandler::error(
|
||||
fn dapps_disabled(address: Embeddable) -> endpoint::Response {
|
||||
Box::new(future::ok(ContentHandler::error(
|
||||
StatusCode::ServiceUnavailable,
|
||||
"Network Dapps Not Available",
|
||||
"This interface doesn't support network dapps for security reasons.",
|
||||
None,
|
||||
address,
|
||||
))
|
||||
).into()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -126,8 +133,6 @@ impl<R: URLHint + 'static, F: Fetch> ContentFetcher<F, R> {
|
||||
// resolve contract call synchronously.
|
||||
// TODO: port to futures-based hyper and make it all async.
|
||||
fn resolve(&self, content_id: Vec<u8>) -> Option<URLHintResult> {
|
||||
use futures::Future;
|
||||
|
||||
self.resolver.resolve(content_id)
|
||||
.wait()
|
||||
.unwrap_or_else(|e| { warn!("Error resolving content-id: {}", e); None })
|
||||
@@ -151,8 +156,10 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
|
||||
impl<R: URLHint + 'static, F: Fetch> Endpoint for ContentFetcher<F, R> {
|
||||
fn respond(&self, path: EndpointPath, req: endpoint::Request) -> endpoint::Response {
|
||||
let mut cache = self.cache.lock();
|
||||
let content_id = path.app_id.clone();
|
||||
|
||||
@@ -161,12 +168,12 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
match status {
|
||||
// Just serve the content
|
||||
Some(&mut ContentStatus::Ready(ref endpoint)) => {
|
||||
(None, endpoint.to_async_handler(path, control))
|
||||
(None, endpoint.to_response(&path))
|
||||
},
|
||||
// Content is already being fetched
|
||||
Some(&mut ContentStatus::Fetching(ref fetch_control)) if !fetch_control.is_deadline_reached() => {
|
||||
trace!(target: "dapps", "Content fetching in progress. Waiting...");
|
||||
(None, fetch_control.to_async_handler(path, control))
|
||||
(None, fetch_control.to_response(path))
|
||||
},
|
||||
// We need to start fetching the content
|
||||
_ => {
|
||||
@@ -176,7 +183,7 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
|
||||
let cache = self.cache.clone();
|
||||
let id = content_id.clone();
|
||||
let on_done = move |result: Option<LocalPageEndpoint>| {
|
||||
let on_done = move |result: Option<local::Dapp>| {
|
||||
let mut cache = cache.lock();
|
||||
match result {
|
||||
Some(endpoint) => cache.insert(id.clone(), ContentStatus::Ready(endpoint)),
|
||||
@@ -195,39 +202,39 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
},
|
||||
Some(URLHintResult::Dapp(dapp)) => {
|
||||
let handler = ContentFetcherHandler::new(
|
||||
dapp.url(),
|
||||
req.method(),
|
||||
&dapp.url(),
|
||||
path,
|
||||
control,
|
||||
installers::Dapp::new(
|
||||
content_id.clone(),
|
||||
self.cache_path.clone(),
|
||||
Box::new(on_done),
|
||||
self.embeddable_on.clone(),
|
||||
self.pool.clone(),
|
||||
),
|
||||
self.embeddable_on.clone(),
|
||||
self.remote.clone(),
|
||||
self.fetch.clone(),
|
||||
);
|
||||
|
||||
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
|
||||
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as endpoint::Response)
|
||||
},
|
||||
Some(URLHintResult::Content(content)) => {
|
||||
let handler = ContentFetcherHandler::new(
|
||||
content.url,
|
||||
req.method(),
|
||||
&content.url,
|
||||
path,
|
||||
control,
|
||||
installers::Content::new(
|
||||
content_id.clone(),
|
||||
content.mime,
|
||||
self.cache_path.clone(),
|
||||
Box::new(on_done),
|
||||
self.pool.clone(),
|
||||
),
|
||||
self.embeddable_on.clone(),
|
||||
self.remote.clone(),
|
||||
self.fetch.clone(),
|
||||
);
|
||||
|
||||
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as Box<Handler>)
|
||||
(Some(ContentStatus::Fetching(handler.fetch_control())), Box::new(handler) as endpoint::Response)
|
||||
},
|
||||
None if self.sync.is_major_importing() => {
|
||||
(None, Self::still_syncing(self.embeddable_on.clone()))
|
||||
@@ -235,13 +242,7 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
None => {
|
||||
// This may happen when sync status changes in between
|
||||
// `contains` and `to_handler`
|
||||
(None, Box::new(ContentHandler::error(
|
||||
StatusCode::NotFound,
|
||||
"Resource Not Found",
|
||||
"Requested resource was not found.",
|
||||
None,
|
||||
self.embeddable_on.clone(),
|
||||
)) as Box<Handler>)
|
||||
(None, Self::not_found(self.embeddable_on.clone()))
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -261,15 +262,14 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
|
||||
mod tests {
|
||||
use std::env;
|
||||
use std::sync::Arc;
|
||||
use util::Bytes;
|
||||
use bytes::Bytes;
|
||||
use fetch::{Fetch, Client};
|
||||
use futures::{future, Future, BoxFuture};
|
||||
use hash_fetch::urlhint::{URLHint, URLHintResult};
|
||||
use parity_reactor::Remote;
|
||||
use futures::future;
|
||||
use hash_fetch::urlhint::{URLHint, URLHintResult, BoxFuture};
|
||||
|
||||
use apps::cache::ContentStatus;
|
||||
use endpoint::EndpointInfo;
|
||||
use page::LocalPageEndpoint;
|
||||
use page::local;
|
||||
use super::{ContentFetcher, Fetcher};
|
||||
use {SyncStatus};
|
||||
|
||||
@@ -277,10 +277,11 @@ mod tests {
|
||||
struct FakeResolver;
|
||||
impl URLHint for FakeResolver {
|
||||
fn resolve(&self, _id: Bytes) -> BoxFuture<Option<URLHintResult>, String> {
|
||||
future::ok(None).boxed()
|
||||
Box::new(future::ok(None))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct FakeSync(bool);
|
||||
impl SyncStatus for FakeSync {
|
||||
fn is_major_importing(&self) -> bool { self.0 }
|
||||
@@ -290,10 +291,16 @@ mod tests {
|
||||
#[test]
|
||||
fn should_true_if_contains_the_app() {
|
||||
// given
|
||||
let pool = ::futures_cpupool::CpuPool::new(1);
|
||||
let path = env::temp_dir();
|
||||
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(FakeSync(false)), Remote::new_sync(), Client::new().unwrap())
|
||||
.allow_dapps(true);
|
||||
let handler = LocalPageEndpoint::new(path, EndpointInfo {
|
||||
let fetcher = ContentFetcher::new(
|
||||
FakeResolver,
|
||||
Arc::new(FakeSync(false)),
|
||||
Client::new().unwrap(),
|
||||
pool.clone(),
|
||||
).allow_dapps(true);
|
||||
|
||||
let handler = local::Dapp::new(pool, path, EndpointInfo {
|
||||
name: "fake".into(),
|
||||
description: "".into(),
|
||||
version: "".into(),
|
||||
|
||||
@@ -19,9 +19,11 @@ use std::io;
|
||||
use std::io::Read;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use page::{LocalPageEndpoint, PageCache};
|
||||
use endpoint::{Endpoint, EndpointInfo};
|
||||
use futures_cpupool::CpuPool;
|
||||
|
||||
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest};
|
||||
use endpoint::{Endpoint, EndpointInfo};
|
||||
use page::{local, PageCache};
|
||||
use Embeddable;
|
||||
|
||||
struct LocalDapp {
|
||||
@@ -61,14 +63,14 @@ fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
|
||||
/// Returns Dapp Id and Local Dapp Endpoint for given filesystem path.
|
||||
/// Parses the path to extract last component (for name).
|
||||
/// `None` is returned when path is invalid or non-existent.
|
||||
pub fn local_endpoint<P: AsRef<Path>>(path: P, embeddable: Embeddable) -> Option<(String, Box<LocalPageEndpoint>)> {
|
||||
pub fn local_endpoint<P: AsRef<Path>>(path: P, embeddable: Embeddable, pool: CpuPool) -> Option<(String, Box<local::Dapp>)> {
|
||||
let path = path.as_ref().to_owned();
|
||||
path.canonicalize().ok().and_then(|path| {
|
||||
let name = path.file_name().and_then(|name| name.to_str());
|
||||
name.map(|name| {
|
||||
let dapp = local_dapp(name.into(), path.clone());
|
||||
(dapp.id, Box::new(LocalPageEndpoint::new(
|
||||
dapp.path, dapp.info, PageCache::Disabled, embeddable.clone())
|
||||
(dapp.id, Box::new(local::Dapp::new(
|
||||
pool.clone(), dapp.path, dapp.info, PageCache::Disabled, embeddable.clone())
|
||||
))
|
||||
})
|
||||
})
|
||||
@@ -86,13 +88,13 @@ fn local_dapp(name: String, path: PathBuf) -> LocalDapp {
|
||||
}
|
||||
|
||||
/// Returns endpoints for Local Dapps found for given filesystem path.
|
||||
/// Scans the directory and collects `LocalPageEndpoints`.
|
||||
pub fn local_endpoints<P: AsRef<Path>>(dapps_path: P, embeddable: Embeddable) -> BTreeMap<String, Box<Endpoint>> {
|
||||
/// Scans the directory and collects `local::Dapp`.
|
||||
pub fn local_endpoints<P: AsRef<Path>>(dapps_path: P, embeddable: Embeddable, pool: CpuPool) -> BTreeMap<String, Box<Endpoint>> {
|
||||
let mut pages = BTreeMap::<String, Box<Endpoint>>::new();
|
||||
for dapp in local_dapps(dapps_path.as_ref()) {
|
||||
pages.insert(
|
||||
dapp.id,
|
||||
Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, embeddable.clone()))
|
||||
Box::new(local::Dapp::new(pool.clone(), dapp.path, dapp.info, PageCache::Disabled, embeddable.clone()))
|
||||
);
|
||||
}
|
||||
pages
|
||||
|
||||
@@ -14,24 +14,23 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use endpoint::{Endpoints, Endpoint};
|
||||
use page::PageEndpoint;
|
||||
use futures_cpupool::CpuPool;
|
||||
use page;
|
||||
use proxypac::ProxyPac;
|
||||
use web::Web;
|
||||
use fetch::Fetch;
|
||||
use parity_dapps::WebApp;
|
||||
use parity_reactor::Remote;
|
||||
use parity_ui;
|
||||
use {WebProxyTokens, ParentFrameSettings};
|
||||
|
||||
mod app;
|
||||
mod cache;
|
||||
mod fs;
|
||||
mod ui;
|
||||
pub mod fs;
|
||||
pub mod fetcher;
|
||||
pub mod manifest;
|
||||
|
||||
@@ -44,12 +43,12 @@ pub const UTILS_PATH: &'static str = "parity-utils";
|
||||
pub const WEB_PATH: &'static str = "web";
|
||||
pub const URL_REFERER: &'static str = "__referer=";
|
||||
|
||||
pub fn utils() -> Box<Endpoint> {
|
||||
Box::new(PageEndpoint::with_prefix(parity_ui::App::default(), UTILS_PATH.to_owned()))
|
||||
pub fn utils(pool: CpuPool) -> Box<Endpoint> {
|
||||
Box::new(page::builtin::Dapp::new(pool, parity_ui::App::default()))
|
||||
}
|
||||
|
||||
pub fn ui() -> Box<Endpoint> {
|
||||
Box::new(PageEndpoint::with_fallback_to_index(parity_ui::App::default()))
|
||||
pub fn ui(pool: CpuPool) -> Box<Endpoint> {
|
||||
Box::new(page::builtin::Dapp::with_fallback_to_index(pool, parity_ui::App::default()))
|
||||
}
|
||||
|
||||
pub fn ui_redirection(embeddable: Option<ParentFrameSettings>) -> Box<Endpoint> {
|
||||
@@ -62,13 +61,14 @@ pub fn all_endpoints<F: Fetch>(
|
||||
dapps_domain: &str,
|
||||
embeddable: Option<ParentFrameSettings>,
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
) -> Endpoints {
|
||||
pool: CpuPool,
|
||||
) -> (Vec<String>, Endpoints) {
|
||||
// fetch fs dapps at first to avoid overwriting builtins
|
||||
let mut pages = fs::local_endpoints(dapps_path, embeddable.clone());
|
||||
let mut pages = fs::local_endpoints(dapps_path.clone(), embeddable.clone(), pool.clone());
|
||||
let local_endpoints: Vec<String> = pages.keys().cloned().collect();
|
||||
for path in extra_dapps {
|
||||
if let Some((id, endpoint)) = fs::local_endpoint(path.clone(), embeddable.clone()) {
|
||||
if let Some((id, endpoint)) = fs::local_endpoint(path.clone(), embeddable.clone(), pool.clone()) {
|
||||
pages.insert(id, endpoint);
|
||||
} else {
|
||||
warn!(target: "dapps", "Ignoring invalid dapp at {}", path.display());
|
||||
@@ -76,17 +76,17 @@ pub fn all_endpoints<F: Fetch>(
|
||||
}
|
||||
|
||||
// NOTE [ToDr] Dapps will be currently embeded on 8180
|
||||
insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(embeddable.clone()));
|
||||
insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(embeddable.clone()), pool.clone());
|
||||
pages.insert("proxy".into(), ProxyPac::boxed(embeddable.clone(), dapps_domain.to_owned()));
|
||||
pages.insert(WEB_PATH.into(), Web::boxed(embeddable.clone(), web_proxy_tokens.clone(), remote.clone(), fetch.clone()));
|
||||
pages.insert(WEB_PATH.into(), Web::boxed(embeddable.clone(), web_proxy_tokens.clone(), fetch.clone()));
|
||||
|
||||
Arc::new(pages)
|
||||
(local_endpoints, pages)
|
||||
}
|
||||
|
||||
fn insert<T : WebApp + Default + 'static>(pages: &mut BTreeMap<String, Box<Endpoint>>, id: &str, embed_at: Embeddable) {
|
||||
fn insert<T : WebApp + Default + 'static>(pages: &mut Endpoints, id: &str, embed_at: Embeddable, pool: CpuPool) {
|
||||
pages.insert(id.to_owned(), Box::new(match embed_at {
|
||||
Embeddable::Yes(address) => PageEndpoint::new_safe_to_embed(T::default(), address),
|
||||
Embeddable::No => PageEndpoint::new(T::default()),
|
||||
Embeddable::Yes(address) => page::builtin::Dapp::new_safe_to_embed(pool, T::default(), address),
|
||||
Embeddable::No => page::builtin::Dapp::new(pool, T::default()),
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
@@ -16,9 +16,10 @@
|
||||
|
||||
//! UI redirections
|
||||
|
||||
use hyper::{Control, StatusCode};
|
||||
use hyper::StatusCode;
|
||||
use futures::future;
|
||||
|
||||
use endpoint::{Endpoint, Handler, EndpointPath};
|
||||
use endpoint::{Endpoint, Request, Response, EndpointPath};
|
||||
use {handlers, Embeddable};
|
||||
|
||||
/// Redirection to UI server.
|
||||
@@ -37,19 +38,20 @@ impl Redirection {
|
||||
}
|
||||
|
||||
impl Endpoint for Redirection {
|
||||
fn to_async_handler(&self, _path: EndpointPath, _control: Control) -> Box<Handler> {
|
||||
if let Some(ref frame) = self.embeddable_on {
|
||||
fn respond(&self, _path: EndpointPath, req: Request) -> Response {
|
||||
Box::new(future::ok(if let Some(ref frame) = self.embeddable_on {
|
||||
trace!(target: "dapps", "Redirecting to signer interface.");
|
||||
handlers::Redirection::boxed(&format!("http://{}:{}", &frame.host, frame.port))
|
||||
let protocol = req.uri().scheme().unwrap_or("http");
|
||||
handlers::Redirection::new(format!("{}://{}:{}", protocol, &frame.host, frame.port)).into()
|
||||
} else {
|
||||
trace!(target: "dapps", "Signer disabled, returning 404.");
|
||||
Box::new(handlers::ContentHandler::error(
|
||||
handlers::ContentHandler::error(
|
||||
StatusCode::NotFound,
|
||||
"404 Not Found",
|
||||
"Your homepage is not available when Trusted Signer is disabled.",
|
||||
Some("You can still access dapps by writing a correct address, though. Re-enable Signer to get your homepage back."),
|
||||
None,
|
||||
))
|
||||
}
|
||||
).into()
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,20 +16,27 @@
|
||||
|
||||
//! URL Endpoint traits
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use hyper::{self, server, net};
|
||||
use jsonrpc_core::BoxFuture;
|
||||
use hyper;
|
||||
|
||||
#[derive(Debug, PartialEq, Default, Clone)]
|
||||
pub struct EndpointPath {
|
||||
pub app_id: String,
|
||||
pub app_params: Vec<String>,
|
||||
pub query: Option<String>,
|
||||
pub host: String,
|
||||
pub port: u16,
|
||||
pub using_dapps_domains: bool,
|
||||
}
|
||||
|
||||
impl EndpointPath {
|
||||
pub fn has_no_params(&self) -> bool {
|
||||
self.app_params.is_empty() || self.app_params.iter().all(|x| x.is_empty())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct EndpointInfo {
|
||||
pub name: String,
|
||||
@@ -39,17 +46,12 @@ pub struct EndpointInfo {
|
||||
pub icon_url: String,
|
||||
}
|
||||
|
||||
pub type Endpoints = Arc<BTreeMap<String, Box<Endpoint>>>;
|
||||
pub type Handler = server::Handler<net::HttpStream> + Send;
|
||||
pub type Endpoints = BTreeMap<String, Box<Endpoint>>;
|
||||
pub type Response = BoxFuture<hyper::Response, hyper::Error>;
|
||||
pub type Request = hyper::Request;
|
||||
|
||||
pub trait Endpoint : Send + Sync {
|
||||
fn info(&self) -> Option<&EndpointInfo> { None }
|
||||
|
||||
fn to_handler(&self, _path: EndpointPath) -> Box<Handler> {
|
||||
panic!("This Endpoint is asynchronous and requires Control object.");
|
||||
}
|
||||
|
||||
fn to_async_handler(&self, path: EndpointPath, _control: hyper::Control) -> Box<Handler> {
|
||||
self.to_handler(path)
|
||||
}
|
||||
fn respond(&self, path: EndpointPath, req: Request) -> Response;
|
||||
}
|
||||
|
||||
@@ -1,112 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Async Content Handler
|
||||
//! Temporary solution until we switch to future-based server.
|
||||
//! Wraps a future and converts it to hyper::server::Handler;
|
||||
|
||||
use std::{mem, time};
|
||||
use std::sync::mpsc;
|
||||
use futures::Future;
|
||||
use hyper::{server, Decoder, Encoder, Next, Control};
|
||||
use hyper::net::HttpStream;
|
||||
|
||||
use handlers::ContentHandler;
|
||||
use parity_reactor::Remote;
|
||||
|
||||
const TIMEOUT_SECS: u64 = 15;
|
||||
|
||||
enum State<F, T, M> {
|
||||
Initial(F, M, Remote, Control),
|
||||
Waiting(mpsc::Receiver<Result<T, ()>>, M),
|
||||
Done(ContentHandler),
|
||||
Invalid,
|
||||
}
|
||||
|
||||
pub struct AsyncHandler<F, T, M> {
|
||||
state: State<F, T, M>,
|
||||
}
|
||||
|
||||
impl<F, T, M> AsyncHandler<F, T, M> {
|
||||
pub fn new(future: F, map: M, remote: Remote, control: Control) -> Self {
|
||||
AsyncHandler {
|
||||
state: State::Initial(future, map, remote, control),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<F, T, E, M> server::Handler<HttpStream> for AsyncHandler<F, Result<T, E>, M> where
|
||||
F: Future<Item=T, Error=E> + Send + 'static,
|
||||
M: FnOnce(Result<Result<T, E>, ()>) -> ContentHandler,
|
||||
T: Send + 'static,
|
||||
E: Send + 'static,
|
||||
{
|
||||
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
|
||||
if let State::Initial(future, map, remote, control) = mem::replace(&mut self.state, State::Invalid) {
|
||||
let (tx, rx) = mpsc::sync_channel(1);
|
||||
let control2 = control.clone();
|
||||
let tx2 = tx.clone();
|
||||
remote.spawn_with_timeout(move || future.then(move |result| {
|
||||
// Send a result (ignore errors if the connection was dropped)
|
||||
let _ = tx.send(Ok(result));
|
||||
// Resume handler
|
||||
let _ = control.ready(Next::read());
|
||||
|
||||
Ok(())
|
||||
}), time::Duration::from_secs(TIMEOUT_SECS), move || {
|
||||
// Notify about error
|
||||
let _ = tx2.send(Err(()));
|
||||
// Resume handler
|
||||
let _ = control2.ready(Next::read());
|
||||
});
|
||||
|
||||
self.state = State::Waiting(rx, map);
|
||||
}
|
||||
|
||||
Next::wait()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
if let State::Waiting(rx, map) = mem::replace(&mut self.state, State::Invalid) {
|
||||
match rx.try_recv() {
|
||||
Ok(result) => {
|
||||
self.state = State::Done(map(result));
|
||||
},
|
||||
Err(err) => {
|
||||
warn!("Resuming handler in incorrect state: {:?}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
if let State::Done(ref mut handler) = self.state {
|
||||
handler.on_response(res)
|
||||
} else {
|
||||
Next::end()
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
if let State::Done(ref mut handler) = self.state {
|
||||
handler.on_response_writable(encoder)
|
||||
} else {
|
||||
Next::end()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -16,32 +16,29 @@
|
||||
|
||||
//! Simple Content Handler
|
||||
|
||||
use hyper::{header, server, Decoder, Encoder, Next};
|
||||
use hyper::net::HttpStream;
|
||||
use hyper::mime::Mime;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::{self, mime, header};
|
||||
use hyper::StatusCode;
|
||||
|
||||
use util::version;
|
||||
use parity_version::version;
|
||||
|
||||
use handlers::add_security_headers;
|
||||
use Embeddable;
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ContentHandler {
|
||||
code: StatusCode,
|
||||
content: String,
|
||||
mimetype: Mime,
|
||||
write_pos: usize,
|
||||
mimetype: mime::Mime,
|
||||
safe_to_embed_on: Embeddable,
|
||||
}
|
||||
|
||||
impl ContentHandler {
|
||||
pub fn ok(content: String, mimetype: Mime) -> Self {
|
||||
pub fn ok(content: String, mimetype: mime::Mime) -> Self {
|
||||
Self::new(StatusCode::Ok, content, mimetype)
|
||||
}
|
||||
|
||||
pub fn html(code: StatusCode, content: String, embeddable_on: Embeddable) -> Self {
|
||||
Self::new_embeddable(code, content, mime!(Text/Html), embeddable_on)
|
||||
Self::new_embeddable(code, content, mime::TEXT_HTML, embeddable_on)
|
||||
}
|
||||
|
||||
pub fn error(
|
||||
@@ -60,57 +57,32 @@ impl ContentHandler {
|
||||
), embeddable_on)
|
||||
}
|
||||
|
||||
pub fn new(code: StatusCode, content: String, mimetype: Mime) -> Self {
|
||||
pub fn new(code: StatusCode, content: String, mimetype: mime::Mime) -> Self {
|
||||
Self::new_embeddable(code, content, mimetype, None)
|
||||
}
|
||||
|
||||
pub fn new_embeddable(
|
||||
code: StatusCode,
|
||||
content: String,
|
||||
mimetype: Mime,
|
||||
mimetype: mime::Mime,
|
||||
safe_to_embed_on: Embeddable,
|
||||
) -> Self {
|
||||
ContentHandler {
|
||||
code,
|
||||
content,
|
||||
mimetype,
|
||||
write_pos: 0,
|
||||
safe_to_embed_on,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl server::Handler<HttpStream> for ContentHandler {
|
||||
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
res.set_status(self.code);
|
||||
res.headers_mut().set(header::ContentType(self.mimetype.clone()));
|
||||
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.take());
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
let bytes = self.content.as_bytes();
|
||||
if self.write_pos == bytes.len() {
|
||||
return Next::end();
|
||||
}
|
||||
|
||||
match encoder.write(&bytes[self.write_pos..]) {
|
||||
Ok(bytes) => {
|
||||
self.write_pos += bytes;
|
||||
Next::write()
|
||||
},
|
||||
Err(e) => match e.kind() {
|
||||
::std::io::ErrorKind::WouldBlock => Next::write(),
|
||||
_ => Next::end()
|
||||
},
|
||||
}
|
||||
impl Into<hyper::Response> for ContentHandler {
|
||||
fn into(self) -> hyper::Response {
|
||||
let mut res = hyper::Response::new()
|
||||
.with_status(self.code)
|
||||
.with_header(header::ContentType(self.mimetype))
|
||||
.with_body(self.content);
|
||||
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,45 +16,31 @@
|
||||
|
||||
//! Echo Handler
|
||||
|
||||
use std::io::Read;
|
||||
use hyper::{server, Decoder, Encoder, Next};
|
||||
use hyper::net::HttpStream;
|
||||
use super::ContentHandler;
|
||||
use hyper::{self, header};
|
||||
|
||||
#[derive(Default)]
|
||||
use handlers::add_security_headers;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EchoHandler {
|
||||
content: String,
|
||||
handler: Option<ContentHandler>,
|
||||
request: hyper::Request,
|
||||
}
|
||||
|
||||
impl server::Handler<HttpStream> for EchoHandler {
|
||||
fn on_request(&mut self, _: server::Request<HttpStream>) -> Next {
|
||||
Next::read()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
match decoder.read_to_string(&mut self.content) {
|
||||
Ok(0) => {
|
||||
self.handler = Some(ContentHandler::ok(self.content.clone(), mime!(Application/Json)));
|
||||
Next::write()
|
||||
},
|
||||
Ok(_) => Next::read(),
|
||||
Err(e) => match e.kind() {
|
||||
::std::io::ErrorKind::WouldBlock => Next::read(),
|
||||
_ => Next::end(),
|
||||
}
|
||||
impl EchoHandler {
|
||||
pub fn new(request: hyper::Request) -> Self {
|
||||
EchoHandler {
|
||||
request,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
self.handler.as_mut()
|
||||
.expect("handler always set in on_request, which is before now; qed")
|
||||
.on_response(res)
|
||||
}
|
||||
impl Into<hyper::Response> for EchoHandler {
|
||||
fn into(self) -> hyper::Response {
|
||||
let content_type = self.request.headers().get().cloned();
|
||||
let mut res = hyper::Response::new()
|
||||
.with_header(content_type.unwrap_or(header::ContentType::json()))
|
||||
.with_body(self.request.body());
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
self.handler.as_mut()
|
||||
.expect("handler always set in on_request, which is before now; qed")
|
||||
.on_response_writable(encoder)
|
||||
add_security_headers(res.headers_mut(), None);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,57 +16,39 @@
|
||||
|
||||
//! Hyper Server Handler that fetches a file during a request (proxy).
|
||||
|
||||
use std::fmt;
|
||||
use std::sync::{mpsc, Arc};
|
||||
use std::{fmt, mem};
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::time::{Instant, Duration};
|
||||
use fetch::{self, Fetch};
|
||||
use futures::Future;
|
||||
use parity_reactor::Remote;
|
||||
use util::Mutex;
|
||||
use futures::sync::oneshot;
|
||||
use futures::{self, Future};
|
||||
use hyper::{self, Method, StatusCode};
|
||||
use jsonrpc_core::BoxFuture;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use hyper::{server, Decoder, Encoder, Next, Method, Control};
|
||||
use hyper::net::HttpStream;
|
||||
use hyper::uri::RequestUri;
|
||||
use hyper::status::StatusCode;
|
||||
|
||||
use endpoint::EndpointPath;
|
||||
use endpoint::{self, EndpointPath};
|
||||
use handlers::{ContentHandler, StreamingHandler};
|
||||
use page::{LocalPageEndpoint, PageHandlerWaiting};
|
||||
use page::local;
|
||||
use {Embeddable};
|
||||
|
||||
const FETCH_TIMEOUT: u64 = 300;
|
||||
|
||||
pub enum ValidatorResponse {
|
||||
Local(LocalPageEndpoint),
|
||||
Local(local::Dapp),
|
||||
Streaming(StreamingHandler<fetch::Response>),
|
||||
}
|
||||
|
||||
pub trait ContentValidator: Send + 'static {
|
||||
pub trait ContentValidator: Sized + Send + 'static {
|
||||
type Error: fmt::Debug + fmt::Display;
|
||||
|
||||
fn validate_and_install(&self, fetch::Response) -> Result<ValidatorResponse, Self::Error>;
|
||||
fn validate_and_install(self, fetch::Response) -> Result<ValidatorResponse, Self::Error>;
|
||||
}
|
||||
|
||||
enum FetchState {
|
||||
Waiting,
|
||||
NotStarted(String),
|
||||
Error(ContentHandler),
|
||||
InProgress(mpsc::Receiver<FetchState>),
|
||||
Streaming(StreamingHandler<fetch::Response>),
|
||||
Done(LocalPageEndpoint, Box<PageHandlerWaiting>),
|
||||
}
|
||||
|
||||
enum WaitResult {
|
||||
Error(ContentHandler),
|
||||
Done(LocalPageEndpoint),
|
||||
NonAwaitable,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FetchControl {
|
||||
abort: Arc<AtomicBool>,
|
||||
listeners: Arc<Mutex<Vec<(Control, mpsc::Sender<WaitResult>)>>>,
|
||||
listeners: Arc<Mutex<Vec<oneshot::Sender<WaitResult>>>>,
|
||||
deadline: Instant,
|
||||
}
|
||||
|
||||
@@ -81,14 +63,30 @@ impl Default for FetchControl {
|
||||
}
|
||||
|
||||
impl FetchControl {
|
||||
pub fn is_deadline_reached(&self) -> bool {
|
||||
self.deadline < Instant::now()
|
||||
}
|
||||
|
||||
pub fn abort(&self) {
|
||||
self.abort.store(true, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
pub fn to_response(&self, path: EndpointPath) -> endpoint::Response {
|
||||
let (tx, receiver) = oneshot::channel();
|
||||
self.listeners.lock().push(tx);
|
||||
|
||||
Box::new(WaitingHandler {
|
||||
path,
|
||||
state: WaitState::Waiting(receiver),
|
||||
})
|
||||
}
|
||||
|
||||
fn notify<F: Fn() -> WaitResult>(&self, status: F) {
|
||||
let mut listeners = self.listeners.lock();
|
||||
for (control, sender) in listeners.drain(..) {
|
||||
for sender in listeners.drain(..) {
|
||||
trace!(target: "dapps", "Resuming request waiting for content...");
|
||||
if let Err(e) = sender.send(status()) {
|
||||
trace!(target: "dapps", "Waiting listener notification failed: {:?}", e);
|
||||
} else {
|
||||
let _ = control.ready(Next::read());
|
||||
if let Err(_) = sender.send(status()) {
|
||||
trace!(target: "dapps", "Waiting listener notification failed.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -98,92 +96,79 @@ impl FetchControl {
|
||||
FetchState::Error(ref handler) => self.notify(|| WaitResult::Error(handler.clone())),
|
||||
FetchState::Done(ref endpoint, _) => self.notify(|| WaitResult::Done(endpoint.clone())),
|
||||
FetchState::Streaming(_) => self.notify(|| WaitResult::NonAwaitable),
|
||||
FetchState::NotStarted(_) | FetchState::InProgress(_) | FetchState::Waiting => {},
|
||||
FetchState::InProgress(_) => {},
|
||||
FetchState::Empty => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_deadline_reached(&self) -> bool {
|
||||
self.deadline < Instant::now()
|
||||
}
|
||||
|
||||
pub fn abort(&self) {
|
||||
self.abort.store(true, Ordering::SeqCst);
|
||||
}
|
||||
enum WaitState {
|
||||
Waiting(oneshot::Receiver<WaitResult>),
|
||||
Done(endpoint::Response),
|
||||
}
|
||||
|
||||
pub fn to_async_handler(&self, path: EndpointPath, control: Control) -> Box<server::Handler<HttpStream> + Send> {
|
||||
let (tx, rx) = mpsc::channel();
|
||||
self.listeners.lock().push((control, tx));
|
||||
|
||||
Box::new(WaitingHandler {
|
||||
receiver: rx,
|
||||
state: FetchState::Waiting,
|
||||
uri: RequestUri::default(),
|
||||
path: path,
|
||||
})
|
||||
}
|
||||
#[derive(Debug)]
|
||||
enum WaitResult {
|
||||
Error(ContentHandler),
|
||||
Done(local::Dapp),
|
||||
NonAwaitable,
|
||||
}
|
||||
|
||||
pub struct WaitingHandler {
|
||||
receiver: mpsc::Receiver<WaitResult>,
|
||||
state: FetchState,
|
||||
uri: RequestUri,
|
||||
path: EndpointPath,
|
||||
state: WaitState,
|
||||
}
|
||||
|
||||
impl server::Handler<HttpStream> for WaitingHandler {
|
||||
fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
|
||||
self.uri = request.uri().clone();
|
||||
Next::wait()
|
||||
}
|
||||
impl Future for WaitingHandler {
|
||||
type Item = hyper::Response;
|
||||
type Error = hyper::Error;
|
||||
|
||||
fn on_request_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
let result = self.receiver.try_recv().ok();
|
||||
self.state = match result {
|
||||
Some(WaitResult::Error(handler)) => FetchState::Error(handler),
|
||||
Some(WaitResult::Done(endpoint)) => {
|
||||
let mut page_handler = endpoint.to_page_handler(self.path.clone());
|
||||
page_handler.set_uri(&self.uri);
|
||||
FetchState::Done(endpoint, page_handler)
|
||||
},
|
||||
_ => {
|
||||
warn!("A result for waiting request was not received.");
|
||||
FetchState::Waiting
|
||||
},
|
||||
};
|
||||
fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> {
|
||||
loop {
|
||||
let new_state = match self.state {
|
||||
WaitState::Waiting(ref mut receiver) => {
|
||||
let result = try_ready!(receiver.poll().map_err(|_| hyper::Error::Timeout));
|
||||
|
||||
match self.state {
|
||||
FetchState::Done(_, ref mut handler) => handler.on_request_readable(decoder),
|
||||
FetchState::Streaming(ref mut handler) => handler.on_request_readable(decoder),
|
||||
FetchState::Error(ref mut handler) => handler.on_request_readable(decoder),
|
||||
_ => Next::write(),
|
||||
}
|
||||
}
|
||||
match result {
|
||||
WaitResult::Error(handler) => {
|
||||
return Ok(futures::Async::Ready(handler.into()));
|
||||
},
|
||||
WaitResult::NonAwaitable => {
|
||||
let errors = Errors { embeddable_on: None };
|
||||
return Ok(futures::Async::Ready(errors.streaming().into()));
|
||||
},
|
||||
WaitResult::Done(endpoint) => {
|
||||
WaitState::Done(endpoint.to_response(&self.path).into())
|
||||
},
|
||||
}
|
||||
},
|
||||
WaitState::Done(ref mut response) => {
|
||||
return response.poll()
|
||||
},
|
||||
};
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
match self.state {
|
||||
FetchState::Done(_, ref mut handler) => handler.on_response(res),
|
||||
FetchState::Streaming(ref mut handler) => handler.on_response(res),
|
||||
FetchState::Error(ref mut handler) => handler.on_response(res),
|
||||
_ => Next::end(),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
match self.state {
|
||||
FetchState::Done(_, ref mut handler) => handler.on_response_writable(encoder),
|
||||
FetchState::Streaming(ref mut handler) => handler.on_response_writable(encoder),
|
||||
FetchState::Error(ref mut handler) => handler.on_response_writable(encoder),
|
||||
_ => Next::end(),
|
||||
self.state = new_state;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
struct Errors {
|
||||
embeddable_on: Embeddable,
|
||||
}
|
||||
|
||||
impl Errors {
|
||||
fn streaming(&self) -> ContentHandler {
|
||||
ContentHandler::error(
|
||||
StatusCode::BadGateway,
|
||||
"Streaming Error",
|
||||
"This content is being streamed in other place.",
|
||||
None,
|
||||
self.embeddable_on.clone(),
|
||||
)
|
||||
}
|
||||
|
||||
fn download_error<E: fmt::Debug>(&self, e: E) -> ContentHandler {
|
||||
ContentHandler::error(
|
||||
StatusCode::BadGateway,
|
||||
@@ -225,67 +210,102 @@ impl Errors {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ContentFetcherHandler<H: ContentValidator, F: Fetch> {
|
||||
enum FetchState {
|
||||
Error(ContentHandler),
|
||||
InProgress(BoxFuture<FetchState, ()>),
|
||||
Streaming(hyper::Response),
|
||||
Done(local::Dapp, endpoint::Response),
|
||||
Empty,
|
||||
}
|
||||
|
||||
impl fmt::Debug for FetchState {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
use self::FetchState::*;
|
||||
|
||||
write!(fmt, "FetchState(")?;
|
||||
match *self {
|
||||
Error(ref error) => write!(fmt, "error: {:?}", error),
|
||||
InProgress(_) => write!(fmt, "in progress"),
|
||||
Streaming(ref res) => write!(fmt, "streaming: {:?}", res),
|
||||
Done(ref endpoint, _) => write!(fmt, "done: {:?}", endpoint),
|
||||
Empty => write!(fmt, "?"),
|
||||
}?;
|
||||
write!(fmt, ")")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ContentFetcherHandler {
|
||||
fetch_control: FetchControl,
|
||||
control: Control,
|
||||
remote: Remote,
|
||||
status: FetchState,
|
||||
fetch: F,
|
||||
installer: Option<H>,
|
||||
path: EndpointPath,
|
||||
errors: Errors,
|
||||
}
|
||||
|
||||
impl<H: ContentValidator, F: Fetch> ContentFetcherHandler<H, F> {
|
||||
pub fn new(
|
||||
url: String,
|
||||
path: EndpointPath,
|
||||
control: Control,
|
||||
installer: H,
|
||||
embeddable_on: Embeddable,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
) -> Self {
|
||||
ContentFetcherHandler {
|
||||
fetch_control: FetchControl::default(),
|
||||
control,
|
||||
remote,
|
||||
fetch,
|
||||
status: FetchState::NotStarted(url),
|
||||
installer: Some(installer),
|
||||
path,
|
||||
errors: Errors {
|
||||
embeddable_on,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
impl ContentFetcherHandler {
|
||||
pub fn fetch_control(&self) -> FetchControl {
|
||||
self.fetch_control.clone()
|
||||
}
|
||||
|
||||
fn fetch_content(&self, uri: RequestUri, url: &str, installer: H) -> mpsc::Receiver<FetchState> {
|
||||
let (tx, rx) = mpsc::channel();
|
||||
let abort = self.fetch_control.abort.clone();
|
||||
pub fn new<H: ContentValidator, F: Fetch>(
|
||||
method: &hyper::Method,
|
||||
url: &str,
|
||||
path: EndpointPath,
|
||||
installer: H,
|
||||
embeddable_on: Embeddable,
|
||||
fetch: F,
|
||||
) -> Self {
|
||||
let fetch_control = FetchControl::default();
|
||||
let errors = Errors { embeddable_on };
|
||||
|
||||
let path = self.path.clone();
|
||||
let tx2 = tx.clone();
|
||||
let control = self.control.clone();
|
||||
let errors = self.errors.clone();
|
||||
// Validation of method
|
||||
let status = match *method {
|
||||
// Start fetching content
|
||||
Method::Get => {
|
||||
trace!(target: "dapps", "Fetching content from: {:?}", url);
|
||||
FetchState::InProgress(Self::fetch_content(
|
||||
fetch,
|
||||
url,
|
||||
fetch_control.abort.clone(),
|
||||
path,
|
||||
errors.clone(),
|
||||
installer,
|
||||
))
|
||||
},
|
||||
// or return error
|
||||
_ => FetchState::Error(errors.method_not_allowed()),
|
||||
};
|
||||
|
||||
let future = self.fetch.fetch_with_abort(url, abort.into()).then(move |result| {
|
||||
ContentFetcherHandler {
|
||||
fetch_control,
|
||||
status,
|
||||
errors,
|
||||
}
|
||||
}
|
||||
|
||||
fn fetch_content<H: ContentValidator, F: Fetch>(
|
||||
fetch: F,
|
||||
url: &str,
|
||||
abort: Arc<AtomicBool>,
|
||||
path: EndpointPath,
|
||||
errors: Errors,
|
||||
installer: H,
|
||||
) -> BoxFuture<FetchState, ()> {
|
||||
// Start fetching the content
|
||||
let fetch2 = fetch.clone();
|
||||
let future = fetch.fetch_with_abort(url, abort.into()).then(move |result| {
|
||||
trace!(target: "dapps", "Fetching content finished. Starting validation: {:?}", result);
|
||||
let new_state = match result {
|
||||
Ok(match result {
|
||||
Ok(response) => match installer.validate_and_install(response) {
|
||||
Ok(ValidatorResponse::Local(endpoint)) => {
|
||||
trace!(target: "dapps", "Validation OK. Returning response.");
|
||||
let mut handler = endpoint.to_page_handler(path);
|
||||
handler.set_uri(&uri);
|
||||
FetchState::Done(endpoint, handler)
|
||||
let response = endpoint.to_response(&path);
|
||||
FetchState::Done(endpoint, response)
|
||||
},
|
||||
Ok(ValidatorResponse::Streaming(handler)) => {
|
||||
Ok(ValidatorResponse::Streaming(stream)) => {
|
||||
trace!(target: "dapps", "Validation OK. Streaming response.");
|
||||
FetchState::Streaming(handler)
|
||||
let (reading, response) = stream.into_response();
|
||||
fetch2.process_and_forget(reading);
|
||||
FetchState::Streaming(response)
|
||||
},
|
||||
Err(e) => {
|
||||
trace!(target: "dapps", "Error while validating content: {:?}", e);
|
||||
@@ -296,100 +316,55 @@ impl<H: ContentValidator, F: Fetch> ContentFetcherHandler<H, F> {
|
||||
warn!(target: "dapps", "Unable to fetch content: {:?}", e);
|
||||
FetchState::Error(errors.download_error(e))
|
||||
},
|
||||
};
|
||||
// Content may be resolved when the connection is already dropped.
|
||||
let _ = tx2.send(new_state);
|
||||
// Ignoring control errors
|
||||
let _ = control.ready(Next::read());
|
||||
Ok(()) as Result<(), ()>
|
||||
})
|
||||
});
|
||||
|
||||
// make sure to run within fetch thread pool.
|
||||
let future = self.fetch.process(future);
|
||||
// spawn to event loop
|
||||
let control = self.control.clone();
|
||||
let errors = self.errors.clone();
|
||||
self.remote.spawn_with_timeout(|| future, Duration::from_secs(FETCH_TIMEOUT), move || {
|
||||
// Notify about the timeout
|
||||
let _ = tx.send(FetchState::Error(errors.timeout_error()));
|
||||
// Ignoring control errors
|
||||
let _ = control.ready(Next::read());
|
||||
});
|
||||
|
||||
rx
|
||||
fetch.process(future)
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: ContentValidator, F: Fetch> server::Handler<HttpStream> for ContentFetcherHandler<H, F> {
|
||||
fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
|
||||
let status = if let FetchState::NotStarted(ref url) = self.status {
|
||||
let uri = request.uri().clone();
|
||||
let installer = self.installer.take().expect("Installer always set initialy; installer used only in on_request; on_request invoked only once; qed");
|
||||
impl Future for ContentFetcherHandler {
|
||||
type Item = hyper::Response;
|
||||
type Error = hyper::Error;
|
||||
|
||||
Some(match *request.method() {
|
||||
// Start fetching content
|
||||
Method::Get => {
|
||||
trace!(target: "dapps", "Fetching content from: {:?}", url);
|
||||
let receiver = self.fetch_content(uri, url, installer);
|
||||
FetchState::InProgress(receiver)
|
||||
fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> {
|
||||
loop {
|
||||
trace!(target: "dapps", "Polling status: {:?}", self.status);
|
||||
self.status = match mem::replace(&mut self.status, FetchState::Empty) {
|
||||
FetchState::Error(error) => {
|
||||
return Ok(futures::Async::Ready(error.into()));
|
||||
},
|
||||
// or return error
|
||||
_ => FetchState::Error(self.errors.method_not_allowed()),
|
||||
})
|
||||
} else { None };
|
||||
FetchState::Streaming(response) => {
|
||||
return Ok(futures::Async::Ready(response));
|
||||
},
|
||||
any => any,
|
||||
};
|
||||
|
||||
if let Some(status) = status {
|
||||
let status = match self.status {
|
||||
// Request may time out
|
||||
FetchState::InProgress(_) if self.fetch_control.is_deadline_reached() => {
|
||||
trace!(target: "dapps", "Fetching dapp failed because of timeout.");
|
||||
FetchState::Error(self.errors.timeout_error())
|
||||
},
|
||||
FetchState::InProgress(ref mut receiver) => {
|
||||
// Check if there is a response
|
||||
trace!(target: "dapps", "Polling streaming response.");
|
||||
try_ready!(receiver.poll().map_err(|err| {
|
||||
warn!(target: "dapps", "Error while fetching response: {:?}", err);
|
||||
hyper::Error::Timeout
|
||||
}))
|
||||
},
|
||||
FetchState::Done(_, ref mut response) => {
|
||||
return response.poll()
|
||||
},
|
||||
FetchState::Empty => panic!("Future polled twice."),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
trace!(target: "dapps", "New status: {:?}", status);
|
||||
self.fetch_control.set_status(&status);
|
||||
self.status = status;
|
||||
}
|
||||
|
||||
Next::read()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
let (status, next) = match self.status {
|
||||
// Request may time out
|
||||
FetchState::InProgress(_) if self.fetch_control.is_deadline_reached() => {
|
||||
trace!(target: "dapps", "Fetching dapp failed because of timeout.");
|
||||
(Some(FetchState::Error(self.errors.timeout_error())), Next::write())
|
||||
},
|
||||
FetchState::InProgress(ref receiver) => {
|
||||
// Check if there is an answer
|
||||
let rec = receiver.try_recv();
|
||||
match rec {
|
||||
// just return the new state
|
||||
Ok(state) => (Some(state), Next::write()),
|
||||
// wait some more
|
||||
_ => (None, Next::wait())
|
||||
}
|
||||
},
|
||||
FetchState::Error(ref mut handler) => (None, handler.on_request_readable(decoder)),
|
||||
_ => (None, Next::write()),
|
||||
};
|
||||
|
||||
if let Some(status) = status {
|
||||
self.fetch_control.set_status(&status);
|
||||
self.status = status;
|
||||
}
|
||||
|
||||
next
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
match self.status {
|
||||
FetchState::Done(_, ref mut handler) => handler.on_response(res),
|
||||
FetchState::Streaming(ref mut handler) => handler.on_response(res),
|
||||
FetchState::Error(ref mut handler) => handler.on_response(res),
|
||||
_ => Next::end(),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
match self.status {
|
||||
FetchState::Done(_, ref mut handler) => handler.on_response_writable(encoder),
|
||||
FetchState::Streaming(ref mut handler) => handler.on_response_writable(encoder),
|
||||
FetchState::Error(ref mut handler) => handler.on_response_writable(encoder),
|
||||
_ => Next::end(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,68 +16,79 @@
|
||||
|
||||
//! Hyper handlers implementations.
|
||||
|
||||
mod async;
|
||||
mod content;
|
||||
mod echo;
|
||||
mod fetch;
|
||||
mod reader;
|
||||
mod redirect;
|
||||
mod streaming;
|
||||
|
||||
pub use self::async::AsyncHandler;
|
||||
pub use self::content::ContentHandler;
|
||||
pub use self::echo::EchoHandler;
|
||||
pub use self::fetch::{ContentFetcherHandler, ContentValidator, FetchControl, ValidatorResponse};
|
||||
pub use self::reader::Reader;
|
||||
pub use self::redirect::Redirection;
|
||||
pub use self::streaming::StreamingHandler;
|
||||
|
||||
use std::iter;
|
||||
use util::Itertools;
|
||||
|
||||
use url::Url;
|
||||
use hyper::{server, header, net, uri};
|
||||
use itertools::Itertools;
|
||||
use hyper::header;
|
||||
use {apps, address, Embeddable};
|
||||
|
||||
/// Adds security-related headers to the Response.
|
||||
pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embeddable) {
|
||||
headers.set_raw("X-XSS-Protection", vec![b"1; mode=block".to_vec()]);
|
||||
headers.set_raw("X-Content-Type-Options", vec![b"nosniff".to_vec()]);
|
||||
headers.set_raw("X-XSS-Protection", "1; mode=block");
|
||||
headers.set_raw("X-Content-Type-Options", "nosniff");
|
||||
|
||||
// Embedding header:
|
||||
if let None = embeddable_on {
|
||||
headers.set_raw("X-Frame-Options", vec![b"SAMEORIGIN".to_vec()]);
|
||||
headers.set_raw("X-Frame-Options", "SAMEORIGIN");
|
||||
}
|
||||
|
||||
// Content Security Policy headers
|
||||
headers.set_raw("Content-Security-Policy", vec![
|
||||
headers.set_raw("Content-Security-Policy", String::new()
|
||||
// Allow connecting to WS servers and HTTP(S) servers.
|
||||
// We could be more restrictive and allow only RPC server URL.
|
||||
b"connect-src http: https: ws: wss:;".to_vec(),
|
||||
+ "connect-src http: https: ws: wss:;"
|
||||
// Allow framing any content from HTTP(S).
|
||||
// Again we could only allow embedding from RPC server URL.
|
||||
// (deprecated)
|
||||
b"frame-src 'self' http: https:;".to_vec(),
|
||||
+ "frame-src 'self' http: https:;"
|
||||
// Allow framing and web workers from HTTP(S).
|
||||
b"child-src 'self' http: https:;".to_vec(),
|
||||
+ "child-src 'self' http: https:;"
|
||||
// We allow data: blob: and HTTP(s) images.
|
||||
// We could get rid of wildcarding HTTP and only allow RPC server URL.
|
||||
// (http required for local dapps icons)
|
||||
b"img-src 'self' 'unsafe-inline' data: blob: http: https:;".to_vec(),
|
||||
+ "img-src 'self' 'unsafe-inline' data: blob: http: https:;"
|
||||
// Allow style from data: blob: and HTTPS.
|
||||
b"style-src 'self' 'unsafe-inline' data: blob: https:;".to_vec(),
|
||||
+ "style-src 'self' 'unsafe-inline' data: blob: https:;"
|
||||
// Allow fonts from data: and HTTPS.
|
||||
b"font-src 'self' data: https:;".to_vec(),
|
||||
+ "font-src 'self' data: https:;"
|
||||
// Allow inline scripts and scripts eval (webpack/jsconsole)
|
||||
b"script-src 'self' 'unsafe-inline' 'unsafe-eval';".to_vec(),
|
||||
+ {
|
||||
let script_src = embeddable_on.as_ref()
|
||||
.map(|e| e.extra_script_src.iter()
|
||||
.map(|&(ref host, port)| address(host, port))
|
||||
.join(" ")
|
||||
).unwrap_or_default();
|
||||
&format!(
|
||||
"script-src 'self' 'unsafe-inline' 'unsafe-eval' {};",
|
||||
script_src
|
||||
)
|
||||
}
|
||||
// Same restrictions as script-src with additional
|
||||
// blob: that is required for camera access (worker)
|
||||
+ "worker-src 'self' 'unsafe-inline' 'unsafe-eval' https: blob:;"
|
||||
// Restrict everything else to the same origin.
|
||||
b"default-src 'self';".to_vec(),
|
||||
+ "default-src 'self';"
|
||||
// Run in sandbox mode (although it's not fully safe since we allow same-origin and script)
|
||||
b"sandbox allow-same-origin allow-forms allow-modals allow-popups allow-presentation allow-scripts;".to_vec(),
|
||||
+ "sandbox allow-same-origin allow-forms allow-modals allow-popups allow-presentation allow-scripts;"
|
||||
// Disallow subitting forms from any dapps
|
||||
b"form-action 'none';".to_vec(),
|
||||
+ "form-action 'none';"
|
||||
// Never allow mixed content
|
||||
b"block-all-mixed-content;".to_vec(),
|
||||
+ "block-all-mixed-content;"
|
||||
// Specify if the site can be embedded.
|
||||
match embeddable_on {
|
||||
+ &match embeddable_on {
|
||||
Some(ref embed) => {
|
||||
let std = address(&embed.host, embed.port);
|
||||
let proxy = format!("{}.{}", apps::HOME_PAGE, embed.dapps_domain);
|
||||
@@ -87,7 +98,7 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
|
||||
.into_iter()
|
||||
.chain(embed.extra_embed_on
|
||||
.iter()
|
||||
.map(|&(ref host, port)| format!("{}:{}", host, port))
|
||||
.map(|&(ref host, port)| address(host, port))
|
||||
);
|
||||
|
||||
let ancestors = if embed.host == "127.0.0.1" {
|
||||
@@ -100,44 +111,6 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
|
||||
format!("frame-ancestors {};", ancestors)
|
||||
},
|
||||
None => format!("frame-ancestors 'self';"),
|
||||
}.into_bytes(),
|
||||
]);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
/// Extracts URL part from the Request.
|
||||
pub fn extract_url(req: &server::Request<net::HttpStream>) -> Option<Url> {
|
||||
convert_uri_to_url(req.uri(), req.headers().get::<header::Host>())
|
||||
}
|
||||
|
||||
/// Extracts URL given URI and Host header.
|
||||
pub fn convert_uri_to_url(uri: &uri::RequestUri, host: Option<&header::Host>) -> Option<Url> {
|
||||
match *uri {
|
||||
uri::RequestUri::AbsoluteUri(ref url) => {
|
||||
match Url::from_generic_url(url.clone()) {
|
||||
Ok(url) => Some(url),
|
||||
_ => None,
|
||||
}
|
||||
},
|
||||
uri::RequestUri::AbsolutePath { ref path, ref query } => {
|
||||
let query = match *query {
|
||||
Some(ref query) => format!("?{}", query),
|
||||
None => "".into(),
|
||||
};
|
||||
// Attempt to prepend the Host header (mandatory in HTTP/1.1)
|
||||
let url_string = match host {
|
||||
Some(ref host) => {
|
||||
format!("http://{}:{}{}{}", host.hostname, host.port.unwrap_or(80), path, query)
|
||||
},
|
||||
None => return None,
|
||||
};
|
||||
|
||||
match Url::parse(&url_string) {
|
||||
Ok(url) => Some(url),
|
||||
_ => None,
|
||||
}
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
73
dapps/src/handlers/reader.rs
Normal file
73
dapps/src/handlers/reader.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! A chunk-producing io::Read wrapper.
|
||||
|
||||
use std::io::{self, Read};
|
||||
|
||||
use futures::{self, sink, Sink, Future};
|
||||
use futures::sync::mpsc;
|
||||
use hyper;
|
||||
|
||||
type Sender = mpsc::Sender<Result<hyper::Chunk, hyper::Error>>;
|
||||
|
||||
const MAX_CHUNK_SIZE: usize = 32 * 1024;
|
||||
|
||||
/// A Reader is essentially a stream of `hyper::Chunks`.
|
||||
/// The chunks are read from given `io::Read` instance.
|
||||
///
|
||||
/// Unfortunately `hyper` doesn't allow you to pass `Stream`
|
||||
/// directly to the response, so you need to create
|
||||
/// a `Body::pair()` and send over chunks using `sink::Send`.
|
||||
/// Also `Chunks` need to take `Vec` by value, so we need
|
||||
/// to allocate it for each chunk being sent.
|
||||
pub struct Reader<R: io::Read> {
|
||||
buffer: [u8; MAX_CHUNK_SIZE],
|
||||
content: io::BufReader<R>,
|
||||
sending: sink::Send<Sender>,
|
||||
}
|
||||
|
||||
impl<R: io::Read> Reader<R> {
|
||||
pub fn pair(content: R, initial: Vec<u8>) -> (Self, hyper::Body) {
|
||||
let (tx, rx) = hyper::Body::pair();
|
||||
let reader = Reader {
|
||||
buffer: [0; MAX_CHUNK_SIZE],
|
||||
content: io::BufReader::new(content),
|
||||
sending: tx.send(Ok(initial.into())),
|
||||
};
|
||||
|
||||
(reader, rx)
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: io::Read> Future for Reader<R> {
|
||||
type Item = ();
|
||||
type Error = ();
|
||||
|
||||
fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> {
|
||||
loop {
|
||||
let next = try_ready!(self.sending.poll().map_err(|err| {
|
||||
warn!(target: "dapps", "Unable to send next chunk: {:?}", err);
|
||||
}));
|
||||
|
||||
self.sending = match self.content.read(&mut self.buffer) {
|
||||
Ok(0) => return Ok(futures::Async::Ready(())),
|
||||
Ok(read) => next.send(Ok(self.buffer[..read].to_vec().into())),
|
||||
Err(err) => next.send(Err(hyper::Error::Io(err))),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -16,9 +16,7 @@
|
||||
|
||||
//! HTTP Redirection hyper handler
|
||||
|
||||
use hyper::{header, server, Decoder, Encoder, Next};
|
||||
use hyper::net::HttpStream;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::{self, header, StatusCode};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Redirection {
|
||||
@@ -26,36 +24,18 @@ pub struct Redirection {
|
||||
}
|
||||
|
||||
impl Redirection {
|
||||
pub fn new(url: &str) -> Self {
|
||||
pub fn new<T: Into<String>>(url: T) -> Self {
|
||||
Redirection {
|
||||
to_url: url.to_owned()
|
||||
to_url: url.into()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn boxed(url: &str) -> Box<Self> {
|
||||
Box::new(Self::new(url))
|
||||
}
|
||||
}
|
||||
|
||||
impl server::Handler<HttpStream> for Redirection {
|
||||
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
impl Into<hyper::Response> for Redirection {
|
||||
fn into(self) -> hyper::Response {
|
||||
// Don't use `MovedPermanently` here to prevent browser from caching the redirections.
|
||||
res.set_status(StatusCode::Found);
|
||||
res.headers_mut().set(header::Location(self.to_url.to_owned()));
|
||||
Next::write()
|
||||
}
|
||||
fn on_response_writable(&mut self, _encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
Next::end()
|
||||
hyper::Response::new()
|
||||
.with_status(StatusCode::Found)
|
||||
.with_header(header::Location::new(self.to_url))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -16,87 +16,43 @@
|
||||
|
||||
//! Content Stream Response
|
||||
|
||||
use std::io::{self, Read};
|
||||
use std::io;
|
||||
use hyper::{self, header, mime, StatusCode};
|
||||
|
||||
use hyper::{header, server, Decoder, Encoder, Next};
|
||||
use hyper::net::HttpStream;
|
||||
use hyper::mime::Mime;
|
||||
use hyper::status::StatusCode;
|
||||
|
||||
use handlers::add_security_headers;
|
||||
use handlers::{add_security_headers, Reader};
|
||||
use Embeddable;
|
||||
|
||||
const BUFFER_SIZE: usize = 1024;
|
||||
|
||||
pub struct StreamingHandler<R: io::Read> {
|
||||
buffer: [u8; BUFFER_SIZE],
|
||||
buffer_leftover: usize,
|
||||
pub struct StreamingHandler<R> {
|
||||
initial: Vec<u8>,
|
||||
content: R,
|
||||
status: StatusCode,
|
||||
content: io::BufReader<R>,
|
||||
mimetype: Mime,
|
||||
mimetype: mime::Mime,
|
||||
safe_to_embed_on: Embeddable,
|
||||
}
|
||||
|
||||
impl<R: io::Read> StreamingHandler<R> {
|
||||
pub fn new(content: R, status: StatusCode, mimetype: Mime, embeddable_on: Embeddable) -> Self {
|
||||
pub fn new(content: R, status: StatusCode, mimetype: mime::Mime, safe_to_embed_on: Embeddable) -> Self {
|
||||
StreamingHandler {
|
||||
buffer: [0; BUFFER_SIZE],
|
||||
buffer_leftover: 0,
|
||||
status: status,
|
||||
content: io::BufReader::new(content),
|
||||
mimetype: mimetype,
|
||||
safe_to_embed_on: embeddable_on,
|
||||
initial: Vec::new(),
|
||||
content,
|
||||
status,
|
||||
mimetype,
|
||||
safe_to_embed_on,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_initial_content(&mut self, content: &str) {
|
||||
assert_eq!(self.buffer_leftover, 0);
|
||||
let bytes = content.as_bytes();
|
||||
self.buffer_leftover = bytes.len();
|
||||
self.buffer[0..self.buffer_leftover].copy_from_slice(bytes);
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: io::Read> server::Handler<HttpStream> for StreamingHandler<R> {
|
||||
fn on_request(&mut self, _request: server::Request<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
res.set_status(self.status);
|
||||
res.headers_mut().set(header::ContentType(self.mimetype.clone()));
|
||||
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.take());
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
fn handle_error(e: io::Error) -> Next {
|
||||
match e.kind() {
|
||||
::std::io::ErrorKind::WouldBlock => Next::write(),
|
||||
_ => Next::end(),
|
||||
}
|
||||
}
|
||||
|
||||
let write_pos = self.buffer_leftover;
|
||||
match self.content.read(&mut self.buffer[write_pos..]) {
|
||||
Err(e) => handle_error(e),
|
||||
Ok(read) => match encoder.write(&self.buffer[..write_pos + read]) {
|
||||
Err(e) => handle_error(e),
|
||||
Ok(0) => Next::end(),
|
||||
Ok(wrote) => {
|
||||
self.buffer_leftover = write_pos + read - wrote;
|
||||
if self.buffer_leftover > 0 {
|
||||
for i in self.buffer_leftover..write_pos + read {
|
||||
self.buffer.swap(i, i - self.buffer_leftover);
|
||||
}
|
||||
}
|
||||
Next::write()
|
||||
},
|
||||
},
|
||||
}
|
||||
self.initial = content.as_bytes().to_vec();
|
||||
}
|
||||
|
||||
pub fn into_response(self) -> (Reader<R>, hyper::Response) {
|
||||
let (reader, body) = Reader::pair(self.content, self.initial);
|
||||
let mut res = hyper::Response::new()
|
||||
.with_status(self.status)
|
||||
.with_header(header::ContentType(self.mimetype))
|
||||
.with_body(body);
|
||||
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on);
|
||||
|
||||
(reader, res)
|
||||
}
|
||||
}
|
||||
|
||||
146
dapps/src/lib.rs
146
dapps/src/lib.rs
@@ -20,34 +20,36 @@
|
||||
#![cfg_attr(feature="nightly", plugin(clippy))]
|
||||
|
||||
extern crate base32;
|
||||
extern crate futures;
|
||||
extern crate futures_cpupool;
|
||||
extern crate itertools;
|
||||
extern crate linked_hash_map;
|
||||
extern crate mime_guess;
|
||||
extern crate ntp;
|
||||
extern crate parking_lot;
|
||||
extern crate rand;
|
||||
extern crate rustc_hex;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
extern crate time;
|
||||
extern crate unicase;
|
||||
extern crate url as url_lib;
|
||||
extern crate zip;
|
||||
|
||||
extern crate jsonrpc_core;
|
||||
extern crate jsonrpc_http_server;
|
||||
|
||||
extern crate ethcore_util as util;
|
||||
extern crate ethcore_bigint as bigint;
|
||||
extern crate ethcore_bytes as bytes;
|
||||
extern crate fetch;
|
||||
extern crate node_health;
|
||||
extern crate parity_dapps_glue as parity_dapps;
|
||||
extern crate parity_hash_fetch as hash_fetch;
|
||||
extern crate parity_reactor;
|
||||
extern crate parity_ui;
|
||||
extern crate hash;
|
||||
extern crate parity_version;
|
||||
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate futures;
|
||||
#[macro_use]
|
||||
extern crate mime;
|
||||
extern crate log;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
@@ -55,7 +57,8 @@ extern crate serde_derive;
|
||||
extern crate ethcore_devtools as devtools;
|
||||
#[cfg(test)]
|
||||
extern crate env_logger;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate parity_reactor;
|
||||
|
||||
mod endpoint;
|
||||
mod apps;
|
||||
@@ -64,31 +67,24 @@ mod router;
|
||||
mod handlers;
|
||||
mod api;
|
||||
mod proxypac;
|
||||
mod url;
|
||||
mod web;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::mem;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use futures_cpupool::CpuPool;
|
||||
use jsonrpc_http_server::{self as http, hyper, Origin};
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use fetch::Fetch;
|
||||
use futures_cpupool::CpuPool;
|
||||
use parity_reactor::Remote;
|
||||
use node_health::NodeHealth;
|
||||
|
||||
pub use hash_fetch::urlhint::ContractClient;
|
||||
pub use node_health::SyncStatus;
|
||||
|
||||
/// Indicates sync status
|
||||
pub trait SyncStatus: Send + Sync {
|
||||
/// Returns true if there is a major sync happening.
|
||||
fn is_major_importing(&self) -> bool;
|
||||
|
||||
/// Returns number of connected and ideal peers.
|
||||
fn peers(&self) -> (usize, usize);
|
||||
}
|
||||
|
||||
/// Validates Web Proxy tokens
|
||||
pub trait WebProxyTokens: Send + Sync {
|
||||
@@ -101,38 +97,65 @@ impl<F> WebProxyTokens for F where F: Fn(String) -> Option<Origin> + Send + Sync
|
||||
}
|
||||
|
||||
/// Current supported endpoints.
|
||||
#[derive(Default, Clone)]
|
||||
pub struct Endpoints {
|
||||
endpoints: endpoint::Endpoints,
|
||||
local_endpoints: Arc<RwLock<Vec<String>>>,
|
||||
endpoints: Arc<RwLock<endpoint::Endpoints>>,
|
||||
dapps_path: PathBuf,
|
||||
embeddable: Option<ParentFrameSettings>,
|
||||
pool: Option<CpuPool>,
|
||||
}
|
||||
|
||||
impl Endpoints {
|
||||
/// Returns a current list of app endpoints.
|
||||
pub fn list(&self) -> Vec<apps::App> {
|
||||
self.endpoints.iter().filter_map(|(ref k, ref e)| {
|
||||
self.endpoints.read().iter().filter_map(|(ref k, ref e)| {
|
||||
e.info().map(|ref info| apps::App::from_info(k, info))
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Check for any changes in the local dapps folder and update.
|
||||
pub fn refresh_local_dapps(&self) {
|
||||
let pool = match self.pool.as_ref() {
|
||||
None => return,
|
||||
Some(pool) => pool,
|
||||
};
|
||||
let new_local = apps::fs::local_endpoints(&self.dapps_path, self.embeddable.clone(), pool.clone());
|
||||
let old_local = mem::replace(&mut *self.local_endpoints.write(), new_local.keys().cloned().collect());
|
||||
let (_, to_remove): (_, Vec<_>) = old_local
|
||||
.into_iter()
|
||||
.partition(|k| new_local.contains_key(&k.clone()));
|
||||
|
||||
let mut endpoints = self.endpoints.write();
|
||||
// remove the dead dapps
|
||||
for k in to_remove {
|
||||
endpoints.remove(&k);
|
||||
}
|
||||
// new dapps to be added
|
||||
for (k, v) in new_local {
|
||||
if !endpoints.contains_key(&k) {
|
||||
endpoints.insert(k, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Dapps server as `jsonrpc-http-server` request middleware.
|
||||
pub struct Middleware {
|
||||
endpoints: Endpoints,
|
||||
router: router::Router,
|
||||
endpoints: endpoint::Endpoints,
|
||||
}
|
||||
|
||||
impl Middleware {
|
||||
/// Get local endpoints handle.
|
||||
pub fn endpoints(&self) -> Endpoints {
|
||||
Endpoints {
|
||||
endpoints: self.endpoints.clone(),
|
||||
}
|
||||
pub fn endpoints(&self) -> &Endpoints {
|
||||
&self.endpoints
|
||||
}
|
||||
|
||||
/// Creates new middleware for UI server.
|
||||
pub fn ui<F: Fetch>(
|
||||
ntp_server: &str,
|
||||
pool: CpuPool,
|
||||
remote: Remote,
|
||||
health: NodeHealth,
|
||||
dapps_domain: &str,
|
||||
registrar: Arc<ContractClient>,
|
||||
sync_status: Arc<SyncStatus>,
|
||||
@@ -141,18 +164,16 @@ impl Middleware {
|
||||
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(
|
||||
hash_fetch::urlhint::URLHintContract::new(registrar),
|
||||
sync_status.clone(),
|
||||
remote.clone(),
|
||||
fetch.clone(),
|
||||
pool.clone(),
|
||||
).embeddable_on(None).allow_dapps(false));
|
||||
let special = {
|
||||
let mut special = special_endpoints(
|
||||
ntp_server,
|
||||
pool,
|
||||
pool.clone(),
|
||||
health,
|
||||
content_fetcher.clone(),
|
||||
remote.clone(),
|
||||
sync_status.clone(),
|
||||
);
|
||||
special.insert(router::SpecialEndpoint::Home, Some(apps::ui()));
|
||||
special.insert(router::SpecialEndpoint::Home, Some(apps::ui(pool.clone())));
|
||||
special
|
||||
};
|
||||
let router = router::Router::new(
|
||||
@@ -164,18 +185,18 @@ impl Middleware {
|
||||
);
|
||||
|
||||
Middleware {
|
||||
router: router,
|
||||
endpoints: Default::default(),
|
||||
router: router,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates new Dapps server middleware.
|
||||
pub fn dapps<F: Fetch>(
|
||||
ntp_server: &str,
|
||||
pool: CpuPool,
|
||||
remote: Remote,
|
||||
health: NodeHealth,
|
||||
ui_address: Option<(String, u16)>,
|
||||
extra_embed_on: Vec<(String, u16)>,
|
||||
extra_script_src: Vec<(String, u16)>,
|
||||
dapps_path: PathBuf,
|
||||
extra_dapps: Vec<PathBuf>,
|
||||
dapps_domain: &str,
|
||||
@@ -184,30 +205,35 @@ impl Middleware {
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
fetch: F,
|
||||
) -> Self {
|
||||
let embeddable = as_embeddable(ui_address, extra_embed_on, dapps_domain);
|
||||
let embeddable = as_embeddable(ui_address, extra_embed_on, extra_script_src, dapps_domain);
|
||||
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(
|
||||
hash_fetch::urlhint::URLHintContract::new(registrar),
|
||||
sync_status.clone(),
|
||||
remote.clone(),
|
||||
fetch.clone(),
|
||||
pool.clone(),
|
||||
).embeddable_on(embeddable.clone()).allow_dapps(true));
|
||||
let endpoints = apps::all_endpoints(
|
||||
dapps_path,
|
||||
let (local_endpoints, endpoints) = apps::all_endpoints(
|
||||
dapps_path.clone(),
|
||||
extra_dapps,
|
||||
dapps_domain,
|
||||
embeddable.clone(),
|
||||
web_proxy_tokens,
|
||||
remote.clone(),
|
||||
fetch.clone(),
|
||||
pool.clone(),
|
||||
);
|
||||
let endpoints = Endpoints {
|
||||
endpoints: Arc::new(RwLock::new(endpoints)),
|
||||
dapps_path,
|
||||
local_endpoints: Arc::new(RwLock::new(local_endpoints)),
|
||||
embeddable: embeddable.clone(),
|
||||
pool: Some(pool.clone()),
|
||||
};
|
||||
|
||||
let special = {
|
||||
let mut special = special_endpoints(
|
||||
ntp_server,
|
||||
pool,
|
||||
pool.clone(),
|
||||
health,
|
||||
content_fetcher.clone(),
|
||||
remote.clone(),
|
||||
sync_status,
|
||||
);
|
||||
special.insert(
|
||||
router::SpecialEndpoint::Home,
|
||||
@@ -225,33 +251,29 @@ impl Middleware {
|
||||
);
|
||||
|
||||
Middleware {
|
||||
router: router,
|
||||
endpoints: endpoints,
|
||||
endpoints,
|
||||
router,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl http::RequestMiddleware for Middleware {
|
||||
fn on_request(&self, req: &hyper::server::Request<hyper::net::HttpStream>, control: &hyper::Control) -> http::RequestMiddlewareAction {
|
||||
self.router.on_request(req, control)
|
||||
fn on_request(&self, req: hyper::Request) -> http::RequestMiddlewareAction {
|
||||
self.router.on_request(req)
|
||||
}
|
||||
}
|
||||
|
||||
fn special_endpoints(
|
||||
ntp_server: &str,
|
||||
pool: CpuPool,
|
||||
health: NodeHealth,
|
||||
content_fetcher: Arc<apps::fetcher::Fetcher>,
|
||||
remote: Remote,
|
||||
sync_status: Arc<SyncStatus>,
|
||||
) -> HashMap<router::SpecialEndpoint, Option<Box<endpoint::Endpoint>>> {
|
||||
let mut special = HashMap::new();
|
||||
special.insert(router::SpecialEndpoint::Rpc, None);
|
||||
special.insert(router::SpecialEndpoint::Utils, Some(apps::utils()));
|
||||
special.insert(router::SpecialEndpoint::Utils, Some(apps::utils(pool)));
|
||||
special.insert(router::SpecialEndpoint::Api, Some(api::RestApi::new(
|
||||
content_fetcher,
|
||||
sync_status,
|
||||
api::TimeChecker::new(ntp_server.into(), pool),
|
||||
remote,
|
||||
health,
|
||||
)));
|
||||
special
|
||||
}
|
||||
@@ -263,12 +285,14 @@ fn address(host: &str, port: u16) -> String {
|
||||
fn as_embeddable(
|
||||
ui_address: Option<(String, u16)>,
|
||||
extra_embed_on: Vec<(String, u16)>,
|
||||
extra_script_src: Vec<(String, u16)>,
|
||||
dapps_domain: &str,
|
||||
) -> Option<ParentFrameSettings> {
|
||||
ui_address.map(|(host, port)| ParentFrameSettings {
|
||||
host,
|
||||
port,
|
||||
extra_embed_on,
|
||||
extra_script_src,
|
||||
dapps_domain: dapps_domain.to_owned(),
|
||||
})
|
||||
}
|
||||
@@ -289,8 +313,10 @@ pub struct ParentFrameSettings {
|
||||
pub host: String,
|
||||
/// Port
|
||||
pub port: u16,
|
||||
/// Additional pages the pages can be embedded on.
|
||||
/// Additional URLs the dapps can be embedded on.
|
||||
pub extra_embed_on: Vec<(String, u16)>,
|
||||
/// Additional URLs the dapp scripts can be loaded from.
|
||||
pub extra_script_src: Vec<(String, u16)>,
|
||||
/// Dapps Domain (web3.site)
|
||||
pub dapps_domain: String,
|
||||
}
|
||||
|
||||
@@ -14,71 +14,62 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::io;
|
||||
use futures::future;
|
||||
use futures_cpupool::CpuPool;
|
||||
use hyper::mime::{self, Mime};
|
||||
use itertools::Itertools;
|
||||
use parity_dapps::{WebApp, Info};
|
||||
|
||||
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Request, Response};
|
||||
use page::{handler, PageCache};
|
||||
use std::sync::Arc;
|
||||
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
|
||||
use parity_dapps::{WebApp, File, Info};
|
||||
use Embeddable;
|
||||
|
||||
pub struct PageEndpoint<T : WebApp + 'static> {
|
||||
pub struct Dapp<T: WebApp + 'static> {
|
||||
/// futures cpu pool
|
||||
pool: CpuPool,
|
||||
/// Content of the files
|
||||
pub app: Arc<T>,
|
||||
/// Prefix to strip from the path (when `None` deducted from `app_id`)
|
||||
pub prefix: Option<String>,
|
||||
app: T,
|
||||
/// Safe to be loaded in frame by other origin. (use wisely!)
|
||||
safe_to_embed_on: Embeddable,
|
||||
info: EndpointInfo,
|
||||
fallback_to_index_html: bool,
|
||||
}
|
||||
|
||||
impl<T: WebApp + 'static> PageEndpoint<T> {
|
||||
/// Creates new `PageEndpoint` for builtin (compile time) Dapp.
|
||||
pub fn new(app: T) -> Self {
|
||||
impl<T: WebApp + 'static> Dapp<T> {
|
||||
/// Creates new `Dapp` for builtin (compile time) Dapp.
|
||||
pub fn new(pool: CpuPool, app: T) -> Self {
|
||||
let info = app.info();
|
||||
PageEndpoint {
|
||||
app: Arc::new(app),
|
||||
prefix: None,
|
||||
Dapp {
|
||||
pool,
|
||||
app,
|
||||
safe_to_embed_on: None,
|
||||
info: EndpointInfo::from(info),
|
||||
fallback_to_index_html: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new `PageEndpoint` for builtin (compile time) Dapp.
|
||||
/// Creates a new `Dapp` for builtin (compile time) Dapp.
|
||||
/// Instead of returning 404 this endpoint will always server index.html.
|
||||
pub fn with_fallback_to_index(app: T) -> Self {
|
||||
pub fn with_fallback_to_index(pool: CpuPool, app: T) -> Self {
|
||||
let info = app.info();
|
||||
PageEndpoint {
|
||||
app: Arc::new(app),
|
||||
prefix: None,
|
||||
Dapp {
|
||||
pool,
|
||||
app,
|
||||
safe_to_embed_on: None,
|
||||
info: EndpointInfo::from(info),
|
||||
fallback_to_index_html: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create new `PageEndpoint` and specify prefix that should be removed before looking for a file.
|
||||
/// It's used only for special endpoints (i.e. `/parity-utils/`)
|
||||
/// So `/parity-utils/inject.js` will be resolved to `/inject.js` is prefix is set.
|
||||
pub fn with_prefix(app: T, prefix: String) -> Self {
|
||||
let info = app.info();
|
||||
PageEndpoint {
|
||||
app: Arc::new(app),
|
||||
prefix: Some(prefix),
|
||||
safe_to_embed_on: None,
|
||||
info: EndpointInfo::from(info),
|
||||
fallback_to_index_html: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates new `PageEndpoint` which can be safely used in iframe
|
||||
/// Creates new `Dapp` which can be safely used in iframe
|
||||
/// even from different origin. It might be dangerous (clickjacking).
|
||||
/// Use wisely!
|
||||
pub fn new_safe_to_embed(app: T, address: Embeddable) -> Self {
|
||||
pub fn new_safe_to_embed(pool: CpuPool, app: T, address: Embeddable) -> Self {
|
||||
let info = app.info();
|
||||
PageEndpoint {
|
||||
app: Arc::new(app),
|
||||
prefix: None,
|
||||
Dapp {
|
||||
pool,
|
||||
app,
|
||||
safe_to_embed_on: address,
|
||||
info: EndpointInfo::from(info),
|
||||
fallback_to_index_html: false,
|
||||
@@ -86,21 +77,51 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: WebApp> Endpoint for PageEndpoint<T> {
|
||||
|
||||
impl<T: WebApp> Endpoint for Dapp<T> {
|
||||
fn info(&self) -> Option<&EndpointInfo> {
|
||||
Some(&self.info)
|
||||
}
|
||||
|
||||
fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
|
||||
Box::new(handler::PageHandler {
|
||||
app: BuiltinDapp::new(self.app.clone(), self.fallback_to_index_html),
|
||||
prefix: self.prefix.clone(),
|
||||
path: path,
|
||||
file: handler::ServedFile::new(self.safe_to_embed_on.clone()),
|
||||
fn respond(&self, path: EndpointPath, _req: Request) -> Response {
|
||||
trace!(target: "dapps", "Builtin file path: {:?}", path);
|
||||
let file_path = if path.has_no_params() {
|
||||
"index.html".to_owned()
|
||||
} else {
|
||||
path.app_params.into_iter().filter(|x| !x.is_empty()).join("/")
|
||||
};
|
||||
trace!(target: "dapps", "Builtin file: {:?}", file_path);
|
||||
|
||||
let file = {
|
||||
let file = |path| self.app.file(path).map(|file| {
|
||||
let content_type = match file.content_type.parse() {
|
||||
Ok(mime) => mime,
|
||||
Err(_) => {
|
||||
warn!(target: "dapps", "invalid MIME type: {}", file.content_type);
|
||||
mime::TEXT_HTML
|
||||
},
|
||||
};
|
||||
BuiltinFile {
|
||||
content_type,
|
||||
content: io::Cursor::new(file.content),
|
||||
}
|
||||
});
|
||||
let res = file(&file_path);
|
||||
if self.fallback_to_index_html {
|
||||
res.or_else(|| file("index.html"))
|
||||
} else {
|
||||
res
|
||||
}
|
||||
};
|
||||
|
||||
let (reader, response) = handler::PageHandler {
|
||||
file,
|
||||
cache: PageCache::Disabled,
|
||||
safe_to_embed_on: self.safe_to_embed_on.clone(),
|
||||
})
|
||||
}.into_response();
|
||||
|
||||
self.pool.spawn(reader).forget();
|
||||
|
||||
Box::new(future::ok(response))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,66 +137,20 @@ impl From<Info> for EndpointInfo {
|
||||
}
|
||||
}
|
||||
|
||||
struct BuiltinDapp<T: WebApp + 'static> {
|
||||
app: Arc<T>,
|
||||
fallback_to_index_html: bool,
|
||||
|
||||
struct BuiltinFile {
|
||||
content_type: Mime,
|
||||
content: io::Cursor<&'static [u8]>,
|
||||
}
|
||||
|
||||
impl<T: WebApp + 'static> BuiltinDapp<T> {
|
||||
fn new(app: Arc<T>, fallback_to_index_html: bool) -> Self {
|
||||
BuiltinDapp {
|
||||
app: app,
|
||||
fallback_to_index_html: fallback_to_index_html,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: WebApp + 'static> handler::Dapp for BuiltinDapp<T> {
|
||||
type DappFile = BuiltinDappFile<T>;
|
||||
|
||||
fn file(&self, path: &str) -> Option<Self::DappFile> {
|
||||
let file = |path| self.app.file(path).map(|_| {
|
||||
BuiltinDappFile {
|
||||
app: self.app.clone(),
|
||||
path: path.into(),
|
||||
write_pos: 0,
|
||||
}
|
||||
});
|
||||
let res = file(path);
|
||||
if self.fallback_to_index_html {
|
||||
res.or_else(|| file("index.html"))
|
||||
} else {
|
||||
res
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct BuiltinDappFile<T: WebApp + 'static> {
|
||||
app: Arc<T>,
|
||||
path: String,
|
||||
write_pos: usize,
|
||||
}
|
||||
|
||||
impl<T: WebApp + 'static> BuiltinDappFile<T> {
|
||||
fn file(&self) -> &File {
|
||||
self.app.file(&self.path).expect("Check is done when structure is created.")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: WebApp + 'static> handler::DappFile for BuiltinDappFile<T> {
|
||||
fn content_type(&self) -> &str {
|
||||
self.file().content_type
|
||||
}
|
||||
|
||||
fn is_drained(&self) -> bool {
|
||||
self.write_pos == self.file().content.len()
|
||||
}
|
||||
|
||||
fn next_chunk(&mut self) -> &[u8] {
|
||||
&self.file().content[self.write_pos..]
|
||||
}
|
||||
|
||||
fn bytes_written(&mut self, bytes: usize) {
|
||||
self.write_pos += bytes;
|
||||
impl handler::DappFile for BuiltinFile {
|
||||
type Reader = io::Cursor<&'static [u8]>;
|
||||
|
||||
fn content_type(&self) -> &Mime {
|
||||
&self.content_type
|
||||
}
|
||||
|
||||
fn into_reader(self) -> Self::Reader {
|
||||
self.content
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,61 +14,25 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use time::{self, Duration};
|
||||
use std::io;
|
||||
use std::time::{Duration, SystemTime};
|
||||
use hyper::{self, header, StatusCode};
|
||||
use hyper::mime::Mime;
|
||||
|
||||
use hyper::header;
|
||||
use hyper::server;
|
||||
use hyper::uri::RequestUri;
|
||||
use hyper::net::HttpStream;
|
||||
use hyper::status::StatusCode;
|
||||
use hyper::{Decoder, Encoder, Next};
|
||||
use endpoint::EndpointPath;
|
||||
use handlers::{ContentHandler, add_security_headers};
|
||||
use handlers::{Reader, ContentHandler, add_security_headers};
|
||||
use {Embeddable};
|
||||
|
||||
/// Represents a file that can be sent to client.
|
||||
/// Implementation should keep track of bytes already sent internally.
|
||||
pub trait DappFile: Send {
|
||||
pub trait DappFile {
|
||||
/// A reader type returned by this file.
|
||||
type Reader: io::Read;
|
||||
|
||||
/// Returns a content-type of this file.
|
||||
fn content_type(&self) -> &str;
|
||||
fn content_type(&self) -> &Mime;
|
||||
|
||||
/// Checks if all bytes from that file were written.
|
||||
fn is_drained(&self) -> bool;
|
||||
|
||||
/// Fetch next chunk to write to the client.
|
||||
fn next_chunk(&mut self) -> &[u8];
|
||||
|
||||
/// How many files have been written to the client.
|
||||
fn bytes_written(&mut self, bytes: usize);
|
||||
}
|
||||
|
||||
/// Dapp as a (dynamic) set of files.
|
||||
pub trait Dapp: Send + 'static {
|
||||
/// File type
|
||||
type DappFile: DappFile;
|
||||
|
||||
/// Returns file under given path.
|
||||
fn file(&self, path: &str) -> Option<Self::DappFile>;
|
||||
}
|
||||
|
||||
/// Currently served by `PageHandler` file
|
||||
pub enum ServedFile<T: Dapp> {
|
||||
/// File from dapp
|
||||
File(T::DappFile),
|
||||
/// Error (404)
|
||||
Error(ContentHandler),
|
||||
}
|
||||
|
||||
impl<T: Dapp> ServedFile<T> {
|
||||
pub fn new(embeddable_on: Embeddable) -> Self {
|
||||
ServedFile::Error(ContentHandler::error(
|
||||
StatusCode::NotFound,
|
||||
"404 Not Found",
|
||||
"Requested dapp resource was not found.",
|
||||
None,
|
||||
embeddable_on,
|
||||
))
|
||||
}
|
||||
/// Convert this file into io::Read instance.
|
||||
fn into_reader(self) -> Self::Reader where Self: Sized;
|
||||
}
|
||||
|
||||
/// Defines what cache headers should be appended to returned resources.
|
||||
@@ -84,194 +48,55 @@ impl Default for PageCache {
|
||||
}
|
||||
}
|
||||
|
||||
/// A generic type for `PageHandler` allowing to set the URL.
|
||||
/// Used by dapps fetching to set the URL after the content was downloaded.
|
||||
pub trait PageHandlerWaiting: server::Handler<HttpStream> + Send {
|
||||
fn set_uri(&mut self, uri: &RequestUri);
|
||||
}
|
||||
|
||||
/// A handler for a single webapp.
|
||||
/// Resolves correct paths and serves as a plumbing code between
|
||||
/// hyper server and dapp.
|
||||
pub struct PageHandler<T: Dapp> {
|
||||
/// A Dapp.
|
||||
pub app: T,
|
||||
pub struct PageHandler<T: DappFile> {
|
||||
/// File currently being served
|
||||
pub file: ServedFile<T>,
|
||||
/// Optional prefix to strip from path.
|
||||
pub prefix: Option<String>,
|
||||
/// Requested path.
|
||||
pub path: EndpointPath,
|
||||
pub file: Option<T>,
|
||||
/// Flag indicating if the file can be safely embeded (put in iframe).
|
||||
pub safe_to_embed_on: Embeddable,
|
||||
/// Cache settings for this page.
|
||||
pub cache: PageCache,
|
||||
}
|
||||
|
||||
impl<T: Dapp> PageHandlerWaiting for PageHandler<T> {
|
||||
fn set_uri(&mut self, uri: &RequestUri) {
|
||||
trace!(target: "dapps", "Setting URI: {:?}", uri);
|
||||
self.file = match *uri {
|
||||
RequestUri::AbsolutePath { ref path, .. } => {
|
||||
self.app.file(&self.extract_path(path))
|
||||
},
|
||||
RequestUri::AbsoluteUri(ref url) => {
|
||||
self.app.file(&self.extract_path(url.path()))
|
||||
},
|
||||
_ => None,
|
||||
}.map_or_else(|| ServedFile::new(self.safe_to_embed_on.clone()), |f| ServedFile::File(f));
|
||||
}
|
||||
}
|
||||
impl<T: DappFile> PageHandler<T> {
|
||||
pub fn into_response(self) -> (Option<Reader<T::Reader>>, hyper::Response) {
|
||||
let file = match self.file {
|
||||
None => return (None, ContentHandler::error(
|
||||
StatusCode::NotFound,
|
||||
"File not found",
|
||||
"Requested file has not been found.",
|
||||
None,
|
||||
self.safe_to_embed_on,
|
||||
).into()),
|
||||
Some(file) => file,
|
||||
};
|
||||
|
||||
impl<T: Dapp> PageHandler<T> {
|
||||
fn extract_path(&self, path: &str) -> String {
|
||||
let app_id = &self.path.app_id;
|
||||
let prefix = "/".to_owned() + self.prefix.as_ref().unwrap_or(app_id);
|
||||
let prefix_with_slash = prefix.clone() + "/";
|
||||
let query_pos = path.find('?').unwrap_or_else(|| path.len());
|
||||
let mut res = hyper::Response::new()
|
||||
.with_status(StatusCode::Ok);
|
||||
|
||||
// Index file support
|
||||
match path == "/" || path == &prefix || path == &prefix_with_slash {
|
||||
true => "index.html".to_owned(),
|
||||
false => if path.starts_with(&prefix_with_slash) {
|
||||
path[prefix_with_slash.len()..query_pos].to_owned()
|
||||
} else if path.starts_with("/") {
|
||||
path[1..query_pos].to_owned()
|
||||
} else {
|
||||
path[0..query_pos].to_owned()
|
||||
// headers
|
||||
{
|
||||
let mut headers = res.headers_mut();
|
||||
|
||||
if let PageCache::Enabled = self.cache {
|
||||
let validity_secs = 365u32 * 24 * 3600;
|
||||
let validity = Duration::from_secs(validity_secs as u64);
|
||||
headers.set(header::CacheControl(vec![
|
||||
header::CacheDirective::Public,
|
||||
header::CacheDirective::MaxAge(validity_secs),
|
||||
]));
|
||||
headers.set(header::Expires(header::HttpDate::from(SystemTime::now() + validity)));
|
||||
}
|
||||
|
||||
headers.set(header::ContentType(file.content_type().to_owned()));
|
||||
|
||||
add_security_headers(&mut headers, self.safe_to_embed_on);
|
||||
}
|
||||
|
||||
let (reader, body) = Reader::pair(file.into_reader(), Vec::new());
|
||||
res.set_body(body);
|
||||
(Some(reader), res)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
|
||||
fn on_request(&mut self, req: server::Request<HttpStream>) -> Next {
|
||||
self.set_uri(req.uri());
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, _decoder: &mut Decoder<HttpStream>) -> Next {
|
||||
Next::write()
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
match self.file {
|
||||
ServedFile::File(ref f) => {
|
||||
res.set_status(StatusCode::Ok);
|
||||
|
||||
if let PageCache::Enabled = self.cache {
|
||||
let mut headers = res.headers_mut();
|
||||
let validity = Duration::days(365);
|
||||
headers.set(header::CacheControl(vec![
|
||||
header::CacheDirective::Public,
|
||||
header::CacheDirective::MaxAge(validity.num_seconds() as u32),
|
||||
]));
|
||||
headers.set(header::Expires(header::HttpDate(time::now() + validity)));
|
||||
}
|
||||
|
||||
match f.content_type().parse() {
|
||||
Ok(mime) => res.headers_mut().set(header::ContentType(mime)),
|
||||
Err(()) => debug!(target: "dapps", "invalid MIME type: {}", f.content_type()),
|
||||
}
|
||||
|
||||
// Security headers:
|
||||
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.take());
|
||||
Next::write()
|
||||
},
|
||||
ServedFile::Error(ref mut handler) => {
|
||||
handler.on_response(res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
|
||||
match self.file {
|
||||
ServedFile::Error(ref mut handler) => handler.on_response_writable(encoder),
|
||||
ServedFile::File(ref f) if f.is_drained() => Next::end(),
|
||||
ServedFile::File(ref mut f) => match encoder.write(f.next_chunk()) {
|
||||
Ok(bytes) => {
|
||||
f.bytes_written(bytes);
|
||||
Next::write()
|
||||
},
|
||||
Err(e) => match e.kind() {
|
||||
::std::io::ErrorKind::WouldBlock => Next::write(),
|
||||
_ => Next::end(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
pub struct TestWebAppFile;
|
||||
|
||||
impl DappFile for TestWebAppFile {
|
||||
fn content_type(&self) -> &str {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn is_drained(&self) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn next_chunk(&mut self) -> &[u8] {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn bytes_written(&mut self, _bytes: usize) {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct TestWebapp;
|
||||
|
||||
impl Dapp for TestWebapp {
|
||||
type DappFile = TestWebAppFile;
|
||||
|
||||
fn file(&self, _path: &str) -> Option<Self::DappFile> {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_extract_path_with_appid() {
|
||||
|
||||
// given
|
||||
let path1 = "/";
|
||||
let path2= "/test.css";
|
||||
let path3 = "/app/myfile.txt";
|
||||
let path4 = "/app/myfile.txt?query=123";
|
||||
let page_handler = PageHandler {
|
||||
app: test::TestWebapp,
|
||||
prefix: None,
|
||||
path: EndpointPath {
|
||||
app_id: "app".to_owned(),
|
||||
app_params: vec![],
|
||||
host: "".to_owned(),
|
||||
port: 8080,
|
||||
using_dapps_domains: true,
|
||||
},
|
||||
file: ServedFile::new(None),
|
||||
cache: Default::default(),
|
||||
safe_to_embed_on: None,
|
||||
};
|
||||
|
||||
// when
|
||||
let res1 = page_handler.extract_path(path1);
|
||||
let res2 = page_handler.extract_path(path2);
|
||||
let res3 = page_handler.extract_path(path3);
|
||||
let res4 = page_handler.extract_path(path4);
|
||||
|
||||
// then
|
||||
assert_eq!(&res1, "index.html");
|
||||
assert_eq!(&res2, "test.css");
|
||||
assert_eq!(&res3, "myfile.txt");
|
||||
assert_eq!(&res4, "myfile.txt");
|
||||
}
|
||||
|
||||
@@ -15,16 +15,18 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use mime_guess;
|
||||
use std::io::{Seek, Read, SeekFrom};
|
||||
use std::fs;
|
||||
use std::{fs, fmt};
|
||||
use std::path::{Path, PathBuf};
|
||||
use page::handler::{self, PageCache, PageHandlerWaiting};
|
||||
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
|
||||
use mime::Mime;
|
||||
use futures::{future};
|
||||
use futures_cpupool::CpuPool;
|
||||
use page::handler::{self, PageCache};
|
||||
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Request, Response};
|
||||
use hyper::mime::Mime;
|
||||
use Embeddable;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LocalPageEndpoint {
|
||||
#[derive(Clone)]
|
||||
pub struct Dapp {
|
||||
pool: CpuPool,
|
||||
path: PathBuf,
|
||||
mime: Option<Mime>,
|
||||
info: Option<EndpointInfo>,
|
||||
@@ -32,23 +34,37 @@ pub struct LocalPageEndpoint {
|
||||
embeddable_on: Embeddable,
|
||||
}
|
||||
|
||||
impl LocalPageEndpoint {
|
||||
pub fn new(path: PathBuf, info: EndpointInfo, cache: PageCache, embeddable_on: Embeddable) -> Self {
|
||||
LocalPageEndpoint {
|
||||
path: path,
|
||||
impl fmt::Debug for Dapp {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt.debug_struct("Dapp")
|
||||
.field("path", &self.path)
|
||||
.field("mime", &self.mime)
|
||||
.field("info", &self.info)
|
||||
.field("cache", &self.cache)
|
||||
.field("embeddable_on", &self.embeddable_on)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Dapp {
|
||||
pub fn new(pool: CpuPool, path: PathBuf, info: EndpointInfo, cache: PageCache, embeddable_on: Embeddable) -> Self {
|
||||
Dapp {
|
||||
pool,
|
||||
path,
|
||||
mime: None,
|
||||
info: Some(info),
|
||||
cache: cache,
|
||||
embeddable_on: embeddable_on,
|
||||
cache,
|
||||
embeddable_on,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn single_file(path: PathBuf, mime: Mime, cache: PageCache) -> Self {
|
||||
LocalPageEndpoint {
|
||||
path: path,
|
||||
pub fn single_file(pool: CpuPool, path: PathBuf, mime: Mime, cache: PageCache) -> Self {
|
||||
Dapp {
|
||||
pool,
|
||||
path,
|
||||
mime: Some(mime),
|
||||
info: None,
|
||||
cache: cache,
|
||||
cache,
|
||||
embeddable_on: None,
|
||||
}
|
||||
}
|
||||
@@ -57,125 +73,75 @@ impl LocalPageEndpoint {
|
||||
self.path.clone()
|
||||
}
|
||||
|
||||
fn page_handler_with_mime(&self, path: EndpointPath, mime: &Mime) -> handler::PageHandler<LocalSingleFile> {
|
||||
handler::PageHandler {
|
||||
app: LocalSingleFile { path: self.path.clone(), mime: format!("{}", mime) },
|
||||
prefix: None,
|
||||
path: path,
|
||||
file: handler::ServedFile::new(None),
|
||||
safe_to_embed_on: self.embeddable_on.clone(),
|
||||
cache: self.cache,
|
||||
}
|
||||
}
|
||||
|
||||
fn page_handler(&self, path: EndpointPath) -> handler::PageHandler<LocalDapp> {
|
||||
handler::PageHandler {
|
||||
app: LocalDapp { path: self.path.clone() },
|
||||
prefix: None,
|
||||
path: path,
|
||||
file: handler::ServedFile::new(None),
|
||||
safe_to_embed_on: self.embeddable_on.clone(),
|
||||
cache: self.cache,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_page_handler(&self, path: EndpointPath) -> Box<PageHandlerWaiting> {
|
||||
fn get_file(&self, path: &EndpointPath) -> Option<LocalFile> {
|
||||
if let Some(ref mime) = self.mime {
|
||||
Box::new(self.page_handler_with_mime(path, mime))
|
||||
} else {
|
||||
Box::new(self.page_handler(path))
|
||||
return LocalFile::from_path(&self.path, mime.to_owned());
|
||||
}
|
||||
|
||||
let mut file_path = self.path.to_owned();
|
||||
|
||||
if path.has_no_params() {
|
||||
file_path.push("index.html");
|
||||
} else {
|
||||
for part in &path.app_params {
|
||||
file_path.push(part);
|
||||
}
|
||||
}
|
||||
|
||||
let mime = mime_guess::guess_mime_type(&file_path);
|
||||
LocalFile::from_path(&file_path, mime)
|
||||
}
|
||||
|
||||
|
||||
pub fn to_response(&self, path: &EndpointPath) -> Response {
|
||||
let (reader, response) = handler::PageHandler {
|
||||
file: self.get_file(path),
|
||||
cache: self.cache,
|
||||
safe_to_embed_on: self.embeddable_on.clone(),
|
||||
}.into_response();
|
||||
|
||||
self.pool.spawn(reader).forget();
|
||||
|
||||
Box::new(future::ok(response))
|
||||
}
|
||||
}
|
||||
|
||||
impl Endpoint for LocalPageEndpoint {
|
||||
impl Endpoint for Dapp {
|
||||
fn info(&self) -> Option<&EndpointInfo> {
|
||||
self.info.as_ref()
|
||||
}
|
||||
|
||||
fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
|
||||
if let Some(ref mime) = self.mime {
|
||||
Box::new(self.page_handler_with_mime(path, mime))
|
||||
} else {
|
||||
Box::new(self.page_handler(path))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalSingleFile {
|
||||
path: PathBuf,
|
||||
mime: String,
|
||||
}
|
||||
|
||||
impl handler::Dapp for LocalSingleFile {
|
||||
type DappFile = LocalFile;
|
||||
|
||||
fn file(&self, _path: &str) -> Option<Self::DappFile> {
|
||||
LocalFile::from_path(&self.path, Some(&self.mime))
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalDapp {
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
impl handler::Dapp for LocalDapp {
|
||||
type DappFile = LocalFile;
|
||||
|
||||
fn file(&self, file_path: &str) -> Option<Self::DappFile> {
|
||||
let mut path = self.path.clone();
|
||||
for part in file_path.split('/') {
|
||||
path.push(part);
|
||||
}
|
||||
LocalFile::from_path(&path, None)
|
||||
fn respond(&self, path: EndpointPath, _req: Request) -> Response {
|
||||
self.to_response(&path)
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalFile {
|
||||
content_type: String,
|
||||
buffer: [u8; 4096],
|
||||
content_type: Mime,
|
||||
file: fs::File,
|
||||
len: u64,
|
||||
pos: u64,
|
||||
}
|
||||
|
||||
impl LocalFile {
|
||||
fn from_path<P: AsRef<Path>>(path: P, mime: Option<&str>) -> Option<Self> {
|
||||
fn from_path<P: AsRef<Path>>(path: P, content_type: Mime) -> Option<Self> {
|
||||
trace!(target: "dapps", "Local file: {:?}", path.as_ref());
|
||||
// Check if file exists
|
||||
fs::File::open(&path).ok().map(|file| {
|
||||
let content_type = mime.map(|mime| mime.to_owned())
|
||||
.unwrap_or_else(|| mime_guess::guess_mime_type(path).to_string());
|
||||
let len = file.metadata().ok().map_or(0, |meta| meta.len());
|
||||
LocalFile {
|
||||
content_type: content_type,
|
||||
buffer: [0; 4096],
|
||||
file: file,
|
||||
pos: 0,
|
||||
len: len,
|
||||
content_type,
|
||||
file,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl handler::DappFile for LocalFile {
|
||||
fn content_type(&self) -> &str {
|
||||
type Reader = fs::File;
|
||||
|
||||
fn content_type(&self) -> &Mime {
|
||||
&self.content_type
|
||||
}
|
||||
|
||||
fn is_drained(&self) -> bool {
|
||||
self.pos == self.len
|
||||
}
|
||||
|
||||
fn next_chunk(&mut self) -> &[u8] {
|
||||
let _ = self.file.seek(SeekFrom::Start(self.pos));
|
||||
if let Ok(n) = self.file.read(&mut self.buffer) {
|
||||
&self.buffer[0..n]
|
||||
} else {
|
||||
&self.buffer[0..0]
|
||||
}
|
||||
}
|
||||
|
||||
fn bytes_written(&mut self, bytes: usize) {
|
||||
self.pos += bytes as u64;
|
||||
fn into_reader(self) -> Self::Reader {
|
||||
self.file
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,11 +15,9 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
mod builtin;
|
||||
mod local;
|
||||
pub mod builtin;
|
||||
pub mod local;
|
||||
mod handler;
|
||||
|
||||
pub use self::local::LocalPageEndpoint;
|
||||
pub use self::builtin::PageEndpoint;
|
||||
pub use self::handler::{PageCache, PageHandlerWaiting};
|
||||
pub use self::handler::PageCache;
|
||||
|
||||
|
||||
@@ -16,9 +16,11 @@
|
||||
|
||||
//! Serving ProxyPac file
|
||||
|
||||
use endpoint::{Endpoint, Handler, EndpointPath};
|
||||
use handlers::ContentHandler;
|
||||
use apps::HOME_PAGE;
|
||||
use endpoint::{Endpoint, Request, Response, EndpointPath};
|
||||
use futures::future;
|
||||
use handlers::ContentHandler;
|
||||
use hyper::mime;
|
||||
use {address, Embeddable};
|
||||
|
||||
pub struct ProxyPac {
|
||||
@@ -33,7 +35,7 @@ impl ProxyPac {
|
||||
}
|
||||
|
||||
impl Endpoint for ProxyPac {
|
||||
fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
|
||||
fn respond(&self, path: EndpointPath, _req: Request) -> Response {
|
||||
let ui = self.embeddable
|
||||
.as_ref()
|
||||
.map(|ref parent| address(&parent.host, parent.port))
|
||||
@@ -57,7 +59,9 @@ function FindProxyForURL(url, host) {{
|
||||
"#,
|
||||
HOME_PAGE, self.dapps_domain, path.host, path.port, ui);
|
||||
|
||||
Box::new(ContentHandler::ok(content, mime!(Application/Javascript)))
|
||||
Box::new(future::ok(
|
||||
ContentHandler::ok(content, mime::TEXT_JAVASCRIPT).into()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,18 +17,17 @@
|
||||
//! Router implementation
|
||||
//! Dispatch requests to proper application.
|
||||
|
||||
use std::cmp;
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use url::{Url, Host};
|
||||
use hyper::{self, server, header, Control};
|
||||
use hyper::net::HttpStream;
|
||||
use futures::future;
|
||||
use hyper::{self, header, Uri};
|
||||
use jsonrpc_http_server as http;
|
||||
|
||||
use apps;
|
||||
use apps::fetcher::Fetcher;
|
||||
use endpoint::{Endpoint, Endpoints, EndpointPath, Handler};
|
||||
use endpoint::{self, Endpoint, EndpointPath};
|
||||
use Endpoints;
|
||||
use handlers;
|
||||
use Embeddable;
|
||||
|
||||
@@ -42,6 +41,13 @@ pub enum SpecialEndpoint {
|
||||
None,
|
||||
}
|
||||
|
||||
enum Response {
|
||||
Some(endpoint::Response),
|
||||
None(hyper::Request),
|
||||
}
|
||||
|
||||
/// An endpoint router.
|
||||
/// Dispatches the request to particular Endpoint by requested uri/path.
|
||||
pub struct Router {
|
||||
endpoints: Option<Endpoints>,
|
||||
fetch: Arc<Fetcher>,
|
||||
@@ -50,57 +56,63 @@ pub struct Router {
|
||||
dapps_domain: String,
|
||||
}
|
||||
|
||||
impl http::RequestMiddleware for Router {
|
||||
fn on_request(&self, req: &server::Request<HttpStream>, control: &Control) -> http::RequestMiddlewareAction {
|
||||
impl Router {
|
||||
fn resolve_request(&self, req: hyper::Request, refresh_dapps: bool) -> (bool, Response) {
|
||||
// Choose proper handler depending on path / domain
|
||||
let url = handlers::extract_url(req);
|
||||
let endpoint = extract_endpoint(&url, &self.dapps_domain);
|
||||
let referer = extract_referer_endpoint(req, &self.dapps_domain);
|
||||
let endpoint = extract_endpoint(req.uri(), req.headers().get(), &self.dapps_domain);
|
||||
let referer = extract_referer_endpoint(&req, &self.dapps_domain);
|
||||
let is_utils = endpoint.1 == SpecialEndpoint::Utils;
|
||||
let is_origin_set = req.headers().get::<header::Origin>().is_some();
|
||||
let is_get_request = *req.method() == hyper::Method::Get;
|
||||
let is_head_request = *req.method() == hyper::Method::Head;
|
||||
let has_dapp = |dapp: &str| self.endpoints
|
||||
.as_ref()
|
||||
.map_or(false, |endpoints| endpoints.endpoints.read().contains_key(dapp));
|
||||
|
||||
trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", url, req);
|
||||
trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", req.uri(), req);
|
||||
debug!(target: "dapps", "Handling endpoint request: {:?}, referer: {:?}", endpoint, referer);
|
||||
|
||||
let control = control.clone();
|
||||
debug!(target: "dapps", "Handling endpoint request: {:?}", endpoint);
|
||||
let handler: Option<Box<Handler>> = match (endpoint.0, endpoint.1, referer) {
|
||||
(is_utils, match (endpoint.0, endpoint.1, referer) {
|
||||
// Handle invalid web requests that we can recover from
|
||||
(ref path, SpecialEndpoint::None, Some((ref referer, ref referer_url)))
|
||||
(ref path, SpecialEndpoint::None, Some(ref referer))
|
||||
if referer.app_id == apps::WEB_PATH
|
||||
&& self.endpoints.as_ref().map(|ep| ep.contains_key(apps::WEB_PATH)).unwrap_or(false)
|
||||
&& has_dapp(apps::WEB_PATH)
|
||||
&& !is_web_endpoint(path)
|
||||
=>
|
||||
{
|
||||
trace!(target: "dapps", "Redirecting to correct web request: {:?}", referer_url);
|
||||
let len = cmp::min(referer_url.path.len(), 2); // /web/<encoded>/
|
||||
let base = referer_url.path[..len].join("/");
|
||||
let requested = url.map(|u| u.path.join("/")).unwrap_or_default();
|
||||
Some(handlers::Redirection::boxed(&format!("/{}/{}", base, requested)))
|
||||
let token = referer.app_params.get(0).map(String::as_str).unwrap_or("");
|
||||
let requested = req.uri().path();
|
||||
let query = req.uri().query().map_or_else(String::new, |query| format!("?{}", query));
|
||||
let redirect_url = format!("/{}/{}{}{}", apps::WEB_PATH, token, requested, query);
|
||||
trace!(target: "dapps", "Redirecting to correct web request: {:?}", redirect_url);
|
||||
Response::Some(Box::new(future::ok(
|
||||
handlers::Redirection::new(redirect_url).into()
|
||||
)))
|
||||
},
|
||||
// First check special endpoints
|
||||
(ref path, ref endpoint, _) if self.special.contains_key(endpoint) => {
|
||||
trace!(target: "dapps", "Resolving to special endpoint.");
|
||||
self.special.get(endpoint)
|
||||
.expect("special known to contain key; qed")
|
||||
.as_ref()
|
||||
.map(|special| special.to_async_handler(path.clone().unwrap_or_default(), control))
|
||||
let special = self.special.get(endpoint).expect("special known to contain key; qed");
|
||||
match *special {
|
||||
Some(ref special) => Response::Some(special.respond(path.clone().unwrap_or_default(), req)),
|
||||
None => Response::None(req),
|
||||
}
|
||||
},
|
||||
// Then delegate to dapp
|
||||
(Some(ref path), _, _) if self.endpoints.as_ref().map(|ep| ep.contains_key(&path.app_id)).unwrap_or(false) => {
|
||||
(Some(ref path), _, _) if has_dapp(&path.app_id) => {
|
||||
trace!(target: "dapps", "Resolving to local/builtin dapp.");
|
||||
Some(self.endpoints
|
||||
Response::Some(self.endpoints
|
||||
.as_ref()
|
||||
.expect("endpoints known to be set; qed")
|
||||
.endpoints
|
||||
.read()
|
||||
.get(&path.app_id)
|
||||
.expect("endpoints known to contain key; qed")
|
||||
.to_async_handler(path.clone(), control))
|
||||
.respond(path.clone(), req))
|
||||
},
|
||||
// Try to resolve and fetch the dapp
|
||||
(Some(ref path), _, _) if self.fetch.contains(&path.app_id) => {
|
||||
trace!(target: "dapps", "Resolving to fetchable content.");
|
||||
Some(self.fetch.to_async_handler(path.clone(), control))
|
||||
Response::Some(self.fetch.respond(path.clone(), req))
|
||||
},
|
||||
// 404 for non-existent content (only if serving endpoints and not homepage)
|
||||
(Some(ref path), _, _)
|
||||
@@ -110,35 +122,53 @@ impl http::RequestMiddleware for Router {
|
||||
=>
|
||||
{
|
||||
trace!(target: "dapps", "Resolving to 404.");
|
||||
Some(Box::new(handlers::ContentHandler::error(
|
||||
hyper::StatusCode::NotFound,
|
||||
"404 Not Found",
|
||||
"Requested content was not found.",
|
||||
None,
|
||||
self.embeddable_on.clone(),
|
||||
)))
|
||||
if refresh_dapps {
|
||||
debug!(target: "dapps", "Refreshing dapps and re-trying.");
|
||||
self.endpoints.as_ref().map(|endpoints| endpoints.refresh_local_dapps());
|
||||
return self.resolve_request(req, false);
|
||||
} else {
|
||||
Response::Some(Box::new(future::ok(handlers::ContentHandler::error(
|
||||
hyper::StatusCode::NotFound,
|
||||
"404 Not Found",
|
||||
"Requested content was not found.",
|
||||
None,
|
||||
self.embeddable_on.clone(),
|
||||
).into())))
|
||||
}
|
||||
},
|
||||
// Any other GET|HEAD requests to home page.
|
||||
_ if (is_get_request || is_head_request) && self.special.contains_key(&SpecialEndpoint::Home) => {
|
||||
self.special.get(&SpecialEndpoint::Home)
|
||||
.expect("special known to contain key; qed")
|
||||
.as_ref()
|
||||
.map(|special| special.to_async_handler(Default::default(), control))
|
||||
let special = self.special.get(&SpecialEndpoint::Home).expect("special known to contain key; qed");
|
||||
match *special {
|
||||
Some(ref special) => {
|
||||
let mut endpoint = EndpointPath::default();
|
||||
endpoint.app_params = req.uri().path().split('/').map(str::to_owned).collect();
|
||||
Response::Some(special.respond(endpoint, req))
|
||||
},
|
||||
None => Response::None(req),
|
||||
}
|
||||
},
|
||||
// RPC by default
|
||||
_ => {
|
||||
trace!(target: "dapps", "Resolving to RPC call.");
|
||||
None
|
||||
Response::None(req)
|
||||
}
|
||||
};
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
match handler {
|
||||
Some(handler) => http::RequestMiddlewareAction::Respond {
|
||||
impl http::RequestMiddleware for Router {
|
||||
fn on_request(&self, req: hyper::Request) -> http::RequestMiddlewareAction {
|
||||
let is_origin_set = req.headers().get::<header::Origin>().is_some();
|
||||
let (is_utils, response) = self.resolve_request(req, self.endpoints.is_some());
|
||||
match response {
|
||||
Response::Some(response) => http::RequestMiddlewareAction::Respond {
|
||||
should_validate_hosts: !is_utils,
|
||||
handler: handler,
|
||||
response,
|
||||
},
|
||||
None => http::RequestMiddlewareAction::Proceed {
|
||||
Response::None(request) => http::RequestMiddlewareAction::Proceed {
|
||||
should_continue_on_invalid_cors: !is_origin_set,
|
||||
request,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -169,41 +199,44 @@ fn is_web_endpoint(path: &Option<EndpointPath>) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_referer_endpoint(req: &server::Request<HttpStream>, dapps_domain: &str) -> Option<(EndpointPath, Url)> {
|
||||
fn extract_referer_endpoint(req: &hyper::Request, dapps_domain: &str) -> Option<EndpointPath> {
|
||||
let referer = req.headers().get::<header::Referer>();
|
||||
|
||||
let url = referer.and_then(|referer| Url::parse(&referer.0).ok());
|
||||
let url = referer.and_then(|referer| referer.parse().ok());
|
||||
url.and_then(|url| {
|
||||
let option = Some(url);
|
||||
extract_url_referer_endpoint(&option, dapps_domain).or_else(|| {
|
||||
extract_endpoint(&option, dapps_domain).0.map(|endpoint| (endpoint, option.expect("Just wrapped; qed")))
|
||||
extract_url_referer_endpoint(&url, dapps_domain).or_else(|| {
|
||||
extract_endpoint(&url, None, dapps_domain).0
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_url_referer_endpoint(url: &Option<Url>, dapps_domain: &str) -> Option<(EndpointPath, Url)> {
|
||||
let query = url.as_ref().and_then(|url| url.query.as_ref());
|
||||
match (url, query) {
|
||||
(&Some(ref url), Some(ref query)) if query.starts_with(apps::URL_REFERER) => {
|
||||
let referer_url = format!("http://{}:{}/{}", url.host, url.port, &query[apps::URL_REFERER.len()..]);
|
||||
fn extract_url_referer_endpoint(url: &Uri, dapps_domain: &str) -> Option<EndpointPath> {
|
||||
let query = url.query();
|
||||
match query {
|
||||
Some(query) if query.starts_with(apps::URL_REFERER) => {
|
||||
let scheme = url.scheme().unwrap_or("http");
|
||||
let host = url.host().unwrap_or("unknown");
|
||||
let port = default_port(url, None);
|
||||
let referer_url = format!("{}://{}:{}/{}", scheme, host, port, &query[apps::URL_REFERER.len()..]);
|
||||
debug!(target: "dapps", "Recovering referer from query parameter: {}", referer_url);
|
||||
|
||||
let referer_url = Url::parse(&referer_url).ok();
|
||||
extract_endpoint(&referer_url, dapps_domain).0.map(|endpoint| {
|
||||
(endpoint, referer_url.expect("Endpoint returned only when url `is_some`").clone())
|
||||
})
|
||||
if let Some(referer_url) = referer_url.parse().ok() {
|
||||
extract_endpoint(&referer_url, None, dapps_domain).0
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_endpoint(url: &Option<Url>, dapps_domain: &str) -> (Option<EndpointPath>, SpecialEndpoint) {
|
||||
fn special_endpoint(url: &Url) -> SpecialEndpoint {
|
||||
if url.path.len() <= 1 {
|
||||
fn extract_endpoint(url: &Uri, extra_host: Option<&header::Host>, dapps_domain: &str) -> (Option<EndpointPath>, SpecialEndpoint) {
|
||||
fn special_endpoint(path: &[&str]) -> SpecialEndpoint {
|
||||
if path.len() <= 1 {
|
||||
return SpecialEndpoint::None;
|
||||
}
|
||||
|
||||
match url.path[0].as_ref() {
|
||||
match path[0].as_ref() {
|
||||
apps::RPC_PATH => SpecialEndpoint::Rpc,
|
||||
apps::API_PATH => SpecialEndpoint::Api,
|
||||
apps::UTILS_PATH => SpecialEndpoint::Utils,
|
||||
@@ -212,114 +245,162 @@ fn extract_endpoint(url: &Option<Url>, dapps_domain: &str) -> (Option<EndpointPa
|
||||
}
|
||||
}
|
||||
|
||||
match *url {
|
||||
Some(ref url) => match url.host {
|
||||
Host::Domain(ref domain) if domain.ends_with(dapps_domain) => {
|
||||
let id = &domain[0..(domain.len() - dapps_domain.len())];
|
||||
let (id, params) = if let Some(split) = id.rfind('.') {
|
||||
let (params, id) = id.split_at(split);
|
||||
(id[1..].to_owned(), [params.to_owned()].into_iter().chain(&url.path).cloned().collect())
|
||||
} else {
|
||||
(id.to_owned(), url.path.clone())
|
||||
};
|
||||
let port = default_port(url, extra_host.as_ref().and_then(|h| h.port()));
|
||||
let host = url.host().or_else(|| extra_host.as_ref().map(|h| h.hostname()));
|
||||
let query = url.query().map(str::to_owned);
|
||||
let mut path_segments = url.path().split('/').skip(1).collect::<Vec<_>>();
|
||||
trace!(
|
||||
target: "dapps",
|
||||
"Extracting endpoint from: {:?} (dapps: {}). Got host {:?}:{} with path {:?}",
|
||||
url, dapps_domain, host, port, path_segments
|
||||
);
|
||||
match host {
|
||||
Some(host) if host.ends_with(dapps_domain) => {
|
||||
let id = &host[0..(host.len() - dapps_domain.len())];
|
||||
let special = special_endpoint(&path_segments);
|
||||
|
||||
(Some(EndpointPath {
|
||||
app_id: id,
|
||||
app_params: params,
|
||||
host: domain.clone(),
|
||||
port: url.port,
|
||||
using_dapps_domains: true,
|
||||
}), special_endpoint(url))
|
||||
},
|
||||
_ if url.path.len() > 1 => {
|
||||
let id = url.path[0].to_owned();
|
||||
(Some(EndpointPath {
|
||||
app_id: id,
|
||||
app_params: url.path[1..].to_vec(),
|
||||
host: format!("{}", url.host),
|
||||
port: url.port,
|
||||
using_dapps_domains: false,
|
||||
}), special_endpoint(url))
|
||||
},
|
||||
_ => (None, special_endpoint(url)),
|
||||
// remove special endpoint id from params
|
||||
if special != SpecialEndpoint::None {
|
||||
path_segments.remove(0);
|
||||
}
|
||||
|
||||
let (app_id, app_params) = if let Some(split) = id.rfind('.') {
|
||||
let (params, id) = id.split_at(split);
|
||||
path_segments.insert(0, params);
|
||||
(id[1..].to_owned(), path_segments)
|
||||
} else {
|
||||
(id.to_owned(), path_segments)
|
||||
};
|
||||
|
||||
(Some(EndpointPath {
|
||||
app_id,
|
||||
app_params: app_params.into_iter().map(Into::into).collect(),
|
||||
query,
|
||||
host: host.to_owned(),
|
||||
port,
|
||||
using_dapps_domains: true,
|
||||
}), special)
|
||||
},
|
||||
_ => (None, SpecialEndpoint::None)
|
||||
Some(host) if path_segments.len() > 1 => {
|
||||
let special = special_endpoint(&path_segments);
|
||||
let id = path_segments.remove(0);
|
||||
(Some(EndpointPath {
|
||||
app_id: id.to_owned(),
|
||||
app_params: path_segments.into_iter().map(Into::into).collect(),
|
||||
query,
|
||||
host: host.to_owned(),
|
||||
port,
|
||||
using_dapps_domains: false,
|
||||
}), special)
|
||||
},
|
||||
_ => (None, special_endpoint(&path_segments)),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_extract_endpoint() {
|
||||
let dapps_domain = ".web3.site";
|
||||
assert_eq!(extract_endpoint(&None, dapps_domain), (None, SpecialEndpoint::None));
|
||||
|
||||
// With path prefix
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://localhost:8080/status/index.html").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["index.html".to_owned()],
|
||||
host: "localhost".to_owned(),
|
||||
port: 8080,
|
||||
using_dapps_domains: false,
|
||||
}), SpecialEndpoint::None)
|
||||
);
|
||||
|
||||
// With path prefix
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://localhost:8080/rpc/").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "rpc".to_owned(),
|
||||
app_params: vec!["".to_owned()],
|
||||
host: "localhost".to_owned(),
|
||||
port: 8080,
|
||||
using_dapps_domains: false,
|
||||
}), SpecialEndpoint::Rpc)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://my.status.web3.site/parity-utils/inject.js").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".to_owned(), "parity-utils".into(), "inject.js".into()],
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Utils)
|
||||
);
|
||||
|
||||
// By Subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://status.web3.site/test.html").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["test.html".to_owned()],
|
||||
host: "status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::None)
|
||||
);
|
||||
|
||||
// RPC by subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://my.status.web3.site/rpc/").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".to_owned(), "rpc".into(), "".into()],
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Rpc)
|
||||
);
|
||||
|
||||
// API by subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&Url::parse("http://my.status.web3.site/api/").ok(), dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".to_owned(), "api".into(), "".into()],
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Api)
|
||||
);
|
||||
fn default_port(url: &Uri, extra_port: Option<u16>) -> u16 {
|
||||
let scheme = url.scheme().unwrap_or("http");
|
||||
url.port().or(extra_port).unwrap_or_else(|| match scheme {
|
||||
"http" => 80,
|
||||
"https" => 443,
|
||||
_ => 80,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{SpecialEndpoint, EndpointPath, extract_endpoint};
|
||||
|
||||
#[test]
|
||||
fn should_extract_endpoint() {
|
||||
let dapps_domain = ".web3.site";
|
||||
|
||||
// With path prefix
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://localhost:8080/status/index.html?q=1".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["index.html".to_owned()],
|
||||
query: Some("q=1".into()),
|
||||
host: "localhost".to_owned(),
|
||||
port: 8080,
|
||||
using_dapps_domains: false,
|
||||
}), SpecialEndpoint::None)
|
||||
);
|
||||
|
||||
// With path prefix
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://localhost:8080/rpc/".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "rpc".to_owned(),
|
||||
app_params: vec!["".to_owned()],
|
||||
query: None,
|
||||
host: "localhost".to_owned(),
|
||||
port: 8080,
|
||||
using_dapps_domains: false,
|
||||
}), SpecialEndpoint::Rpc)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://my.status.web3.site/parity-utils/inject.js".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".into(), "inject.js".into()],
|
||||
query: None,
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Utils)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://my.status.web3.site/inject.js".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".into(), "inject.js".into()],
|
||||
query: None,
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::None)
|
||||
);
|
||||
|
||||
// By Subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://status.web3.site/test.html".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["test.html".to_owned()],
|
||||
query: None,
|
||||
host: "status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::None)
|
||||
);
|
||||
|
||||
// RPC by subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://my.status.web3.site/rpc/".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".into(), "".into()],
|
||||
query: None,
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Rpc)
|
||||
);
|
||||
|
||||
// API by subdomain
|
||||
assert_eq!(
|
||||
extract_endpoint(&"http://my.status.web3.site/api/".parse().unwrap(), None, dapps_domain),
|
||||
(Some(EndpointPath {
|
||||
app_id: "status".to_owned(),
|
||||
app_params: vec!["my".into(), "".into()],
|
||||
query: None,
|
||||
host: "my.status.web3.site".to_owned(),
|
||||
port: 80,
|
||||
using_dapps_domains: true,
|
||||
}), SpecialEndpoint::Api)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,6 +49,7 @@ fn should_handle_ping() {
|
||||
"\
|
||||
POST /api/ping HTTP/1.1\r\n\
|
||||
Host: home.parity\r\n\
|
||||
Content-Type: application/json\r\n\
|
||||
Connection: close\r\n\
|
||||
\r\n\
|
||||
{}
|
||||
|
||||
@@ -18,7 +18,7 @@ use devtools::http_client;
|
||||
use rustc_hex::FromHex;
|
||||
use tests::helpers::{
|
||||
serve_with_registrar, serve_with_registrar_and_sync, serve_with_fetch,
|
||||
serve_with_registrar_and_fetch, serve_with_registrar_and_fetch_and_threads,
|
||||
serve_with_registrar_and_fetch,
|
||||
request, assert_security_headers_for_embed,
|
||||
};
|
||||
|
||||
@@ -39,7 +39,7 @@ fn should_resolve_dapp() {
|
||||
|
||||
// then
|
||||
response.assert_status("HTTP/1.1 404 Not Found");
|
||||
assert_eq!(registrar.calls.lock().len(), 2);
|
||||
assert_eq!(registrar.calls.lock().len(), 4);
|
||||
assert_security_headers_for_embed(&response.headers);
|
||||
}
|
||||
|
||||
@@ -171,6 +171,8 @@ fn should_return_fetched_dapp_content() {
|
||||
r#"18
|
||||
<h1>Hello Gavcoin!</h1>
|
||||
|
||||
0
|
||||
|
||||
"#
|
||||
);
|
||||
|
||||
@@ -257,7 +259,7 @@ fn should_not_request_content_twice() {
|
||||
use std::thread;
|
||||
|
||||
// given
|
||||
let (server, fetch, registrar) = serve_with_registrar_and_fetch_and_threads(true);
|
||||
let (server, fetch, registrar) = serve_with_registrar_and_fetch();
|
||||
let gavcoin = GAVCOIN_ICON.from_hex().unwrap();
|
||||
registrar.set_result(
|
||||
"2be00befcf008bc0e7d9cdefc194db9c75352e8632f48498b5a6bfce9f02c88e".parse().unwrap(),
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
use std::{io, thread, time};
|
||||
use std::sync::{atomic, mpsc, Arc};
|
||||
use util::Mutex;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use futures::{self, Future};
|
||||
use fetch::{self, Fetch};
|
||||
@@ -94,7 +94,7 @@ impl FakeFetch {
|
||||
}
|
||||
|
||||
impl Fetch for FakeFetch {
|
||||
type Result = futures::BoxFuture<fetch::Response, fetch::Error>;
|
||||
type Result = Box<Future<Item = fetch::Response, Error = fetch::Error> + Send>;
|
||||
|
||||
fn new() -> Result<Self, fetch::Error> where Self: Sized {
|
||||
Ok(FakeFetch::default())
|
||||
@@ -117,6 +117,17 @@ impl Fetch for FakeFetch {
|
||||
tx.send(fetch::Response::from_reader(cursor)).unwrap();
|
||||
});
|
||||
|
||||
rx.map_err(|_| fetch::Error::Aborted).boxed()
|
||||
Box::new(rx.map_err(|_| fetch::Error::Aborted))
|
||||
}
|
||||
|
||||
fn process_and_forget<F, I, E>(&self, f: F) where
|
||||
F: Future<Item=I, Error=E> + Send + 'static,
|
||||
I: Send + 'static,
|
||||
E: Send + 'static,
|
||||
{
|
||||
// Spawn the task in a separate thread.
|
||||
thread::spawn(|| {
|
||||
let _ = f.wait();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,12 +22,12 @@ use std::sync::Arc;
|
||||
use env_logger::LogBuilder;
|
||||
use jsonrpc_core::IoHandler;
|
||||
use jsonrpc_http_server::{self as http, Host, DomainsValidation};
|
||||
use parity_reactor::Remote;
|
||||
|
||||
use devtools::http_client;
|
||||
use hash_fetch::urlhint::ContractClient;
|
||||
use fetch::{Fetch, Client as FetchClient};
|
||||
use futures_cpupool::CpuPool;
|
||||
use parity_reactor::Remote;
|
||||
use node_health::{NodeHealth, TimeChecker, CpuPool};
|
||||
|
||||
use {Middleware, SyncStatus, WebProxyTokens};
|
||||
|
||||
@@ -39,6 +39,7 @@ use self::fetch::FakeFetch;
|
||||
|
||||
const SIGNER_PORT: u16 = 18180;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct FakeSync(bool);
|
||||
impl SyncStatus for FakeSync {
|
||||
fn is_major_importing(&self) -> bool { self.0 }
|
||||
@@ -54,7 +55,7 @@ fn init_logger() {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server, Arc<FakeRegistrar>) where
|
||||
pub fn init_server<F, B>(process: F, io: IoHandler) -> (Server, Arc<FakeRegistrar>) where
|
||||
F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
|
||||
B: Fetch,
|
||||
{
|
||||
@@ -63,11 +64,9 @@ pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server,
|
||||
let mut dapps_path = env::temp_dir();
|
||||
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
|
||||
|
||||
let server = process(ServerBuilder::new(
|
||||
&dapps_path, registrar.clone(), remote,
|
||||
))
|
||||
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
|
||||
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
|
||||
let mut builder = ServerBuilder::new(&dapps_path, registrar.clone());
|
||||
builder.signer_address = Some(("127.0.0.1".into(), SIGNER_PORT));
|
||||
let server = process(builder).start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
|
||||
(
|
||||
server,
|
||||
registrar,
|
||||
@@ -75,34 +74,34 @@ pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server,
|
||||
}
|
||||
|
||||
pub fn serve_with_rpc(io: IoHandler) -> Server {
|
||||
init_server(|builder| builder, io, Remote::new_sync()).0
|
||||
init_server(|builder| builder, io).0
|
||||
}
|
||||
|
||||
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
|
||||
let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
|
||||
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
|
||||
init_server(|mut builder| {
|
||||
builder.allowed_hosts = hosts.into();
|
||||
builder
|
||||
}, Default::default()).0
|
||||
}
|
||||
|
||||
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
|
||||
init_server(|builder| builder, Default::default(), Remote::new_sync())
|
||||
init_server(|builder| builder, Default::default())
|
||||
}
|
||||
|
||||
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
|
||||
init_server(|builder| {
|
||||
builder.sync_status(Arc::new(FakeSync(true)))
|
||||
}, Default::default(), Remote::new_sync())
|
||||
init_server(|mut builder| {
|
||||
builder.sync_status = Arc::new(FakeSync(true));
|
||||
builder
|
||||
}, Default::default())
|
||||
}
|
||||
|
||||
pub fn serve_with_registrar_and_fetch() -> (Server, FakeFetch, Arc<FakeRegistrar>) {
|
||||
serve_with_registrar_and_fetch_and_threads(false)
|
||||
}
|
||||
|
||||
pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Server, FakeFetch, Arc<FakeRegistrar>) {
|
||||
let fetch = FakeFetch::default();
|
||||
let f = fetch.clone();
|
||||
let (server, reg) = init_server(move |builder| {
|
||||
builder.fetch(f.clone())
|
||||
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
|
||||
}, Default::default());
|
||||
|
||||
(server, fetch, reg)
|
||||
}
|
||||
@@ -110,19 +109,25 @@ pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Serv
|
||||
pub fn serve_with_fetch(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
|
||||
let fetch = FakeFetch::default();
|
||||
let f = fetch.clone();
|
||||
let (server, _) = init_server(move |builder| {
|
||||
builder
|
||||
.fetch(f.clone())
|
||||
.web_proxy_tokens(Arc::new(move |token| {
|
||||
if &token == web_token { Some(domain.into()) } else { None }
|
||||
}))
|
||||
}, Default::default(), Remote::new_sync());
|
||||
let (server, _) = init_server(move |mut builder| {
|
||||
builder.web_proxy_tokens = Arc::new(move |token| {
|
||||
if &token == web_token { Some(domain.into()) } else { None }
|
||||
});
|
||||
builder.fetch(f.clone())
|
||||
}, Default::default());
|
||||
|
||||
(server, fetch)
|
||||
}
|
||||
|
||||
pub fn serve() -> Server {
|
||||
init_server(|builder| builder, Default::default(), Remote::new_sync()).0
|
||||
init_server(|builder| builder, Default::default()).0
|
||||
}
|
||||
|
||||
pub fn serve_ui() -> Server {
|
||||
init_server(|mut builder| {
|
||||
builder.serve_ui = true;
|
||||
builder
|
||||
}, Default::default()).0
|
||||
}
|
||||
|
||||
pub fn request(server: Server, request: &str) -> http_client::Response {
|
||||
@@ -145,13 +150,13 @@ pub struct ServerBuilder<T: Fetch = FetchClient> {
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
signer_address: Option<(String, u16)>,
|
||||
allowed_hosts: DomainsValidation<Host>,
|
||||
remote: Remote,
|
||||
fetch: Option<T>,
|
||||
serve_ui: bool,
|
||||
}
|
||||
|
||||
impl ServerBuilder {
|
||||
/// Construct new dapps server
|
||||
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
|
||||
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>) -> Self {
|
||||
ServerBuilder {
|
||||
dapps_path: dapps_path.as_ref().to_owned(),
|
||||
registrar: registrar,
|
||||
@@ -159,8 +164,8 @@ impl ServerBuilder {
|
||||
web_proxy_tokens: Arc::new(|_| None),
|
||||
signer_address: None,
|
||||
allowed_hosts: DomainsValidation::Disabled,
|
||||
remote: remote,
|
||||
fetch: None,
|
||||
serve_ui: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -175,37 +180,11 @@ impl<T: Fetch> ServerBuilder<T> {
|
||||
web_proxy_tokens: self.web_proxy_tokens,
|
||||
signer_address: self.signer_address,
|
||||
allowed_hosts: self.allowed_hosts,
|
||||
remote: self.remote,
|
||||
fetch: Some(fetch),
|
||||
serve_ui: self.serve_ui,
|
||||
}
|
||||
}
|
||||
|
||||
/// Change default sync status.
|
||||
pub fn sync_status(mut self, status: Arc<SyncStatus>) -> Self {
|
||||
self.sync_status = status;
|
||||
self
|
||||
}
|
||||
|
||||
/// Change default web proxy tokens validator.
|
||||
pub fn web_proxy_tokens(mut self, tokens: Arc<WebProxyTokens>) -> Self {
|
||||
self.web_proxy_tokens = tokens;
|
||||
self
|
||||
}
|
||||
|
||||
/// Change default signer port.
|
||||
pub fn signer_address(mut self, signer_address: Option<(String, u16)>) -> Self {
|
||||
self.signer_address = signer_address;
|
||||
self
|
||||
}
|
||||
|
||||
/// Change allowed hosts.
|
||||
/// `None` - All hosts are allowed
|
||||
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
|
||||
pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
|
||||
self.allowed_hosts = allowed_hosts;
|
||||
self
|
||||
}
|
||||
|
||||
/// Asynchronously start server with no authentication,
|
||||
/// returns result with `Server` handle on success or an error.
|
||||
pub fn start_unsecured_http(self, addr: &SocketAddr, io: IoHandler) -> Result<Server, http::Error> {
|
||||
@@ -220,8 +199,9 @@ impl<T: Fetch> ServerBuilder<T> {
|
||||
self.registrar,
|
||||
self.sync_status,
|
||||
self.web_proxy_tokens,
|
||||
self.remote,
|
||||
Remote::new_sync(),
|
||||
fetch,
|
||||
self.serve_ui,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -253,21 +233,39 @@ impl Server {
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
serve_ui: bool,
|
||||
) -> Result<Server, http::Error> {
|
||||
let middleware = Middleware::dapps(
|
||||
"pool.ntp.org:123",
|
||||
CpuPool::new(4),
|
||||
remote,
|
||||
signer_address,
|
||||
vec![],
|
||||
dapps_path,
|
||||
extra_dapps,
|
||||
DAPPS_DOMAIN.into(),
|
||||
registrar,
|
||||
sync_status,
|
||||
web_proxy_tokens,
|
||||
fetch,
|
||||
let health = NodeHealth::new(
|
||||
sync_status.clone(),
|
||||
TimeChecker::new::<String>(&[], CpuPool::new(1)),
|
||||
remote.clone(),
|
||||
);
|
||||
let pool = ::futures_cpupool::CpuPool::new(1);
|
||||
let middleware = if serve_ui {
|
||||
Middleware::ui(
|
||||
pool,
|
||||
health,
|
||||
DAPPS_DOMAIN.into(),
|
||||
registrar,
|
||||
sync_status,
|
||||
fetch,
|
||||
)
|
||||
} else {
|
||||
Middleware::dapps(
|
||||
pool,
|
||||
health,
|
||||
signer_address,
|
||||
vec![],
|
||||
vec![],
|
||||
dapps_path,
|
||||
extra_dapps,
|
||||
DAPPS_DOMAIN.into(),
|
||||
registrar,
|
||||
sync_status,
|
||||
web_proxy_tokens,
|
||||
fetch,
|
||||
)
|
||||
};
|
||||
|
||||
let mut allowed_hosts: Option<Vec<Host>> = allowed_hosts.into();
|
||||
allowed_hosts.as_mut().map(|mut hosts| {
|
||||
@@ -289,9 +287,7 @@ impl Server {
|
||||
pub fn addr(&self) -> &SocketAddr {
|
||||
self.server.as_ref()
|
||||
.expect("server is always Some at the start; it's consumed only when object is dropped; qed")
|
||||
.addrs()
|
||||
.first()
|
||||
.expect("You cannot start the server without binding to at least one address; qed")
|
||||
.address()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,10 +17,13 @@
|
||||
use std::str;
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
use rustc_hex::FromHex;
|
||||
|
||||
use hash_fetch::urlhint::ContractClient;
|
||||
use util::{Bytes, Address, Mutex, H256, ToPretty};
|
||||
use bigint::hash::H256;
|
||||
use bytes::{Bytes, ToPretty};
|
||||
use hash_fetch::urlhint::{ContractClient, BoxFuture};
|
||||
use parking_lot::Mutex;
|
||||
use rustc_hex::FromHex;
|
||||
use util::Address;
|
||||
|
||||
const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
|
||||
const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
|
||||
@@ -64,7 +67,7 @@ impl ContractClient for FakeRegistrar {
|
||||
Ok(REGISTRAR.parse().unwrap())
|
||||
}
|
||||
|
||||
fn call(&self, address: Address, data: Bytes) -> ::futures::BoxFuture<Bytes, String> {
|
||||
fn call(&self, address: Address, data: Bytes) -> BoxFuture<Bytes, String> {
|
||||
let call = (address.to_hex(), data.to_hex());
|
||||
self.calls.lock().push(call.clone());
|
||||
let res = self.responses.lock().get(&call).cloned().expect(&format!("No response for call: {:?}", call));
|
||||
|
||||
62
dapps/src/tests/home.rs
Normal file
62
dapps/src/tests/home.rs
Normal file
@@ -0,0 +1,62 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use tests::helpers::{serve_ui, request, assert_security_headers};
|
||||
|
||||
#[test]
|
||||
fn should_serve_home_js() {
|
||||
// given
|
||||
let server = serve_ui();
|
||||
|
||||
// when
|
||||
let response = request(server,
|
||||
"\
|
||||
GET /inject.js HTTP/1.1\r\n\
|
||||
Host: 127.0.0.1:8080\r\n\
|
||||
Connection: close\r\n\
|
||||
\r\n\
|
||||
{}
|
||||
"
|
||||
);
|
||||
|
||||
// then
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
response.assert_header("Content-Type", "application/javascript");
|
||||
assert_eq!(response.body.contains("function(){"), true, "Expected function in: {}", response.body);
|
||||
assert_security_headers(&response.headers);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_serve_home() {
|
||||
// given
|
||||
let server = serve_ui();
|
||||
|
||||
// when
|
||||
let response = request(server,
|
||||
"\
|
||||
GET / HTTP/1.1\r\n\
|
||||
Host: 127.0.0.1:8080\r\n\
|
||||
Connection: close\r\n\
|
||||
\r\n\
|
||||
{}
|
||||
"
|
||||
);
|
||||
|
||||
// then
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
response.assert_header("Content-Type", "text/html");
|
||||
assert_security_headers(&response.headers);
|
||||
}
|
||||
@@ -20,6 +20,7 @@ mod helpers;
|
||||
|
||||
mod api;
|
||||
mod fetch;
|
||||
mod home;
|
||||
mod redirection;
|
||||
mod rpc;
|
||||
mod validation;
|
||||
|
||||
@@ -201,7 +201,7 @@ fn should_serve_utils() {
|
||||
|
||||
// then
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
assert_eq!(response.body.contains("function(){"), true);
|
||||
response.assert_header("Content-Type", "application/javascript");
|
||||
assert_eq!(response.body.contains("function(){"), true, "Expected function in: {}", response.body);
|
||||
assert_security_headers(&response.headers);
|
||||
}
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ fn should_reject_invalid_host() {
|
||||
);
|
||||
|
||||
// then
|
||||
assert_eq!(response.status, "HTTP/1.1 403 Forbidden".to_owned());
|
||||
response.assert_status("HTTP/1.1 403 Forbidden");
|
||||
assert!(response.body.contains("Provided Host header is not whitelisted."), response.body);
|
||||
}
|
||||
|
||||
@@ -54,7 +54,7 @@ fn should_allow_valid_host() {
|
||||
);
|
||||
|
||||
// then
|
||||
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -74,7 +74,7 @@ fn should_serve_dapps_domains() {
|
||||
);
|
||||
|
||||
// then
|
||||
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -95,5 +95,5 @@ fn should_allow_parity_utils_even_on_invalid_domain() {
|
||||
);
|
||||
|
||||
// then
|
||||
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
|
||||
response.assert_status("HTTP/1.1 200 OK");
|
||||
}
|
||||
|
||||
150
dapps/src/url.rs
150
dapps/src/url.rs
@@ -1,150 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! HTTP/HTTPS URL type. Based on URL type from Iron library.
|
||||
|
||||
use url_lib::{self};
|
||||
pub use url_lib::Host;
|
||||
|
||||
/// HTTP/HTTPS URL type for Iron.
|
||||
#[derive(PartialEq, Eq, Clone, Debug)]
|
||||
pub struct Url {
|
||||
/// Raw url of url
|
||||
pub raw: url_lib::Url,
|
||||
|
||||
/// The host field of the URL, probably a domain.
|
||||
pub host: Host,
|
||||
|
||||
/// The connection port.
|
||||
pub port: u16,
|
||||
|
||||
/// The URL path, the resource to be accessed.
|
||||
///
|
||||
/// A *non-empty* vector encoding the parts of the URL path.
|
||||
/// Empty entries of `""` correspond to trailing slashes.
|
||||
pub path: Vec<String>,
|
||||
|
||||
/// The URL query.
|
||||
pub query: Option<String>,
|
||||
|
||||
/// The URL username field, from the userinfo section of the URL.
|
||||
///
|
||||
/// `None` if the `@` character was not part of the input OR
|
||||
/// if a blank username was provided.
|
||||
/// Otherwise, a non-empty string.
|
||||
pub username: Option<String>,
|
||||
|
||||
/// The URL password field, from the userinfo section of the URL.
|
||||
///
|
||||
/// `None` if the `@` character was not part of the input OR
|
||||
/// if a blank password was provided.
|
||||
/// Otherwise, a non-empty string.
|
||||
pub password: Option<String>,
|
||||
}
|
||||
|
||||
impl Url {
|
||||
/// Create a URL from a string.
|
||||
///
|
||||
/// The input must be a valid URL with a special scheme for this to succeed.
|
||||
///
|
||||
/// HTTP and HTTPS are special schemes.
|
||||
///
|
||||
/// See: http://url.spec.whatwg.org/#special-scheme
|
||||
pub fn parse(input: &str) -> Result<Url, String> {
|
||||
// Parse the string using rust-url, then convert.
|
||||
match url_lib::Url::parse(input) {
|
||||
Ok(raw_url) => Url::from_generic_url(raw_url),
|
||||
Err(e) => Err(format!("{}", e))
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a `Url` from a `rust-url` `Url`.
|
||||
pub fn from_generic_url(raw_url: url_lib::Url) -> Result<Url, String> {
|
||||
// Map empty usernames to None.
|
||||
let username = match raw_url.username() {
|
||||
"" => None,
|
||||
username => Some(username.to_owned())
|
||||
};
|
||||
|
||||
// Map empty passwords to None.
|
||||
let password = match raw_url.password() {
|
||||
Some(password) if !password.is_empty() => Some(password.to_owned()),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let port = raw_url.port_or_known_default().ok_or_else(|| format!("Unknown port for scheme: `{}`", raw_url.scheme()))?;
|
||||
let host = raw_url.host().ok_or_else(|| "Valid host, because only data:, mailto: protocols does not have host.".to_owned())?.to_owned();
|
||||
let path = raw_url.path_segments().ok_or_else(|| "Valid path segments. In HTTP we won't get cannot-be-a-base URLs".to_owned())?
|
||||
.map(|part| part.to_owned()).collect();
|
||||
let query = raw_url.query().map(|x| x.to_owned());
|
||||
|
||||
Ok(Url {
|
||||
port: port,
|
||||
host: host,
|
||||
path: path,
|
||||
query: query,
|
||||
raw: raw_url,
|
||||
username: username,
|
||||
password: password,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::Url;
|
||||
|
||||
#[test]
|
||||
fn test_default_port() {
|
||||
assert_eq!(Url::parse("http://example.com/wow").unwrap().port, 80u16);
|
||||
assert_eq!(Url::parse("https://example.com/wow").unwrap().port, 443u16);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_explicit_port() {
|
||||
assert_eq!(Url::parse("http://localhost:3097").unwrap().port, 3097u16);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_username() {
|
||||
assert!(Url::parse("http://@example.com").unwrap().username.is_none());
|
||||
assert!(Url::parse("http://:password@example.com").unwrap().username.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_not_empty_username() {
|
||||
let user = Url::parse("http://john:pass@example.com").unwrap().username;
|
||||
assert_eq!(user.unwrap(), "john");
|
||||
|
||||
let user = Url::parse("http://john:@example.com").unwrap().username;
|
||||
assert_eq!(user.unwrap(), "john");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_password() {
|
||||
assert!(Url::parse("http://michael@example.com").unwrap().password.is_none());
|
||||
assert!(Url::parse("http://:@example.com").unwrap().password.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_not_empty_password() {
|
||||
let pass = Url::parse("http://michael:pass@example.com").unwrap().password;
|
||||
assert_eq!(pass.unwrap(), "pass");
|
||||
|
||||
let pass = Url::parse("http://:pass@example.com").unwrap().password;
|
||||
assert_eq!(pass.unwrap(), "pass");
|
||||
}
|
||||
}
|
||||
209
dapps/src/web.rs
209
dapps/src/web.rs
@@ -17,26 +17,23 @@
|
||||
//! Serving web-based content (proxying)
|
||||
|
||||
use std::sync::Arc;
|
||||
use fetch::{self, Fetch};
|
||||
use parity_reactor::Remote;
|
||||
|
||||
use base32;
|
||||
use hyper::{self, server, net, Next, Encoder, Decoder};
|
||||
use hyper::status::StatusCode;
|
||||
use fetch::{self, Fetch};
|
||||
use hyper::{mime, StatusCode};
|
||||
|
||||
use apps;
|
||||
use endpoint::{Endpoint, Handler, EndpointPath};
|
||||
use endpoint::{Endpoint, EndpointPath, Request, Response};
|
||||
use futures::future;
|
||||
use handlers::{
|
||||
ContentFetcherHandler, ContentHandler, ContentValidator, ValidatorResponse,
|
||||
StreamingHandler, extract_url,
|
||||
StreamingHandler,
|
||||
};
|
||||
use url::Url;
|
||||
use {Embeddable, WebProxyTokens};
|
||||
|
||||
pub struct Web<F> {
|
||||
embeddable_on: Embeddable,
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
}
|
||||
|
||||
@@ -44,92 +41,27 @@ impl<F: Fetch> Web<F> {
|
||||
pub fn boxed(
|
||||
embeddable_on: Embeddable,
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
) -> Box<Endpoint> {
|
||||
Box::new(Web {
|
||||
embeddable_on,
|
||||
web_proxy_tokens,
|
||||
remote,
|
||||
fetch,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: Fetch> Endpoint for Web<F> {
|
||||
fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
|
||||
Box::new(WebHandler {
|
||||
control: control,
|
||||
state: State::Initial,
|
||||
path: path,
|
||||
remote: self.remote.clone(),
|
||||
fetch: self.fetch.clone(),
|
||||
web_proxy_tokens: self.web_proxy_tokens.clone(),
|
||||
embeddable_on: self.embeddable_on.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
struct WebInstaller {
|
||||
embeddable_on: Embeddable,
|
||||
referer: String,
|
||||
}
|
||||
|
||||
impl ContentValidator for WebInstaller {
|
||||
type Error = String;
|
||||
|
||||
fn validate_and_install(&self, response: fetch::Response) -> Result<ValidatorResponse, String> {
|
||||
let status = StatusCode::from_u16(response.status().to_u16());
|
||||
let is_html = response.is_html();
|
||||
let mime = response.content_type().unwrap_or(mime!(Text/Html));
|
||||
let mut handler = StreamingHandler::new(
|
||||
response,
|
||||
status,
|
||||
mime,
|
||||
self.embeddable_on.clone(),
|
||||
);
|
||||
if is_html {
|
||||
handler.set_initial_content(&format!(
|
||||
r#"<script src="/{}/inject.js"></script><script>history.replaceState({{}}, "", "/?{}{}/{}")</script>"#,
|
||||
apps::UTILS_PATH,
|
||||
apps::URL_REFERER,
|
||||
apps::WEB_PATH,
|
||||
&self.referer,
|
||||
));
|
||||
}
|
||||
Ok(ValidatorResponse::Streaming(handler))
|
||||
}
|
||||
}
|
||||
|
||||
enum State<F: Fetch> {
|
||||
Initial,
|
||||
Error(ContentHandler),
|
||||
Fetching(ContentFetcherHandler<WebInstaller, F>),
|
||||
}
|
||||
|
||||
struct WebHandler<F: Fetch> {
|
||||
control: hyper::Control,
|
||||
state: State<F>,
|
||||
path: EndpointPath,
|
||||
remote: Remote,
|
||||
fetch: F,
|
||||
web_proxy_tokens: Arc<WebProxyTokens>,
|
||||
embeddable_on: Embeddable,
|
||||
}
|
||||
|
||||
impl<F: Fetch> WebHandler<F> {
|
||||
fn extract_target_url(&self, url: Option<Url>) -> Result<String, State<F>> {
|
||||
let token_and_url = self.path.app_params.get(0)
|
||||
fn extract_target_url(&self, path: &EndpointPath) -> Result<String, ContentHandler> {
|
||||
let token_and_url = path.app_params.get(0)
|
||||
.map(|encoded| encoded.replace('.', ""))
|
||||
.and_then(|encoded| base32::decode(base32::Alphabet::Crockford, &encoded.to_uppercase()))
|
||||
.and_then(|data| String::from_utf8(data).ok())
|
||||
.ok_or_else(|| State::Error(ContentHandler::error(
|
||||
.ok_or_else(|| ContentHandler::error(
|
||||
StatusCode::BadRequest,
|
||||
"Invalid parameter",
|
||||
"Couldn't parse given parameter:",
|
||||
self.path.app_params.get(0).map(String::as_str),
|
||||
path.app_params.get(0).map(String::as_str),
|
||||
self.embeddable_on.clone()
|
||||
)))?;
|
||||
))?;
|
||||
|
||||
let mut token_it = token_and_url.split('+');
|
||||
let token = token_it.next();
|
||||
@@ -139,9 +71,9 @@ impl<F: Fetch> WebHandler<F> {
|
||||
let domain = match token.and_then(|token| self.web_proxy_tokens.domain(token)) {
|
||||
Some(domain) => domain,
|
||||
_ => {
|
||||
return Err(State::Error(ContentHandler::error(
|
||||
return Err(ContentHandler::error(
|
||||
StatusCode::BadRequest, "Invalid Access Token", "Invalid or old web proxy access token supplied.", Some("Try refreshing the page."), self.embeddable_on.clone()
|
||||
)));
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -149,97 +81,86 @@ impl<F: Fetch> WebHandler<F> {
|
||||
let mut target_url = match target_url {
|
||||
Some(url) if url.starts_with("http://") || url.starts_with("https://") => url.to_owned(),
|
||||
_ => {
|
||||
return Err(State::Error(ContentHandler::error(
|
||||
return Err(ContentHandler::error(
|
||||
StatusCode::BadRequest, "Invalid Protocol", "Invalid protocol used.", None, self.embeddable_on.clone()
|
||||
)));
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
if !target_url.starts_with(&*domain) {
|
||||
return Err(State::Error(ContentHandler::error(
|
||||
return Err(ContentHandler::error(
|
||||
StatusCode::BadRequest, "Invalid Domain", "Dapp attempted to access invalid domain.", Some(&target_url), self.embeddable_on.clone(),
|
||||
)));
|
||||
));
|
||||
}
|
||||
|
||||
if !target_url.ends_with("/") {
|
||||
target_url = format!("{}/", target_url);
|
||||
}
|
||||
|
||||
// TODO [ToDr] Should just use `path.app_params`
|
||||
let (path, query) = match (&url, self.path.using_dapps_domains) {
|
||||
(&Some(ref url), true) => (&url.path[..], &url.query),
|
||||
(&Some(ref url), false) => (&url.path[2..], &url.query),
|
||||
_ => {
|
||||
return Err(State::Error(ContentHandler::error(
|
||||
StatusCode::BadRequest, "Invalid URL", "Couldn't parse URL", None, self.embeddable_on.clone()
|
||||
)));
|
||||
}
|
||||
};
|
||||
// Skip the token
|
||||
let query = path.query.as_ref().map_or_else(String::new, |query| format!("?{}", query));
|
||||
let path = path.app_params[1..].join("/");
|
||||
|
||||
let query = match *query {
|
||||
Some(ref query) => format!("?{}", query),
|
||||
None => "".into(),
|
||||
};
|
||||
|
||||
Ok(format!("{}{}{}", target_url, path.join("/"), query))
|
||||
Ok(format!("{}{}{}", target_url, path, query))
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: Fetch> server::Handler<net::HttpStream> for WebHandler<F> {
|
||||
fn on_request(&mut self, request: server::Request<net::HttpStream>) -> Next {
|
||||
let url = extract_url(&request);
|
||||
impl<F: Fetch> Endpoint for Web<F> {
|
||||
fn respond(&self, path: EndpointPath, req: Request) -> Response {
|
||||
// First extract the URL (reject invalid URLs)
|
||||
let target_url = match self.extract_target_url(url) {
|
||||
let target_url = match self.extract_target_url(&path) {
|
||||
Ok(url) => url,
|
||||
Err(error) => {
|
||||
self.state = error;
|
||||
return Next::write();
|
||||
Err(response) => {
|
||||
return Box::new(future::ok(response.into()));
|
||||
}
|
||||
};
|
||||
|
||||
let mut handler = ContentFetcherHandler::new(
|
||||
target_url,
|
||||
self.path.clone(),
|
||||
self.control.clone(),
|
||||
let token = path.app_params.get(0)
|
||||
.expect("`target_url` is valid; app_params is not empty;qed")
|
||||
.to_owned();
|
||||
|
||||
Box::new(ContentFetcherHandler::new(
|
||||
req.method(),
|
||||
&target_url,
|
||||
path,
|
||||
WebInstaller {
|
||||
embeddable_on: self.embeddable_on.clone(),
|
||||
referer: self.path.app_params.get(0)
|
||||
.expect("`target_url` is valid; app_params is not empty;qed")
|
||||
.to_owned(),
|
||||
token,
|
||||
},
|
||||
self.embeddable_on.clone(),
|
||||
self.remote.clone(),
|
||||
self.fetch.clone(),
|
||||
);
|
||||
let res = handler.on_request(request);
|
||||
self.state = State::Fetching(handler);
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
fn on_request_readable(&mut self, decoder: &mut Decoder<net::HttpStream>) -> Next {
|
||||
match self.state {
|
||||
State::Initial => Next::end(),
|
||||
State::Error(ref mut handler) => handler.on_request_readable(decoder),
|
||||
State::Fetching(ref mut handler) => handler.on_request_readable(decoder),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
match self.state {
|
||||
State::Initial => Next::end(),
|
||||
State::Error(ref mut handler) => handler.on_response(res),
|
||||
State::Fetching(ref mut handler) => handler.on_response(res),
|
||||
}
|
||||
}
|
||||
|
||||
fn on_response_writable(&mut self, encoder: &mut Encoder<net::HttpStream>) -> Next {
|
||||
match self.state {
|
||||
State::Initial => Next::end(),
|
||||
State::Error(ref mut handler) => handler.on_response_writable(encoder),
|
||||
State::Fetching(ref mut handler) => handler.on_response_writable(encoder),
|
||||
}
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
struct WebInstaller {
|
||||
embeddable_on: Embeddable,
|
||||
token: String,
|
||||
}
|
||||
|
||||
impl ContentValidator for WebInstaller {
|
||||
type Error = String;
|
||||
|
||||
fn validate_and_install(self, response: fetch::Response) -> Result<ValidatorResponse, String> {
|
||||
let status = response.status();
|
||||
let is_html = response.is_html();
|
||||
let mime = response.content_type().unwrap_or(mime::TEXT_HTML);
|
||||
let mut handler = StreamingHandler::new(
|
||||
response,
|
||||
status,
|
||||
mime,
|
||||
self.embeddable_on,
|
||||
);
|
||||
if is_html {
|
||||
handler.set_initial_content(&format!(
|
||||
r#"<script src="/{}/inject.js"></script><script>history.replaceState({{}}, "", "/?{}{}/{}")</script>"#,
|
||||
apps::UTILS_PATH,
|
||||
apps::URL_REFERER,
|
||||
apps::WEB_PATH,
|
||||
&self.token,
|
||||
));
|
||||
}
|
||||
Ok(ValidatorResponse::Streaming(handler))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ description = "Ethcore Parity UI"
|
||||
homepage = "http://parity.io"
|
||||
license = "GPL-3.0"
|
||||
name = "parity-ui"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[build-dependencies]
|
||||
@@ -12,7 +12,7 @@ rustc_version = "0.1"
|
||||
[dependencies]
|
||||
parity-ui-dev = { path = "../../js", optional = true }
|
||||
# This is managed by the js/scripts/release.sh script on CI - keep it in a single line
|
||||
parity-ui-precompiled = { git = "https://github.com/paritytech/js-precompiled.git", optional = true, branch = "master" }
|
||||
parity-ui-precompiled = { git = "https://github.com/paritytech/js-precompiled.git", optional = true, branch = "stable" }
|
||||
|
||||
[features]
|
||||
no-precompiled-js = ["parity-ui-dev"]
|
||||
|
||||
@@ -3,7 +3,7 @@ description = "Ethcore development/test/build tools"
|
||||
homepage = "http://parity.io"
|
||||
license = "GPL-3.0"
|
||||
name = "ethcore-devtools"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -4,7 +4,7 @@ WORKDIR /build
|
||||
#ENV for build TAG
|
||||
ARG BUILD_TAG
|
||||
ENV BUILD_TAG ${BUILD_TAG:-master}
|
||||
RUN echo $BUILD_TAG
|
||||
RUN echo "Build tag:" $BUILD_TAG
|
||||
# install tools and dependencies
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --force-yes --no-install-recommends \
|
||||
@@ -48,7 +48,7 @@ RUN apt-get update && \
|
||||
# show backtraces
|
||||
RUST_BACKTRACE=1 && \
|
||||
# build parity
|
||||
cd /build&&git clone https://github.com/paritytech/parity && \
|
||||
cd /build&&git clone https://github.com/paritytech/parity && \
|
||||
cd parity && \
|
||||
git pull&& \
|
||||
git checkout $BUILD_TAG && \
|
||||
|
||||
227
docs/CHANGELOG-0.9.md
Normal file
227
docs/CHANGELOG-0.9.md
Normal file
@@ -0,0 +1,227 @@
|
||||
|
||||
## Parity [beta-0.9.1](https://github.com/paritytech/parity/releases/tag/beta-0.9.1) (2016-02-16)
|
||||
|
||||
Homestead transition block changed to 1100000.
|
||||
|
||||
- Beta patch to 0.9.1 [#445](https://github.com/paritytech/parity/pull/445)
|
||||
- Delay homestead transition [#430](https://github.com/paritytech/parity/pull/430)
|
||||
- (BETA) https link in the installer (?) [#392](https://github.com/paritytech/parity/pull/392)
|
||||
- beta: Check for handshake expiration before attempting replace [#377](https://github.com/paritytech/parity/pull/377)
|
||||
|
||||
## Parity [beta-0.9](https://github.com/paritytech/parity/releases/tag/beta-0.9) (2016-02-08)
|
||||
|
||||
First Parity Beta 0.9 released.
|
||||
|
||||
- Panic on missing counters; Client cleanup [#368](https://github.com/paritytech/parity/pull/368)
|
||||
- Update README for new PPAs. [#369](https://github.com/paritytech/parity/pull/369)
|
||||
- block_queue::clear should be more thorough [#365](https://github.com/paritytech/parity/pull/365)
|
||||
- Fixed an issue with forked counters [#363](https://github.com/paritytech/parity/pull/363)
|
||||
- Install parity [#362](https://github.com/paritytech/parity/pull/362)
|
||||
- DB directory versioning [#358](https://github.com/paritytech/parity/pull/358)
|
||||
- Raise FD limit for MacOS [#357](https://github.com/paritytech/parity/pull/357)
|
||||
- Travis slack integration. [#356](https://github.com/paritytech/parity/pull/356)
|
||||
- SignedTransaction structure [#350](https://github.com/paritytech/parity/pull/350)
|
||||
- License [#354](https://github.com/paritytech/parity/pull/354)
|
||||
- Performance optimizations [#353](https://github.com/paritytech/parity/pull/353)
|
||||
- Gitter in README. [#355](https://github.com/paritytech/parity/pull/355)
|
||||
- test efforts, receipt requests [#352](https://github.com/paritytech/parity/pull/352)
|
||||
- sync tests setup & local module coverage [#348](https://github.com/paritytech/parity/pull/348)
|
||||
- install parity script [#347](https://github.com/paritytech/parity/pull/347)
|
||||
- evmjit homestead merge [#342](https://github.com/paritytech/parity/pull/342)
|
||||
- Fixed sync stalling on fork [#343](https://github.com/paritytech/parity/pull/343)
|
||||
- Remerge 264 [#334](https://github.com/paritytech/parity/pull/334)
|
||||
- Ethsync tests bfix [#339](https://github.com/paritytech/parity/pull/339)
|
||||
- Fix default options. [#335](https://github.com/paritytech/parity/pull/335)
|
||||
- sync queue limit hotfix [#338](https://github.com/paritytech/parity/pull/338)
|
||||
- Network tests, separate local coverage for utils [#333](https://github.com/paritytech/parity/pull/333)
|
||||
- fix parity version so netstats can parse it [#332](https://github.com/paritytech/parity/pull/332)
|
||||
- reveal surprise [#331](https://github.com/paritytech/parity/pull/331)
|
||||
- Revert removal of `new_code`. [#330](https://github.com/paritytech/parity/pull/330)
|
||||
- Network mod tests first part [#329](https://github.com/paritytech/parity/pull/329)
|
||||
- Look ma no `dead_code` [#323](https://github.com/paritytech/parity/pull/323)
|
||||
- Fixing JIT, Updating hook to run `ethcore` tests. [#326](https://github.com/paritytech/parity/pull/326)
|
||||
- Final docs [#327](https://github.com/paritytech/parity/pull/327)
|
||||
- update install-deps.sh [#316](https://github.com/paritytech/parity/pull/316)
|
||||
- Finish all my docs. Fix previous test compilation. [#320](https://github.com/paritytech/parity/pull/320)
|
||||
- Additional evm tests (extops, call, jumps) and some docs [#317](https://github.com/paritytech/parity/pull/317)
|
||||
- More documentation. [#318](https://github.com/paritytech/parity/pull/318)
|
||||
- Additional documentation. [#315](https://github.com/paritytech/parity/pull/315)
|
||||
- unused functions cleanup [#310](https://github.com/paritytech/parity/pull/310)
|
||||
- update ethcore.github.io documentation automatically [#311](https://github.com/paritytech/parity/pull/311)
|
||||
- Another try with travis ci credentials [#314](https://github.com/paritytech/parity/pull/314)
|
||||
- Document some stuff. [#309](https://github.com/paritytech/parity/pull/309)
|
||||
- Check block parent on import; Peer timeouts [#303](https://github.com/paritytech/parity/pull/303)
|
||||
- Increasing coverage for evm. [#306](https://github.com/paritytech/parity/pull/306)
|
||||
- ethcore docs [#301](https://github.com/paritytech/parity/pull/301)
|
||||
- Replacing secure token for deployment [#305](https://github.com/paritytech/parity/pull/305)
|
||||
- doc.sh [#299](https://github.com/paritytech/parity/pull/299)
|
||||
- Building beta-* and stable-* tags [#302](https://github.com/paritytech/parity/pull/302)
|
||||
- Deploying artifacts for tags (release/beta) [#300](https://github.com/paritytech/parity/pull/300)
|
||||
- cov.sh to show coverage locally [#298](https://github.com/paritytech/parity/pull/298)
|
||||
- benchmark fixes [#297](https://github.com/paritytech/parity/pull/297)
|
||||
- Include JSONRPC CLI options. [#296](https://github.com/paritytech/parity/pull/296)
|
||||
- travis.yml fixes [#293](https://github.com/paritytech/parity/pull/293)
|
||||
- Improve version string. [#295](https://github.com/paritytech/parity/pull/295)
|
||||
- Fixed block queue test [#294](https://github.com/paritytech/parity/pull/294)
|
||||
- Util docs [#292](https://github.com/paritytech/parity/pull/292)
|
||||
- fixed building docs [#289](https://github.com/paritytech/parity/pull/289)
|
||||
- update travis to build PRs only against master [#290](https://github.com/paritytech/parity/pull/290)
|
||||
- Coverage effort [#272](https://github.com/paritytech/parity/pull/272)
|
||||
- updated docker containers [#288](https://github.com/paritytech/parity/pull/288)
|
||||
- rpc module fixes [#287](https://github.com/paritytech/parity/pull/287)
|
||||
- Test for Receipt RLP. [#282](https://github.com/paritytech/parity/pull/282)
|
||||
- Building from source guide [#284](https://github.com/paritytech/parity/pull/284)
|
||||
- Fixed neted empty list RLP encoding [#283](https://github.com/paritytech/parity/pull/283)
|
||||
- Fix CALLDATACOPY (and bonus CODECOPY, too!). [#279](https://github.com/paritytech/parity/pull/279)
|
||||
- added travis && coveralls badge to README.md [#280](https://github.com/paritytech/parity/pull/280)
|
||||
- coveralls coverage [#277](https://github.com/paritytech/parity/pull/277)
|
||||
- Travis [in progress] [#257](https://github.com/paritytech/parity/pull/257)
|
||||
- Travis on reorganized repo [#276](https://github.com/paritytech/parity/pull/276)
|
||||
- umbrella project [#275](https://github.com/paritytech/parity/pull/275)
|
||||
- Ethash disk cache [#273](https://github.com/paritytech/parity/pull/273)
|
||||
- Parity executable name and version [#274](https://github.com/paritytech/parity/pull/274)
|
||||
- Dockerfile [#195](https://github.com/paritytech/parity/pull/195)
|
||||
- Garbage collection test fix [#267](https://github.com/paritytech/parity/pull/267)
|
||||
- Fix stCallCreateCallCodeTest, add more tests [#271](https://github.com/paritytech/parity/pull/271)
|
||||
- Moved sync out of ethcore crate; Added block validation [#265](https://github.com/paritytech/parity/pull/265)
|
||||
- RLP encoder refactoring [#252](https://github.com/paritytech/parity/pull/252)
|
||||
- Chain sync tests and minor refactoring [#264](https://github.com/paritytech/parity/pull/264)
|
||||
- Common log init function [#263](https://github.com/paritytech/parity/pull/263)
|
||||
- changed max vm depth from 128 to 64, change homestead block to 1_000_000 [#262](https://github.com/paritytech/parity/pull/262)
|
||||
- fixed blockchain tests crash on log init [#261](https://github.com/paritytech/parity/pull/261)
|
||||
- Blockchain tests and some helpers for guarding temp directory [#256](https://github.com/paritytech/parity/pull/256)
|
||||
- Fix logging and random tests. [#260](https://github.com/paritytech/parity/pull/260)
|
||||
- Fix difficulty calculation algo. [#259](https://github.com/paritytech/parity/pull/259)
|
||||
- fix submodule version [#258](https://github.com/paritytech/parity/pull/258)
|
||||
- temp dir spawn refactoring [#246](https://github.com/paritytech/parity/pull/246)
|
||||
- fixed tests submodule branch [#254](https://github.com/paritytech/parity/pull/254)
|
||||
- rpc net methods returns real peer count && protocol version [#253](https://github.com/paritytech/parity/pull/253)
|
||||
- Add homestead & random tests. [#245](https://github.com/paritytech/parity/pull/245)
|
||||
- Fixing suicide with self-refund to be consistent with CPP. [#247](https://github.com/paritytech/parity/pull/247)
|
||||
- stubs for rpc methods [#251](https://github.com/paritytech/parity/pull/251)
|
||||
- clippy, missing docs, renaming etc. [#244](https://github.com/paritytech/parity/pull/244)
|
||||
- impl missing methods in tests [#243](https://github.com/paritytech/parity/pull/243)
|
||||
- General tests and some helpers [#239](https://github.com/paritytech/parity/pull/239)
|
||||
- Note additional tests are fixed, fix doc test. [#242](https://github.com/paritytech/parity/pull/242)
|
||||
- jsonrpc http server [#193](https://github.com/paritytech/parity/pull/193)
|
||||
- Ethash nonce is H64 not a u64 [#240](https://github.com/paritytech/parity/pull/240)
|
||||
- Fix import for bcMultiChainTest [#236](https://github.com/paritytech/parity/pull/236)
|
||||
- Client basic tests [#232](https://github.com/paritytech/parity/pull/232)
|
||||
- Fix ensure_db_good() and flush_queue(), block refactoring, check block format, be strict. [#231](https://github.com/paritytech/parity/pull/231)
|
||||
- Rlp [#207](https://github.com/paritytech/parity/pull/207)
|
||||
- Schedule documentation [#219](https://github.com/paritytech/parity/pull/219)
|
||||
- U256<->H256 Conversion [#206](https://github.com/paritytech/parity/pull/206)
|
||||
- Spawning new thread when we are reaching stack limit [#217](https://github.com/paritytech/parity/pull/217)
|
||||
- Blockchain tests [#211](https://github.com/paritytech/parity/pull/211)
|
||||
- fixed failing sync test [#218](https://github.com/paritytech/parity/pull/218)
|
||||
- Removing println [#216](https://github.com/paritytech/parity/pull/216)
|
||||
- Cleaning readme [#212](https://github.com/paritytech/parity/pull/212)
|
||||
- Fixing delegatecall [#196](https://github.com/paritytech/parity/pull/196)
|
||||
- Autogenerate the Args from the docopt macro. [#205](https://github.com/paritytech/parity/pull/205)
|
||||
- Networking fixes [#202](https://github.com/paritytech/parity/pull/202)
|
||||
- Argument parsing from CLI [#204](https://github.com/paritytech/parity/pull/204)
|
||||
- Removed wildcard from clippy version [#203](https://github.com/paritytech/parity/pull/203)
|
||||
- Fixed tests and tweaked sync progress report [#201](https://github.com/paritytech/parity/pull/201)
|
||||
- Heavy tests [#199](https://github.com/paritytech/parity/pull/199)
|
||||
- Mutithreaded IO [#198](https://github.com/paritytech/parity/pull/198)
|
||||
- Populating last_hashes [#197](https://github.com/paritytech/parity/pull/197)
|
||||
- Fixing clippy stuff [#170](https://github.com/paritytech/parity/pull/170)
|
||||
- basic .travis.yml [#194](https://github.com/paritytech/parity/pull/194)
|
||||
- Generating coverage reports. [#190](https://github.com/paritytech/parity/pull/190)
|
||||
- Adding doc requests comments [#192](https://github.com/paritytech/parity/pull/192)
|
||||
- moved src/bin/client.rs -> src/bin/client/main.rs [#185](https://github.com/paritytech/parity/pull/185)
|
||||
- removed overflowing_shr [#188](https://github.com/paritytech/parity/pull/188)
|
||||
- fixed wrapping ops on latest nightly [#187](https://github.com/paritytech/parity/pull/187)
|
||||
- Pruned state DB [#176](https://github.com/paritytech/parity/pull/176)
|
||||
- Memory management for cache [#180](https://github.com/paritytech/parity/pull/180)
|
||||
- Implement signs having low-s. [#183](https://github.com/paritytech/parity/pull/183)
|
||||
- Introduce sha3 crate and use it in ethash [#178](https://github.com/paritytech/parity/pull/178)
|
||||
- Multithreaded block queue [#173](https://github.com/paritytech/parity/pull/173)
|
||||
- Iterator for NibbleSlice and TrieDB. [#171](https://github.com/paritytech/parity/pull/171)
|
||||
- Handling all possible overflows [#145](https://github.com/paritytech/parity/pull/145)
|
||||
- Global secp256k1 context [#164](https://github.com/paritytech/parity/pull/164)
|
||||
- Ethash [#152](https://github.com/paritytech/parity/pull/152)
|
||||
- Move util into here [#153](https://github.com/paritytech/parity/pull/153)
|
||||
- EVM Interpreter [#103](https://github.com/paritytech/parity/pull/103)
|
||||
- Homestead transition support, maybe. [#141](https://github.com/paritytech/parity/pull/141)
|
||||
- externalities refactor [#131](https://github.com/paritytech/parity/pull/131)
|
||||
- More open files. [#140](https://github.com/paritytech/parity/pull/140)
|
||||
- Single array for logs output. [#133](https://github.com/paritytech/parity/pull/133)
|
||||
- Client app event handler [#132](https://github.com/paritytech/parity/pull/132)
|
||||
- Various consensus fixes. [#130](https://github.com/paritytech/parity/pull/130)
|
||||
- callcode builtins tests pass [#127](https://github.com/paritytech/parity/pull/127)
|
||||
- Client state syncing [#119](https://github.com/paritytech/parity/pull/119)
|
||||
- Split externalities from executive. [#126](https://github.com/paritytech/parity/pull/126)
|
||||
- executive error on not enoguh base gas [#124](https://github.com/paritytech/parity/pull/124)
|
||||
- Gav [#125](https://github.com/paritytech/parity/pull/125)
|
||||
- builtin sets excepted to true [#123](https://github.com/paritytech/parity/pull/123)
|
||||
- More state tests. [#122](https://github.com/paritytech/parity/pull/122)
|
||||
- updated to rocksdb wrapper version 0.3 [#121](https://github.com/paritytech/parity/pull/121)
|
||||
- out_of_gas -> excepted [#120](https://github.com/paritytech/parity/pull/120)
|
||||
- Parametrizing evm::Factory [#111](https://github.com/paritytech/parity/pull/111)
|
||||
- stLogs tests passing [#118](https://github.com/paritytech/parity/pull/118)
|
||||
- Fix executive. [#117](https://github.com/paritytech/parity/pull/117)
|
||||
- Fixes for marek's shooting from the hip. [#116](https://github.com/paritytech/parity/pull/116)
|
||||
- Executive revert fix [#115](https://github.com/paritytech/parity/pull/115)
|
||||
- Fix storage/account and add butress test. [#114](https://github.com/paritytech/parity/pull/114)
|
||||
- Refactored Pod & Diff types into separate files, JSON infrastructure revamp. [#113](https://github.com/paritytech/parity/pull/113)
|
||||
- Fix storage stuff and introduce per-item dirty-tracking. [#112](https://github.com/paritytech/parity/pull/112)
|
||||
- Check logs in state tests. [#109](https://github.com/paritytech/parity/pull/109)
|
||||
- executive gas calculation fixes [#108](https://github.com/paritytech/parity/pull/108)
|
||||
- proper gas calculation in executive [#107](https://github.com/paritytech/parity/pull/107)
|
||||
- Fixing MaxDepth param for executive [#105](https://github.com/paritytech/parity/pull/105)
|
||||
- Fix determination of state roots. [#106](https://github.com/paritytech/parity/pull/106)
|
||||
- transact substracts tx_gas [#104](https://github.com/paritytech/parity/pull/104)
|
||||
- Pretty-print and fix for state. [#102](https://github.com/paritytech/parity/pull/102)
|
||||
- Tier step price. [#101](https://github.com/paritytech/parity/pull/101)
|
||||
- Refactor Diff datastructures. [#100](https://github.com/paritytech/parity/pull/100)
|
||||
- externalities use u256 instead of u64 for gas calculation [#99](https://github.com/paritytech/parity/pull/99)
|
||||
- Executive tests [#97](https://github.com/paritytech/parity/pull/97)
|
||||
- State conensus tests now print mismatching diff on fail. [#98](https://github.com/paritytech/parity/pull/98)
|
||||
- State testing framework. First test is failing. [#96](https://github.com/paritytech/parity/pull/96)
|
||||
- executive tests [#95](https://github.com/paritytech/parity/pull/95)
|
||||
- Use U512s for ether cost calculation, complete transaction API [#94](https://github.com/paritytech/parity/pull/94)
|
||||
- Utils for consensus test decoding and better layout. [#93](https://github.com/paritytech/parity/pull/93)
|
||||
- executive fixes + tests [#89](https://github.com/paritytech/parity/pull/89)
|
||||
- All transaction tests pass. Nicer testing framework. [#92](https://github.com/paritytech/parity/pull/92)
|
||||
- Block verification tests; BlockProvider blockchain trait for testing [#88](https://github.com/paritytech/parity/pull/88)
|
||||
- State::exists, docs and tests. [#87](https://github.com/paritytech/parity/pull/87)
|
||||
- Add tests module, add two more transaction tests. [#86](https://github.com/paritytech/parity/pull/86)
|
||||
- bring back removed tests, removed build warnings [#82](https://github.com/paritytech/parity/pull/82)
|
||||
- Nicer transaction validation API. Nicer OutOfBounds API in general. [#85](https://github.com/paritytech/parity/pull/85)
|
||||
- Transaction fixes and consensus tests (all passing) [#84](https://github.com/paritytech/parity/pull/84)
|
||||
- fixed getting block info in evmjit + tests [#81](https://github.com/paritytech/parity/pull/81)
|
||||
- evm tests cleanup [#80](https://github.com/paritytech/parity/pull/80)
|
||||
- renamed VmFactory -> Factory [#77](https://github.com/paritytech/parity/pull/77)
|
||||
- fixed rust-evmjit description of improper_ctypes usage [#76](https://github.com/paritytech/parity/pull/76)
|
||||
- jit feature enabled by default [#75](https://github.com/paritytech/parity/pull/75)
|
||||
- evm [#52](https://github.com/paritytech/parity/pull/52)
|
||||
- state clone [#74](https://github.com/paritytech/parity/pull/74)
|
||||
- Block Verification (no tests yet) [#72](https://github.com/paritytech/parity/pull/72)
|
||||
- Improvements to LogEntry and Transaction [#73](https://github.com/paritytech/parity/pull/73)
|
||||
- Use getter in header in preparation for a Header trait; additional testing in enact_block(). [#64](https://github.com/paritytech/parity/pull/64)
|
||||
- BlockChain sync and Client app [#55](https://github.com/paritytech/parity/pull/55)
|
||||
- Block enactment (including test) [#63](https://github.com/paritytech/parity/pull/63)
|
||||
- Block complete. Needs tests. [#62](https://github.com/paritytech/parity/pull/62)
|
||||
- More on OpenBlock::close; State::kill_account added [#61](https://github.com/paritytech/parity/pull/61)
|
||||
- Remove genesis module, add more chain specs and separate out ethereum-specific stuff [#60](https://github.com/paritytech/parity/pull/60)
|
||||
- State::new_contract, camelCase engine params, missing param [#59](https://github.com/paritytech/parity/pull/59)
|
||||
- Use reorganisation [#58](https://github.com/paritytech/parity/pull/58)
|
||||
- Initial Ethash/Block skeleton implementations. [#57](https://github.com/paritytech/parity/pull/57)
|
||||
- Spec with tested Morden genesis decoder and builtins. [#54](https://github.com/paritytech/parity/pull/54)
|
||||
- Move all chain parameters into `engine_params` [#50](https://github.com/paritytech/parity/pull/50)
|
||||
- jit ffi improvements [please review] [#51](https://github.com/paritytech/parity/pull/51)
|
||||
- blockchain [please review] [#34](https://github.com/paritytech/parity/pull/34)
|
||||
- Move information from networkparams.rs into spec.rs [#48](https://github.com/paritytech/parity/pull/48)
|
||||
- Move bulking out in Engine/Params. [#47](https://github.com/paritytech/parity/pull/47)
|
||||
- Removed need for mutation in State. [#46](https://github.com/paritytech/parity/pull/46)
|
||||
- State::code and State::storage_at + tests. [#45](https://github.com/paritytech/parity/pull/45)
|
||||
- State functions for balance and nonce operations [#44](https://github.com/paritytech/parity/pull/44)
|
||||
- Account::storage_at, Account::ensure_cached and tests. [#43](https://github.com/paritytech/parity/pull/43)
|
||||
- Additional tests. [#42](https://github.com/paritytech/parity/pull/42)
|
||||
- seal todo done [#41](https://github.com/paritytech/parity/pull/41)
|
||||
- missing rustc_serialize crate && rlp `as_list` function [#40](https://github.com/paritytech/parity/pull/40)
|
||||
- More methods in Account, documentation and tests. [#39](https://github.com/paritytech/parity/pull/39)
|
||||
- Minor reworking of Account. [#38](https://github.com/paritytech/parity/pull/38)
|
||||
- Add Account and State classes. [#37](https://github.com/paritytech/parity/pull/37)
|
||||
- Revert regressions [#36](https://github.com/paritytech/parity/pull/36)
|
||||
315
docs/CHANGELOG-1.0.md
Normal file
315
docs/CHANGELOG-1.0.md
Normal file
@@ -0,0 +1,315 @@
|
||||
## Parity [v1.0.2](https://github.com/paritytech/parity/releases/tag/v1.0.2) (2016-04-11)
|
||||
|
||||
Parity 1.0.2 release improves Json RPC compatibility and fixes a number of stability issues.
|
||||
|
||||
- Flush password prompt [#1031](https://github.com/paritytech/parity/pull/1031)
|
||||
- [beta] dependencies update [#949](https://github.com/paritytech/parity/pull/949)
|
||||
- Master to beta v1.0.2 [#922](https://github.com/paritytech/parity/pull/922)
|
||||
- Master to beta 1.0.2 [#908](https://github.com/paritytech/parity/pull/908)
|
||||
|
||||
## Parity [v1.0.1](https://github.com/paritytech/parity/releases/tag/v1.0.1) (2016-03-28)
|
||||
|
||||
Parity 1.0.1 update fixes a number of issues with Json RPC, transaction propagation and syncing.
|
||||
|
||||
- Improved sync error handling [#905](https://github.com/paritytech/parity/pull/905)
|
||||
- Publish locally-made transactions to peers. [#851](https://github.com/paritytech/parity/pull/851)
|
||||
- Merge fixes from master to beta [#845](https://github.com/paritytech/parity/pull/845)
|
||||
- Full sync restart on bad block [#844](https://github.com/paritytech/parity/pull/844)
|
||||
- Make BlockNumber optional, fix eth_call [#828](https://github.com/paritytech/parity/pull/828)
|
||||
- Web3sha3 beta [#826](https://github.com/paritytech/parity/pull/826)
|
||||
- Use network id for the web3_net_version return. [#821](https://github.com/paritytech/parity/pull/821)
|
||||
- Fix mining from spinning [#806](https://github.com/paritytech/parity/pull/806)
|
||||
- Merge master to beta [#796](https://github.com/paritytech/parity/pull/796)
|
||||
|
||||
## Parity [v1.0.0](https://github.com/paritytech/parity/releases/tag/v1.0.0) (2016-03-24)
|
||||
|
||||
Parity 1.0.0 release adds the following features:
|
||||
|
||||
- Standard JsonRPC interface.
|
||||
- Full Homestead compatibility.
|
||||
- Transaction management.
|
||||
- Mining with external miner.
|
||||
- Account management.
|
||||
- Geth key chain compatibility.
|
||||
- Additional command line options.
|
||||
- State trie pruning.
|
||||
- Cache and queue footprint.
|
||||
- Network discovery & NAT traversal.
|
||||
- Custom chain specification files.
|
||||
|
||||
Note that in this release the state database is in archive (full) mode by default. Run with one of the `--pruning` options to enable pruning.
|
||||
|
||||
- First part of multi-mining support [#804](https://github.com/paritytech/parity/pull/804)
|
||||
- Fixing future-current transactions clash [#802](https://github.com/paritytech/parity/pull/802)
|
||||
- Increase threads to num_cpus & fix author reporting [#800](https://github.com/paritytech/parity/pull/800)
|
||||
- another batch of rpc improvements [#798](https://github.com/paritytech/parity/pull/798)
|
||||
- Avoid tracing DELEGATECALL and CALLCODE. Plus tests for it. [#794](https://github.com/paritytech/parity/pull/794)
|
||||
- complete getting started steps for OS X [#793](https://github.com/paritytech/parity/pull/793)
|
||||
- Auto detect available port (with fixed test) [#788](https://github.com/paritytech/parity/pull/788)
|
||||
- eth_getTransactionReceipt [#792](https://github.com/paritytech/parity/pull/792)
|
||||
- Comprehensive tests for tracing transactions [#791](https://github.com/paritytech/parity/pull/791)
|
||||
- Disable preparing work package if miners don't ask for it. [#771](https://github.com/paritytech/parity/pull/771)
|
||||
- Listen on all interfaces for JSONRPC by default. [#786](https://github.com/paritytech/parity/pull/786)
|
||||
- eth_call [#783](https://github.com/paritytech/parity/pull/783)
|
||||
- Revert "Auto detect available port" [#789](https://github.com/paritytech/parity/pull/789)
|
||||
- added output to execution result [#777](https://github.com/paritytech/parity/pull/777)
|
||||
- Auto detect available port [#782](https://github.com/paritytech/parity/pull/782)
|
||||
- Allow 0x prefix for --author. [#785](https://github.com/paritytech/parity/pull/785)
|
||||
- updated dependencies, moved rpctest to its own submodule [#784](https://github.com/paritytech/parity/pull/784)
|
||||
- use ethjson module to load chain json tests [#778](https://github.com/paritytech/parity/pull/778)
|
||||
- Tracing implemented. [#772](https://github.com/paritytech/parity/pull/772)
|
||||
- test ethjson module on travis [#780](https://github.com/paritytech/parity/pull/780)
|
||||
- batch of rpc fixes [#775](https://github.com/paritytech/parity/pull/775)
|
||||
- rpctest executable [#757](https://github.com/paritytech/parity/pull/757)
|
||||
- Refactoring transaction_queue error handling and `update_sealing` method. [#753](https://github.com/paritytech/parity/pull/753)
|
||||
- Avoid importing transactions with gas above 1.1*block_gas_limit to transaction queue [#760](https://github.com/paritytech/parity/pull/760)
|
||||
- Removing transactions that failed to be pushed to block. [#752](https://github.com/paritytech/parity/pull/752)
|
||||
- Updating clippy [#766](https://github.com/paritytech/parity/pull/766)
|
||||
- Attempting to add all transactions to mined block [#754](https://github.com/paritytech/parity/pull/754)
|
||||
- Prettier version w/o git dir; Use rustc compile time version [#761](https://github.com/paritytech/parity/pull/761)
|
||||
- Stop adding transactions to queue while not fully synced [#751](https://github.com/paritytech/parity/pull/751)
|
||||
- Verify sender's balance before importing transaction to queue [#746](https://github.com/paritytech/parity/pull/746)
|
||||
- Returning number of transactions pending in block not queue [#750](https://github.com/paritytech/parity/pull/750)
|
||||
- Speeding up build [#733](https://github.com/paritytech/parity/pull/733)
|
||||
- adding check for a sync when giving work to miner [#742](https://github.com/paritytech/parity/pull/742)
|
||||
- json deserialization module [#745](https://github.com/paritytech/parity/pull/745)
|
||||
- Update install-parity.sh [#749](https://github.com/paritytech/parity/pull/749)
|
||||
- Restart sync on getting old unknown header [#747](https://github.com/paritytech/parity/pull/747)
|
||||
- Missing return for #737 [#744](https://github.com/paritytech/parity/pull/744)
|
||||
- Enact block with uncles test [#741](https://github.com/paritytech/parity/pull/741)
|
||||
- Fix outdated libc version on dependency [#740](https://github.com/paritytech/parity/pull/740)
|
||||
- Fixing possible race in transaction queue [#735](https://github.com/paritytech/parity/pull/735)
|
||||
- Sync fixed again [#737](https://github.com/paritytech/parity/pull/737)
|
||||
- Don't change best block until extras is committed. [#734](https://github.com/paritytech/parity/pull/734)
|
||||
- stable only until travis speedup [#736](https://github.com/paritytech/parity/pull/736)
|
||||
- Optimizing uint operations (architecture independent) [#629](https://github.com/paritytech/parity/pull/629)
|
||||
- Add RLP, not a data item. [#725](https://github.com/paritytech/parity/pull/725)
|
||||
- PV63 receipts response [#687](https://github.com/paritytech/parity/pull/687)
|
||||
- another batch of rpc tests [#723](https://github.com/paritytech/parity/pull/723)
|
||||
- dockerfiles update [#726](https://github.com/paritytech/parity/pull/726)
|
||||
- Lock reports to avoid out of order badness. [#721](https://github.com/paritytech/parity/pull/721)
|
||||
- Fixed handshake leak [#722](https://github.com/paritytech/parity/pull/722)
|
||||
- Allow configuration of target gas limit. [#719](https://github.com/paritytech/parity/pull/719)
|
||||
- Version 1.1 in master [#714](https://github.com/paritytech/parity/pull/714)
|
||||
- Silence UDP warnings [#720](https://github.com/paritytech/parity/pull/720)
|
||||
- Rpc personal tests [#715](https://github.com/paritytech/parity/pull/715)
|
||||
- Fixing warnings [#704](https://github.com/paritytech/parity/pull/704)
|
||||
- docopts cleanups [#713](https://github.com/paritytech/parity/pull/713)
|
||||
- Removed rocksdb build dependency [#717](https://github.com/paritytech/parity/pull/717)
|
||||
- Fixed splitting Neighbours packet [#710](https://github.com/paritytech/parity/pull/710)
|
||||
- management of account expiration & memory [#701](https://github.com/paritytech/parity/pull/701)
|
||||
- Remove EarlyMerge from user docs. [#708](https://github.com/paritytech/parity/pull/708)
|
||||
- Fixes and traces for refcountdb. [#705](https://github.com/paritytech/parity/pull/705)
|
||||
- Check for NULL_RLP in AccountDB [#706](https://github.com/paritytech/parity/pull/706)
|
||||
- ethminer as crate [#700](https://github.com/paritytech/parity/pull/700)
|
||||
- Old ref-counted DB code [#692](https://github.com/paritytech/parity/pull/692)
|
||||
- next batch of rpc tests and fixes [#699](https://github.com/paritytech/parity/pull/699)
|
||||
- implemented eth_getStorageAt rpc method, added more tests for rpc [#695](https://github.com/paritytech/parity/pull/695)
|
||||
- Fix JournalDB era marker [#690](https://github.com/paritytech/parity/pull/690)
|
||||
- More sync fixes [#685](https://github.com/paritytech/parity/pull/685)
|
||||
- mark some key tests as heavy [#694](https://github.com/paritytech/parity/pull/694)
|
||||
- Limit incoming connections [#693](https://github.com/paritytech/parity/pull/693)
|
||||
- Updating clippy [#688](https://github.com/paritytech/parity/pull/688)
|
||||
- eth_accounts, eth_getBalance rpc functions && tests [#691](https://github.com/paritytech/parity/pull/691)
|
||||
- state query for archive jdb [#683](https://github.com/paritytech/parity/pull/683)
|
||||
- Fix for option 1 of JournalDB [#658](https://github.com/paritytech/parity/pull/658)
|
||||
- Rename into something that is a little more descriptive. [#689](https://github.com/paritytech/parity/pull/689)
|
||||
- JournalDB with in-memory overlay (option2) [#634](https://github.com/paritytech/parity/pull/634)
|
||||
- additional (failing) SecretStore test [#682](https://github.com/paritytech/parity/pull/682)
|
||||
- Updating clippy & fixing warnings. [#670](https://github.com/paritytech/parity/pull/670)
|
||||
- rpc web3 tests [#681](https://github.com/paritytech/parity/pull/681)
|
||||
- Making personal json-rpc configurable via cli [#677](https://github.com/paritytech/parity/pull/677)
|
||||
- RPC Pending Transactions Filter [#661](https://github.com/paritytech/parity/pull/661)
|
||||
- Rearrange journaldb infrastructure to make more extensible [#678](https://github.com/paritytech/parity/pull/678)
|
||||
- JournalDB -> Box<JournalDB>, and it's a trait. [#673](https://github.com/paritytech/parity/pull/673)
|
||||
- fix warning for transaction_queue.add usage [#676](https://github.com/paritytech/parity/pull/676)
|
||||
- Adding std::mem back (only for asm) [#680](https://github.com/paritytech/parity/pull/680)
|
||||
- update readme to exclude beta step (stable is ok) [#679](https://github.com/paritytech/parity/pull/679)
|
||||
- fixed U256 and transaction request deserialization [#675](https://github.com/paritytech/parity/pull/675)
|
||||
- More geth compatibility. [#666](https://github.com/paritytech/parity/pull/666)
|
||||
- Removing running clippy by default on nightly. [#671](https://github.com/paritytech/parity/pull/671)
|
||||
- rpc net submodule tests [#667](https://github.com/paritytech/parity/pull/667)
|
||||
- Client module overhaul [#665](https://github.com/paritytech/parity/pull/665)
|
||||
- Rpc transaction signing [#587](https://github.com/paritytech/parity/pull/587)
|
||||
- Transaction queue exposed via JSON rpc. [#652](https://github.com/paritytech/parity/pull/652)
|
||||
- Remove unneeded locking [#499](https://github.com/paritytech/parity/pull/499)
|
||||
- extend sync status interface to sync provider [#664](https://github.com/paritytech/parity/pull/664)
|
||||
- --archive is default. --pruning is option. [#663](https://github.com/paritytech/parity/pull/663)
|
||||
- jsonrpc uses client and sync interfaces [#641](https://github.com/paritytech/parity/pull/641)
|
||||
- Expose transaction insertion in sync lib [#609](https://github.com/paritytech/parity/pull/609)
|
||||
- Removing get prefix from poll_info [#660](https://github.com/paritytech/parity/pull/660)
|
||||
- Tx queue update height bug [#657](https://github.com/paritytech/parity/pull/657)
|
||||
- Tx_queue_docs -> To master [#651](https://github.com/paritytech/parity/pull/651)
|
||||
- blockchain import_route [#645](https://github.com/paritytech/parity/pull/645)
|
||||
- Stop workers before stopping event loop [#655](https://github.com/paritytech/parity/pull/655)
|
||||
- Validate sender before importing to queue [#650](https://github.com/paritytech/parity/pull/650)
|
||||
- Gas price threshold for transactions [#640](https://github.com/paritytech/parity/pull/640)
|
||||
- `dev` feature enabled when compiling without `--release` [#627](https://github.com/paritytech/parity/pull/627)
|
||||
- Don't call mark_as_bad needlessly [#648](https://github.com/paritytech/parity/pull/648)
|
||||
- Fixed sync handling large forks [#647](https://github.com/paritytech/parity/pull/647)
|
||||
- Additional documentation for transaction queue [#631](https://github.com/paritytech/parity/pull/631)
|
||||
- Transaction Queue Integration [#607](https://github.com/paritytech/parity/pull/607)
|
||||
- Keys cli [#639](https://github.com/paritytech/parity/pull/639)
|
||||
- fix build warning [#643](https://github.com/paritytech/parity/pull/643)
|
||||
- updated jsonrpc-core and http-server libs [#642](https://github.com/paritytech/parity/pull/642)
|
||||
- jsonrpc panics gracefully shutdown client [#638](https://github.com/paritytech/parity/pull/638)
|
||||
- Fixing CLI parameters [#633](https://github.com/paritytech/parity/pull/633)
|
||||
- Normal CLI options with geth. [#628](https://github.com/paritytech/parity/pull/628)
|
||||
- Do not remove the peer immediately on send error [#626](https://github.com/paritytech/parity/pull/626)
|
||||
- Jsonrpc block behind [#622](https://github.com/paritytech/parity/pull/622)
|
||||
- Remove println!s. [#624](https://github.com/paritytech/parity/pull/624)
|
||||
- JournalDB option 1 fix [#613](https://github.com/paritytech/parity/pull/613)
|
||||
- Network tracing cleanup [#611](https://github.com/paritytech/parity/pull/611)
|
||||
- Revert "Transaction Queue integration" [#602](https://github.com/paritytech/parity/pull/602)
|
||||
- fix benches compilation [#601](https://github.com/paritytech/parity/pull/601)
|
||||
- Transaction Queue integration [#595](https://github.com/paritytech/parity/pull/595)
|
||||
- verifier trait improvements [#597](https://github.com/paritytech/parity/pull/597)
|
||||
- build on rust stable [#600](https://github.com/paritytech/parity/pull/600)
|
||||
- Geth import silent if no geth [#599](https://github.com/paritytech/parity/pull/599)
|
||||
- Additional journaldb logging and assert [#593](https://github.com/paritytech/parity/pull/593)
|
||||
- Uncle inclusion in block authoring. [#578](https://github.com/paritytech/parity/pull/578)
|
||||
- Fixed potential deadlock on startup [#592](https://github.com/paritytech/parity/pull/592)
|
||||
- Fixing an overflow panic [#591](https://github.com/paritytech/parity/pull/591)
|
||||
- Fixed one more case of sync stalling [#590](https://github.com/paritytech/parity/pull/590)
|
||||
- JournalDB can now operate in "archive" mode [#589](https://github.com/paritytech/parity/pull/589)
|
||||
- Secret store integration with client [#586](https://github.com/paritytech/parity/pull/586)
|
||||
- fix build on nightly rust [#588](https://github.com/paritytech/parity/pull/588)
|
||||
- deserialization for uint generic [#585](https://github.com/paritytech/parity/pull/585)
|
||||
- TransactionsQueue implementation [#559](https://github.com/paritytech/parity/pull/559)
|
||||
- JSON-RPC personal service (follows #582) [#583](https://github.com/paritytech/parity/pull/583)
|
||||
- making key directory thread-safe [#582](https://github.com/paritytech/parity/pull/582)
|
||||
- verifier trait [#581](https://github.com/paritytech/parity/pull/581)
|
||||
- shrink_to_fit after removing hashes. [#580](https://github.com/paritytech/parity/pull/580)
|
||||
- support for rpc polling [#504](https://github.com/paritytech/parity/pull/504)
|
||||
- limit serde codegen only to rpc types submodule [#569](https://github.com/paritytech/parity/pull/569)
|
||||
- fork test for Issue test/568 [#573](https://github.com/paritytech/parity/pull/573)
|
||||
- Fixing clippy warnings = small refactoring of `request_blocks` [#560](https://github.com/paritytech/parity/pull/560)
|
||||
- Improved journaldb logging [#571](https://github.com/paritytech/parity/pull/571)
|
||||
- Additional check to ancient enactments. [#570](https://github.com/paritytech/parity/pull/570)
|
||||
- chainfilter shouldn't exclude to_block from results [#564](https://github.com/paritytech/parity/pull/564)
|
||||
- Fix coverage test run [#567](https://github.com/paritytech/parity/pull/567)
|
||||
- Mining [#547](https://github.com/paritytech/parity/pull/547)
|
||||
- fix uint warnings [#565](https://github.com/paritytech/parity/pull/565)
|
||||
- Finished blockchain generator. [#562](https://github.com/paritytech/parity/pull/562)
|
||||
- fixed broken master [#563](https://github.com/paritytech/parity/pull/563)
|
||||
- uint to separate crate [#544](https://github.com/paritytech/parity/pull/544)
|
||||
- improved test chain generator [#554](https://github.com/paritytech/parity/pull/554)
|
||||
- Fixing spelling in propagade->propagate [#558](https://github.com/paritytech/parity/pull/558)
|
||||
- Changing RefCell to Cell in transaction. [#557](https://github.com/paritytech/parity/pull/557)
|
||||
- Fix for morden consensus. [#556](https://github.com/paritytech/parity/pull/556)
|
||||
- blockchain generator [#550](https://github.com/paritytech/parity/pull/550)
|
||||
- Sparse Table Implementation (Row, Col) -> Val [#545](https://github.com/paritytech/parity/pull/545)
|
||||
- fixup install script [#548](https://github.com/paritytech/parity/pull/548)
|
||||
- Fixing clippy warnings [#546](https://github.com/paritytech/parity/pull/546)
|
||||
- ignore out directory [#543](https://github.com/paritytech/parity/pull/543)
|
||||
- u256 full multiplication [#539](https://github.com/paritytech/parity/pull/539)
|
||||
- Fix panic when downloading stales, update homestead transition [#537](https://github.com/paritytech/parity/pull/537)
|
||||
- changing x64 asm config [#534](https://github.com/paritytech/parity/pull/534)
|
||||
- uncomment state transition tests [#533](https://github.com/paritytech/parity/pull/533)
|
||||
- jsonrpc uses weak pointers to client [#532](https://github.com/paritytech/parity/pull/532)
|
||||
- Morden switch to Homestead rules at #494,000. [#531](https://github.com/paritytech/parity/pull/531)
|
||||
- Blockchain module cleanup [#524](https://github.com/paritytech/parity/pull/524)
|
||||
- Multiplication issue + very exhaustive tests for it [#528](https://github.com/paritytech/parity/pull/528)
|
||||
- EIP-8 [#498](https://github.com/paritytech/parity/pull/498)
|
||||
- Make "random" trie tests fully deterministic. [#527](https://github.com/paritytech/parity/pull/527)
|
||||
- updated serde to version 0.7.0 [#526](https://github.com/paritytech/parity/pull/526)
|
||||
- Better memory management [#516](https://github.com/paritytech/parity/pull/516)
|
||||
- Typo [#523](https://github.com/paritytech/parity/pull/523)
|
||||
- U512 add/sub optimize [#521](https://github.com/paritytech/parity/pull/521)
|
||||
- Account management + geth keystore import (no utility crate added) [#509](https://github.com/paritytech/parity/pull/509)
|
||||
- Delayed UPnP initialization [#505](https://github.com/paritytech/parity/pull/505)
|
||||
- Fixing marking blocks as bad & SyncMessage bugs + small client refactoring. [#503](https://github.com/paritytech/parity/pull/503)
|
||||
- optimization of U256 [#515](https://github.com/paritytech/parity/pull/515)
|
||||
- Removed rocksdb from build scripts and instructions [#520](https://github.com/paritytech/parity/pull/520)
|
||||
- RocksDB abstraction layer + Hash index for state DB [#464](https://github.com/paritytech/parity/pull/464)
|
||||
- bloomfilter [#418](https://github.com/paritytech/parity/pull/418)
|
||||
- Fixed a race condition when connecting peer disconnects immediately [#519](https://github.com/paritytech/parity/pull/519)
|
||||
- ignore intellij idea project files as well [#518](https://github.com/paritytech/parity/pull/518)
|
||||
- updated version of unicase [#517](https://github.com/paritytech/parity/pull/517)
|
||||
- jsonrpc security, cors headers, fixed #359 [#493](https://github.com/paritytech/parity/pull/493)
|
||||
- Rust implementations to replace data tables (#161) [#482](https://github.com/paritytech/parity/pull/482)
|
||||
- fix issue with starting requested block number was not included itself [#512](https://github.com/paritytech/parity/pull/512)
|
||||
- fixed travis --org GH_TOKEN [#510](https://github.com/paritytech/parity/pull/510)
|
||||
- Improved log format [#506](https://github.com/paritytech/parity/pull/506)
|
||||
- Log address on failed connection attempt [#502](https://github.com/paritytech/parity/pull/502)
|
||||
- Bumping clippy and fixing warnings. [#501](https://github.com/paritytech/parity/pull/501)
|
||||
- Bumping versions. Fixes #496 [#500](https://github.com/paritytech/parity/pull/500)
|
||||
- Manage final user-input errors. [#494](https://github.com/paritytech/parity/pull/494)
|
||||
- Remove unneeded code, fix minor potential issue with length. [#495](https://github.com/paritytech/parity/pull/495)
|
||||
- Remove "unknown" from version string. [#488](https://github.com/paritytech/parity/pull/488)
|
||||
- Include git commit date & hash. [#486](https://github.com/paritytech/parity/pull/486)
|
||||
- Use proper version string. [#485](https://github.com/paritytech/parity/pull/485)
|
||||
- Networking fixes [#480](https://github.com/paritytech/parity/pull/480)
|
||||
- Fix potential deadlock on node table update [#484](https://github.com/paritytech/parity/pull/484)
|
||||
- Squash more warnings [#481](https://github.com/paritytech/parity/pull/481)
|
||||
- dev/test/build tools to separate crate [#477](https://github.com/paritytech/parity/pull/477)
|
||||
- Back to original slab crate [#479](https://github.com/paritytech/parity/pull/479)
|
||||
- Better user errors. [#476](https://github.com/paritytech/parity/pull/476)
|
||||
- UDP Discovery [#440](https://github.com/paritytech/parity/pull/440)
|
||||
- update readme with rust override [#475](https://github.com/paritytech/parity/pull/475)
|
||||
- fixed warnings on rust beta [#474](https://github.com/paritytech/parity/pull/474)
|
||||
- Secret store (part2 - encrypted key/value svc) [#449](https://github.com/paritytech/parity/pull/449)
|
||||
- Kill bad test. [#473](https://github.com/paritytech/parity/pull/473)
|
||||
- Make clippy an optional dependency [#422](https://github.com/paritytech/parity/pull/422)
|
||||
- parity compiling fine [#469](https://github.com/paritytech/parity/pull/469)
|
||||
- compiling ethcore on beta [#468](https://github.com/paritytech/parity/pull/468)
|
||||
- Utils compiling in beta [#467](https://github.com/paritytech/parity/pull/467)
|
||||
- Get rid of lru_cache dependency [#466](https://github.com/paritytech/parity/pull/466)
|
||||
- Add daemonization. [#459](https://github.com/paritytech/parity/pull/459)
|
||||
- Master upgrade [#448](https://github.com/paritytech/parity/pull/448)
|
||||
- Remove contributing stuff now that we have CLA bot. [#447](https://github.com/paritytech/parity/pull/447)
|
||||
- Add Morden bootnode. [#446](https://github.com/paritytech/parity/pull/446)
|
||||
- beta fixes to master [#441](https://github.com/paritytech/parity/pull/441)
|
||||
- Secret store (part1 - key management) [#423](https://github.com/paritytech/parity/pull/423)
|
||||
- Use 1100000 as the homestead transition, fix build instructions. [#438](https://github.com/paritytech/parity/pull/438)
|
||||
- More sync and propagation fixes [#420](https://github.com/paritytech/parity/pull/420)
|
||||
- back to cargo crates [#436](https://github.com/paritytech/parity/pull/436)
|
||||
- Fixing clippy warnings [#435](https://github.com/paritytech/parity/pull/435)
|
||||
- preserving root cargo lock [#434](https://github.com/paritytech/parity/pull/434)
|
||||
- Nightly fix [#432](https://github.com/paritytech/parity/pull/432)
|
||||
- nightly fixes [#431](https://github.com/paritytech/parity/pull/431)
|
||||
- Delay Homestead transition from 1,000,000. [#429](https://github.com/paritytech/parity/pull/429)
|
||||
- Nightly fix effort (still should fail) [#428](https://github.com/paritytech/parity/pull/428)
|
||||
- clippy version update, docopt-macro moving to fork [#425](https://github.com/paritytech/parity/pull/425)
|
||||
- Network/Sync fixes and optimizations [#416](https://github.com/paritytech/parity/pull/416)
|
||||
- Use latest era instead of end era as journal marker [#414](https://github.com/paritytech/parity/pull/414)
|
||||
- api changes [#402](https://github.com/paritytech/parity/pull/402)
|
||||
- Option for no init nodes. [#408](https://github.com/paritytech/parity/pull/408)
|
||||
- Fixed block_bodies not returning a list [#406](https://github.com/paritytech/parity/pull/406)
|
||||
- Fix test. [#405](https://github.com/paritytech/parity/pull/405)
|
||||
- Allow path to be configured. [#404](https://github.com/paritytech/parity/pull/404)
|
||||
- Upnp [#400](https://github.com/paritytech/parity/pull/400)
|
||||
- eth_syncing, fixed #397 [#398](https://github.com/paritytech/parity/pull/398)
|
||||
- Using modified version of ctrlc that catches SIGTERM [#399](https://github.com/paritytech/parity/pull/399)
|
||||
- Catching panics. [#396](https://github.com/paritytech/parity/pull/396)
|
||||
- jsonrpc [#391](https://github.com/paritytech/parity/pull/391)
|
||||
- Externalities tests (still clumsy) [#394](https://github.com/paritytech/parity/pull/394)
|
||||
- excluding test code itself from coverage [#395](https://github.com/paritytech/parity/pull/395)
|
||||
- Additional tweaks to options. [#390](https://github.com/paritytech/parity/pull/390)
|
||||
- --chain option for setting which network to go on. [#388](https://github.com/paritytech/parity/pull/388)
|
||||
- Ethash unit tests final [#387](https://github.com/paritytech/parity/pull/387)
|
||||
- jsonrpc [#374](https://github.com/paritytech/parity/pull/374)
|
||||
- Editorconfig file. [#384](https://github.com/paritytech/parity/pull/384)
|
||||
- Coverage effort [in progress] [#382](https://github.com/paritytech/parity/pull/382)
|
||||
- making root kcov runner similar to the one running on CI [#380](https://github.com/paritytech/parity/pull/380)
|
||||
- add gcc as a dependency to dockerfiles [#381](https://github.com/paritytech/parity/pull/381)
|
||||
- Check for handshake expiration before attempting connection replace [#375](https://github.com/paritytech/parity/pull/375)
|
||||
- Blocks propagation [#364](https://github.com/paritytech/parity/pull/364)
|
||||
- Network params. [#376](https://github.com/paritytech/parity/pull/376)
|
||||
- Add parity-node-zero to bootnodes. [#373](https://github.com/paritytech/parity/pull/373)
|
||||
- kcov uses travis_job_id instead of coveralls token [#370](https://github.com/paritytech/parity/pull/370)
|
||||
- Add parity-node-zero.ethcore.io to boot nodes. [#371](https://github.com/paritytech/parity/pull/371)
|
||||
|
||||
## Parity [v1.0.0-rc1](https://github.com/paritytech/parity/releases/tag/v1.0.0-rc1) (2016-03-15)
|
||||
|
||||
First Parity 1.0.0 release candidate.
|
||||
|
||||
- Version 1.0 in beta [#712](https://github.com/paritytech/parity/pull/712)
|
||||
- Fix test for beta [#617](https://github.com/paritytech/parity/pull/617)
|
||||
- JournalDB fix option 1 for beta [#614](https://github.com/paritytech/parity/pull/614)
|
||||
- Failing test. [#606](https://github.com/paritytech/parity/pull/606)
|
||||
- Fix transition points [#604](https://github.com/paritytech/parity/pull/604)
|
||||
- (BETA) Update README.md [#549](https://github.com/paritytech/parity/pull/549)
|
||||
- (BETA) instructions for beta release channel [#456](https://github.com/paritytech/parity/pull/456)
|
||||
- (BETA) fix nightly - remerge [#454](https://github.com/paritytech/parity/pull/454)
|
||||
- (BETA) fixing nightly version for beta [#452](https://github.com/paritytech/parity/pull/452)
|
||||
152
docs/CHANGELOG-1.1.md
Normal file
152
docs/CHANGELOG-1.1.md
Normal file
@@ -0,0 +1,152 @@
|
||||
## Parity [v1.1.0](https://github.com/paritytech/parity/releases/tag/v1.1.0) (2016-05-02)
|
||||
|
||||
Parity 1.1.0 introduces:
|
||||
|
||||
- Transaction tracing. Parity now optionally indexes & stores message-call/"internal transaction" information and provides additional RPC for querying.
|
||||
- Web interface for logs, status & JSON RPC.
|
||||
- Improved JSON RPC compatibility.
|
||||
- Reduced memory footprint.
|
||||
- Optimized EVM interpreter performance.
|
||||
|
||||
Full Changes:
|
||||
|
||||
- Exposing default extra data via ethcore RPC [#1032](https://github.com/paritytech/parity/pull/1032)
|
||||
- Net etiquette [#1028](https://github.com/paritytech/parity/pull/1028)
|
||||
- Bumping clippy & fixing warnings [#1024](https://github.com/paritytech/parity/pull/1024)
|
||||
- Tracedb interface && cli [#997](https://github.com/paritytech/parity/pull/997)
|
||||
- Switching to geth-attach supporting version of rpc core and server [#1022](https://github.com/paritytech/parity/pull/1022)
|
||||
- Fixing status page displaying homestead [#1020](https://github.com/paritytech/parity/pull/1020)
|
||||
- Core tracedb functionality. [#996](https://github.com/paritytech/parity/pull/996)
|
||||
- RPC method for supported modules [#1019](https://github.com/paritytech/parity/pull/1019)
|
||||
- Updating status page [#1015](https://github.com/paritytech/parity/pull/1015)
|
||||
- Disabling wallet [#1017](https://github.com/paritytech/parity/pull/1017)
|
||||
- More detailed fatal error reporting [#1016](https://github.com/paritytech/parity/pull/1016)
|
||||
- Support 'pending' block in RPC [#1007](https://github.com/paritytech/parity/pull/1007)
|
||||
- Enable pending block when there is local transaction pending. [#1005](https://github.com/paritytech/parity/pull/1005)
|
||||
- updating key files permissions on save [#1010](https://github.com/paritytech/parity/pull/1010)
|
||||
- IPC JSON RPC (for external interface) [#1009](https://github.com/paritytech/parity/pull/1009)
|
||||
- Fixing Firefox authorization issues [#1013](https://github.com/paritytech/parity/pull/1013)
|
||||
- cargo update [#1012](https://github.com/paritytech/parity/pull/1012)
|
||||
- Switching to rust-url@1.0.0 [#1011](https://github.com/paritytech/parity/pull/1011)
|
||||
- Exception handling in RPC & WebApps [#988](https://github.com/paritytech/parity/pull/988)
|
||||
- Fixed uint deserialization from hex [#1008](https://github.com/paritytech/parity/pull/1008)
|
||||
- Tweak timeout and packet size to handle slow networks better [#1004](https://github.com/paritytech/parity/pull/1004)
|
||||
- db key is generic and can be made smaller [#1006](https://github.com/paritytech/parity/pull/1006)
|
||||
- IPC with new serialization [#998](https://github.com/paritytech/parity/pull/998)
|
||||
- make jsonrpc api engine agnostic [#1001](https://github.com/paritytech/parity/pull/1001)
|
||||
- updated cargo.lock [#1002](https://github.com/paritytech/parity/pull/1002)
|
||||
- updated parity dependencies [#993](https://github.com/paritytech/parity/pull/993)
|
||||
- Auto (with codegen) binary serializer [#980](https://github.com/paritytech/parity/pull/980)
|
||||
- Fixing transaction queue last_nonces update [#995](https://github.com/paritytech/parity/pull/995)
|
||||
- import route contains omitted blocks [#994](https://github.com/paritytech/parity/pull/994)
|
||||
- fixed encoding 0u8 [#992](https://github.com/paritytech/parity/pull/992)
|
||||
- Use latest netstats [#989](https://github.com/paritytech/parity/pull/989)
|
||||
- RPC shared external miner [#984](https://github.com/paritytech/parity/pull/984)
|
||||
- Additional RPC methods for settings [#983](https://github.com/paritytech/parity/pull/983)
|
||||
- Fixing transaction_queue deadlock [#985](https://github.com/paritytech/parity/pull/985)
|
||||
- Refactoring of `parity/main.rs` [#981](https://github.com/paritytech/parity/pull/981)
|
||||
- Fixing clippy warnings. [#982](https://github.com/paritytech/parity/pull/982)
|
||||
- Bumping status page [#977](https://github.com/paritytech/parity/pull/977)
|
||||
- querying extras separated to its own module [#972](https://github.com/paritytech/parity/pull/972)
|
||||
- Exposing application logs via RPC. [#976](https://github.com/paritytech/parity/pull/976)
|
||||
- Addressing binary serialization for db types [#966](https://github.com/paritytech/parity/pull/966)
|
||||
- removed redundant unwraps [#935](https://github.com/paritytech/parity/pull/935)
|
||||
- fixed transaction queue merge conflict [#975](https://github.com/paritytech/parity/pull/975)
|
||||
- Configurable limit for transaction queue (CLI & Ethcore-RPC) [#974](https://github.com/paritytech/parity/pull/974)
|
||||
- Enforce limit caused `last_nonce` to return incorrect values. [#973](https://github.com/paritytech/parity/pull/973)
|
||||
- Even more detailed errors for transaction queue [#969](https://github.com/paritytech/parity/pull/969)
|
||||
- temporary fix of panic in blockchain garbage collection [#970](https://github.com/paritytech/parity/pull/970)
|
||||
- IPC codegen - some minor fixes & enhancements [#967](https://github.com/paritytech/parity/pull/967)
|
||||
- Additional logging for transactions [#968](https://github.com/paritytech/parity/pull/968)
|
||||
- refactored blockchain extras keys building [#963](https://github.com/paritytech/parity/pull/963)
|
||||
- Using hyper-mio branch in webapps. [#957](https://github.com/paritytech/parity/pull/957)
|
||||
- Remove nanomsg from build-dependencies [#965](https://github.com/paritytech/parity/pull/965)
|
||||
- Fix build for --target=armv7-unknown-linux-gnueabihf [#964](https://github.com/paritytech/parity/pull/964)
|
||||
- IPC RPC codegen extra feature [#962](https://github.com/paritytech/parity/pull/962)
|
||||
- IPC RPC codegen for generic implementation [#961](https://github.com/paritytech/parity/pull/961)
|
||||
- using db_path directory when upgrading [#960](https://github.com/paritytech/parity/pull/960)
|
||||
- IPC hypervisor [#958](https://github.com/paritytech/parity/pull/958)
|
||||
- Removing a transaction from queue now removes all from this sender with lower nonces. [#950](https://github.com/paritytech/parity/pull/950)
|
||||
- bump status page version 0.1.7 [#955](https://github.com/paritytech/parity/pull/955)
|
||||
- Changing cors header to be optional [#956](https://github.com/paritytech/parity/pull/956)
|
||||
- Update ARM Dockerfile [#959](https://github.com/paritytech/parity/pull/959)
|
||||
- Sensible gas limits for eth_sendTransaction [#953](https://github.com/paritytech/parity/pull/953)
|
||||
- Fix upgrade script and make parity run when no .parity dir. [#954](https://github.com/paritytech/parity/pull/954)
|
||||
- Tracing and docs for --pruning=auto. [#952](https://github.com/paritytech/parity/pull/952)
|
||||
- IPC serialization for custom parameters [#946](https://github.com/paritytech/parity/pull/946)
|
||||
- default filter from block should be Latest, not Earliest [#948](https://github.com/paritytech/parity/pull/948)
|
||||
- README.md: removes sudo from multirust installation [#943](https://github.com/paritytech/parity/pull/943)
|
||||
- Disable long lines formatting + ethash example. [#939](https://github.com/paritytech/parity/pull/939)
|
||||
- Ethcore-specific RPC methods for altering miner parameters. [#934](https://github.com/paritytech/parity/pull/934)
|
||||
- Use ethcore nanomsg bindings [#941](https://github.com/paritytech/parity/pull/941)
|
||||
- Update IPC codegen to latest syntax libs [#938](https://github.com/paritytech/parity/pull/938)
|
||||
- IPC documentation [#937](https://github.com/paritytech/parity/pull/937)
|
||||
- Bumping clippy and fixing warnings. [#936](https://github.com/paritytech/parity/pull/936)
|
||||
- Pruning auto [#927](https://github.com/paritytech/parity/pull/927)
|
||||
- IPC persistent client link [#933](https://github.com/paritytech/parity/pull/933)
|
||||
- IPC persistent client link [#930](https://github.com/paritytech/parity/pull/930)
|
||||
- IPC handshake (negotiating protocol/api version) [#928](https://github.com/paritytech/parity/pull/928)
|
||||
- Upgrade logic between versions [#914](https://github.com/paritytech/parity/pull/914)
|
||||
- executive tracing cleanup [#903](https://github.com/paritytech/parity/pull/903)
|
||||
- Ethcore-specific RPC methods [#923](https://github.com/paritytech/parity/pull/923)
|
||||
- Parameter to allow user to force the sealing mechanism [#918](https://github.com/paritytech/parity/pull/918)
|
||||
- updated dependencies [#921](https://github.com/paritytech/parity/pull/921)
|
||||
- Fixed send transaction deadlock [#920](https://github.com/paritytech/parity/pull/920)
|
||||
- --unlock is comma-delimited. [#916](https://github.com/paritytech/parity/pull/916)
|
||||
- fixed eth_getLogs [#915](https://github.com/paritytech/parity/pull/915)
|
||||
- create provided custom dir for keys if none [#912](https://github.com/paritytech/parity/pull/912)
|
||||
- spec loading cleanup [#858](https://github.com/paritytech/parity/pull/858)
|
||||
- WebApps HTTP Basic Auth Support [#906](https://github.com/paritytech/parity/pull/906)
|
||||
- Removing match on constant [#888](https://github.com/paritytech/parity/pull/888)
|
||||
- Update auth.rs [#907](https://github.com/paritytech/parity/pull/907)
|
||||
- Enabling webapps compilation by default [#904](https://github.com/paritytech/parity/pull/904)
|
||||
- fixed #895 [#898](https://github.com/paritytech/parity/pull/898)
|
||||
- Support for compile-time included WebApplications. [#899](https://github.com/paritytech/parity/pull/899)
|
||||
- Propagate transaction queue [#894](https://github.com/paritytech/parity/pull/894)
|
||||
- Use new json RPC server [#901](https://github.com/paritytech/parity/pull/901)
|
||||
- Gracefully dying when trying to enable RPC and app is compiled without it. [#900](https://github.com/paritytech/parity/pull/900)
|
||||
- Additional logging and friendlier error messages [#893](https://github.com/paritytech/parity/pull/893)
|
||||
- Avoid signalling readiness when app is about to be closed. [#897](https://github.com/paritytech/parity/pull/897)
|
||||
- fixed #875 and added tests for eth_sendTransaction [#890](https://github.com/paritytech/parity/pull/890)
|
||||
- passing key path to all invocations [#891](https://github.com/paritytech/parity/pull/891)
|
||||
- Fixed eth_call nonce and gas handling [#892](https://github.com/paritytech/parity/pull/892)
|
||||
- ipc rpc with nano transport (simple duplex) [#886](https://github.com/paritytech/parity/pull/886)
|
||||
- Bumping clippy and fixing warnings [#889](https://github.com/paritytech/parity/pull/889)
|
||||
- More descriptive expectations to transaction queue consistency. [#878](https://github.com/paritytech/parity/pull/878)
|
||||
- uint bug - replace add with or [#879](https://github.com/paritytech/parity/pull/879)
|
||||
- Fixing typo in bigint [#877](https://github.com/paritytech/parity/pull/877)
|
||||
- update misleading cli help msg for author [#874](https://github.com/paritytech/parity/pull/874)
|
||||
- Find geth data store cross-platform. [#871](https://github.com/paritytech/parity/pull/871)
|
||||
- Import geth 1.4.0 keys [#872](https://github.com/paritytech/parity/pull/872)
|
||||
- Syntax helpers for IPC RPC (part 2) [#854](https://github.com/paritytech/parity/pull/854)
|
||||
- Fixed bootnode URL and error message [#870](https://github.com/paritytech/parity/pull/870)
|
||||
- replace popcnt with mov (861) [#867](https://github.com/paritytech/parity/pull/867)
|
||||
- weekly dependencies update [#865](https://github.com/paritytech/parity/pull/865)
|
||||
- Remove unused mut [#866](https://github.com/paritytech/parity/pull/866)
|
||||
- fixed #855 [#864](https://github.com/paritytech/parity/pull/864)
|
||||
- simplified trace from functions, removed clippy warnings [#862](https://github.com/paritytech/parity/pull/862)
|
||||
- Update deprecated HashDB methods in docs. [#857](https://github.com/paritytech/parity/pull/857)
|
||||
- refactored loading transaction json tests [#853](https://github.com/paritytech/parity/pull/853)
|
||||
- reorganised price info lookup [#852](https://github.com/paritytech/parity/pull/852)
|
||||
- Publish locally-made transactions to peers. [#850](https://github.com/paritytech/parity/pull/850)
|
||||
- Add generalbeck's token [#847](https://github.com/paritytech/parity/pull/847)
|
||||
- Fix response for mining. [#846](https://github.com/paritytech/parity/pull/846)
|
||||
- USD-based pricing of gas. [#843](https://github.com/paritytech/parity/pull/843)
|
||||
- Parity can accept older work packages [#811](https://github.com/paritytech/parity/pull/811)
|
||||
- Caching for computing seed hashes (#541) [#841](https://github.com/paritytech/parity/pull/841)
|
||||
- checking transaction queue for pending transaction [#838](https://github.com/paritytech/parity/pull/838)
|
||||
- refactored loading of state tests [#817](https://github.com/paritytech/parity/pull/817)
|
||||
- tests for deserialization of transaction from issue #835 [#837](https://github.com/paritytech/parity/pull/837)
|
||||
- unlocks with no expiration [on top of 833] [#834](https://github.com/paritytech/parity/pull/834)
|
||||
- Unlock accounts on CLI. [#833](https://github.com/paritytech/parity/pull/833)
|
||||
- Make BlockNumber optional, fix eth_call [#829](https://github.com/paritytech/parity/pull/829)
|
||||
- Test socket to common test code (ethcore-devtools) [#831](https://github.com/paritytech/parity/pull/831)
|
||||
- Use network id for the web3_net_version return. [#822](https://github.com/paritytech/parity/pull/822)
|
||||
- json-rpc web3_sha3 [#824](https://github.com/paritytech/parity/pull/824)
|
||||
- remove some unused files [#819](https://github.com/paritytech/parity/pull/819)
|
||||
- debug symbols for master/beta [#818](https://github.com/paritytech/parity/pull/818)
|
||||
- Syntax helpers for IPC RPC [#809](https://github.com/paritytech/parity/pull/809)
|
||||
- refactored loading of execution tests [#803](https://github.com/paritytech/parity/pull/803)
|
||||
- Rustfmt.toml [#805](https://github.com/paritytech/parity/pull/805)
|
||||
- install-partiy runs brew reinstall parity on osx [#810](https://github.com/paritytech/parity/pull/810)
|
||||
- Fix mining from spinning [#807](https://github.com/paritytech/parity/pull/807)
|
||||
374
docs/CHANGELOG-1.2.md
Normal file
374
docs/CHANGELOG-1.2.md
Normal file
@@ -0,0 +1,374 @@
|
||||
## Parity [v1.2.4](https://github.com/paritytech/parity/releases/tag/v1.2.4) (2016-08-09)
|
||||
|
||||
Parity 1.2.4 is a maintenance release that fixes a [few](https://github.com/paritytech/parity/pull/1888/commits) issues related to mining and peer synchronization.
|
||||
This release is marked as stable.
|
||||
|
||||
- Backports for beta [#1888](https://github.com/paritytech/parity/pull/1888)
|
||||
- BETA: fixed trace_transaction crash when block contained suicide [#1782](https://github.com/paritytech/parity/pull/1782)
|
||||
|
||||
## Parity [v1.2.3](https://github.com/paritytech/parity/releases/tag/v1.2.3) (2016-07-31)
|
||||
|
||||
Parity 1.2.3 is a patch release that addresses network stability issues for both Ethereum HF and Ethereum classic chains and brings a few changes to the transaction tracing API.
|
||||
|
||||
#### Tracing API changes
|
||||
- Added tracing for `CALLCODE`, `DELEGATECALL` and `SUICIDE`
|
||||
- `trace_call` returns traces in flat format
|
||||
- Added 2 new methods: `trace_rawTransaction` and `trace_replayTransaction`
|
||||
|
||||
Note that to continue using tracing features in this version you need to re-sync the blockchain. This can be done by using `parity export $HOME/ethereum-chain-backup.rlp` , deleting the database usually located at `~/.parity/906a34e69aec8c0d` followed by `parity import $HOME/ethereum-chain-backup.rlp`.
|
||||
|
||||
- [beta] Updating UI [#1778](https://github.com/paritytech/parity/pull/1778)
|
||||
- tracing backport [#1770](https://github.com/paritytech/parity/pull/1770)
|
||||
- Backport commits to beta [#1763](https://github.com/paritytech/parity/pull/1763)
|
||||
- Deadlock on incoming connection (#1672) [#1675](https://github.com/paritytech/parity/pull/1675)
|
||||
- [BETA] Removed DAO soft fork traces [#1640](https://github.com/paritytech/parity/pull/1640)
|
||||
|
||||
|
||||
## Parity [v1.2.2](https://github.com/paritytech/parity/releases/tag/v1.2.2) (2016-07-16)
|
||||
|
||||
#### New
|
||||
- DAO hard-fork.
|
||||
|
||||
DAO hard-fork implementation conforms to the [specification](https://blog.slock.it/hard-fork-specification-24b889e70703) and is enabled by default.
|
||||
|
||||
#### Changed
|
||||
- `--reseal-on-txs` defaults to `own`.
|
||||
- DAO soft-fork support has been removed along with related command line options.
|
||||
|
||||
#### Resolved issues
|
||||
- `--db-cache-size` consuming too much memory.
|
||||
- `eth_getWork` RPC response additionally includes the block number.
|
||||
- Skipping transactions with invalid nonces when pushing to block.
|
||||
- Update sealing just once when externally importing many blocks (#1541).
|
||||
- Transaction tracing skipping simple transactions (#1606).
|
||||
- Other small fixes and improvements.
|
||||
|
||||
Full changelog
|
||||
|
||||
- DAO hard-fork (#1483) [#1636](https://github.com/paritytech/parity/pull/1636)
|
||||
- Backports for beta [#1628](https://github.com/paritytech/parity/pull/1628)
|
||||
- don't batch best block for branches (#1623) [#1626](https://github.com/paritytech/parity/pull/1626)
|
||||
- Merge bugfixes from master to beta [#1605](https://github.com/paritytech/parity/pull/1605)
|
||||
- (BETA) using block options cache instead of general cache for rocksdb [#1613](https://github.com/paritytech/parity/pull/1613)
|
||||
- Backport sealing fixes to beta [#1583](https://github.com/paritytech/parity/pull/1583)
|
||||
- v1.2.2 in beta [#1581](https://github.com/paritytech/parity/pull/1581)
|
||||
- Skipping transactions with invalid nonces when pushing to block. (#1545) [#1547](https://github.com/paritytech/parity/pull/1547)
|
||||
|
||||
|
||||
## Parity [v1.2.1](https://github.com/paritytech/parity/releases/tag/v1.2.1) (2016-07-01)
|
||||
|
||||
#### New
|
||||
- Options for more precise mining tuning (see below).
|
||||
- Informative notification when block mined.
|
||||
- HTTP signal on new work-package.
|
||||
- Optimised database insertion for self-mined blocks.
|
||||
- Short-circuit for local transaction gas-price approval.
|
||||
- A number of issues related to mining have been fixed.
|
||||
|
||||
##### Mining options
|
||||
- `--author` is now required for mining.
|
||||
- `--reseal-on-txs` Specify which transactions should force the node to reseal a block. By default parity updates the seal on incoming transactions to reduce transaction latency. Set this option to `none` to force updates on new blocks only.
|
||||
- `--reseal-min-period` Can be used to control how often a new pending block is generated if `none` is not selected on prior option.
|
||||
- `--work-queue-size` Controls how many pending blocks to keep in memory.
|
||||
- `--relay-set` Can be used to enable more strict transaction verification.
|
||||
- `--remove-solved` Move solved blocks from the work package queue instead of cloning them. This gives a slightly faster import speed, but means that extra solutions submitted for the same work package will go unused.
|
||||
- `--notify-work` Accepts a list of URLs that will receive a POST request when new work package is available. The body of the POST message is JSON encoded and has the same format as `eth_getWork` RPC response.
|
||||
|
||||
##### RPC
|
||||
|
||||
`eth_getWork` RPC response additionally includes the block number.
|
||||
|
||||
##### DAO soft-fork
|
||||
|
||||
DAO soft-fork control options have been replaced by the single `--fork` option which disables the soft-fork by default.
|
||||
|
||||
#### Changes
|
||||
|
||||
- v1.2.1 in beta [#1492](https://github.com/paritytech/parity/pull/1492)
|
||||
- (BETA) add artifacts [#1420](https://github.com/paritytech/parity/pull/1420)
|
||||
|
||||
## Parity [v1.2.0: "Security"](https://github.com/paritytech/parity/releases/tag/v1.2.0) (2016-06-24)
|
||||
|
||||
[Blog post](https://blog.parity.io/announcing-parity-1-2/)
|
||||
|
||||
#### New
|
||||
|
||||
- Transaction signing UI.
|
||||
- IPC/RPC module.
|
||||
- Optimised mining support.
|
||||
- Windows build.
|
||||
- DAO soft-fork support.
|
||||
|
||||
##### Transaction signing UI
|
||||
|
||||
This is a new framework for signing transactions. It fulfills three requirements:
|
||||
- You should never have to type your passwords into a Dapp.
|
||||
- No Javascript code should ever hold a secret.
|
||||
- No transaction should ever be signed without the consent of the user.
|
||||
|
||||
The feature is enabled through the `--signer` flag. When enabled, the user must ensure at least one "Signer UI" is set-up for managing transaction confirmation. There are two such UIs available; one through a Google Chrome Extension, separately installable and the second through a special web page hosted locally. Set-up must be done once for each such UI, through copying and pasting a token from the output console of Parity into the UI. Specific instructions are given in the UI.
|
||||
|
||||
From this point on, no transaction may ever be signed by Parity except through one of these allowed Signer UIs, and no password should ever be entered anywhere else.
|
||||
|
||||
##### IPC/RPC module and Mist/Geth compatibility
|
||||
|
||||
Should be started with `--geth` to ensure Mist compatibility.
|
||||
|
||||
##### Optimised mining support
|
||||
|
||||
Numerous improvements and optimisations have been added to our mining implementation. A large "active queue" ensures that late-included transactions are included in the mined block without sacrificing older results from latent-reported `ethminer` results.
|
||||
|
||||
##### Windows build
|
||||
|
||||
We're happy to announce full Windows support with 1.2!
|
||||
|
||||
##### Soft-fork
|
||||
|
||||
This release includes support for the proposed [DAO soft-fork](https://docs.google.com/document/d/10RktunzjKNfp6Y8Cu4EhR5V9IqxEZq42LU126EYhWY4/pub). Upon upgrade, all mining nodes can vote for or against the soft fork (this is done through altering the block gas limit; a gas limit of at most 4M results in the soft-fork being triggered).
|
||||
|
||||
By default, nodes vote "for" the DAO soft-fork (and try to reduce the gas limit to 3.1M). To vote against the soft-fork (keeping it at 4.7M), run with `--dont-help-rescue-dao`. Not upgrading is not recommended; if the majority votes with a soft-fork, an upgrade will be necessary to mine on the correct chain.
|
||||
|
||||
#### Changed
|
||||
- Fast pruning method is now default for a fresh sync.
|
||||
- Web UI renamed to Dapps UI.
|
||||
- JSONRPC and Dapps UI enabled by default.
|
||||
- CLI options ending `-off` renamed to GNU-consistent prefix `--no-`.
|
||||
- Dynamic gas-pricing (data feed and statistical techniques used to determine optimum gas prices).
|
||||
|
||||
Full changes:
|
||||
|
||||
- Signer enabled by default for UI [#1417](https://github.com/paritytech/parity/pull/1417)
|
||||
- Remove experimental pruning options. [#1415](https://github.com/paritytech/parity/pull/1415)
|
||||
- Fixing interface and port for parity ui [#1414](https://github.com/paritytech/parity/pull/1414)
|
||||
- Configurable gas limit cap. [#1405](https://github.com/paritytech/parity/pull/1405)
|
||||
- Bumping TopBar, Minimal SignerUI and wallet [#1413](https://github.com/paritytech/parity/pull/1413)
|
||||
- Sync: Update highest block for progress reporting [#1411](https://github.com/paritytech/parity/pull/1411)
|
||||
- Tweaked CLI options for the release [#1407](https://github.com/paritytech/parity/pull/1407)
|
||||
- Further rocksdb tuning [#1409](https://github.com/paritytech/parity/pull/1409)
|
||||
- Fixing jit compilation [#1406](https://github.com/paritytech/parity/pull/1406)
|
||||
- Bump clippy [#1403](https://github.com/paritytech/parity/pull/1403)
|
||||
- Shortcut SF condition when canon known [#1401](https://github.com/paritytech/parity/pull/1401)
|
||||
- Additional assertions for internal state of queue [#1402](https://github.com/paritytech/parity/pull/1402)
|
||||
- Replace deprecated hashdb trait names [#1394](https://github.com/paritytech/parity/pull/1394)
|
||||
- rpc api by default for ipc [#1400](https://github.com/paritytech/parity/pull/1400)
|
||||
- Ensure judging the SF trigger by relative branch [#1399](https://github.com/paritytech/parity/pull/1399)
|
||||
- Signer with unlocked account working as expected. [#1398](https://github.com/paritytech/parity/pull/1398)
|
||||
- Make --signer default. [#1392](https://github.com/paritytech/parity/pull/1392)
|
||||
- Presale wallet [#1376](https://github.com/paritytech/parity/pull/1376)
|
||||
- Removing signer connection limit [#1396](https://github.com/paritytech/parity/pull/1396)
|
||||
- Optional gas price in transactions come from statistics [#1388](https://github.com/paritytech/parity/pull/1388)
|
||||
- Update README.md with cargo install [ci-skip] [#1389](https://github.com/paritytech/parity/pull/1389)
|
||||
- Fixing possible overflow during multiplication [#1381](https://github.com/paritytech/parity/pull/1381)
|
||||
- Update SF to latest spec [#1386](https://github.com/paritytech/parity/pull/1386)
|
||||
- Sync optimization [#1385](https://github.com/paritytech/parity/pull/1385)
|
||||
- Fixing order of if statements to avoid overflows. [#1384](https://github.com/paritytech/parity/pull/1384)
|
||||
- New topbar & signer UI [#1383](https://github.com/paritytech/parity/pull/1383)
|
||||
- Install trigger for DAO-rescue soft-fork. [#1329](https://github.com/paritytech/parity/pull/1329)
|
||||
- Rocksdb flush/compact limit [#1375](https://github.com/paritytech/parity/pull/1375)
|
||||
- CentOS Dockerfile [#1377](https://github.com/paritytech/parity/pull/1377)
|
||||
- RPC method to return number of unconfirmed transactions... [#1371](https://github.com/paritytech/parity/pull/1371)
|
||||
- bump jsonrpc-http-server [#1369](https://github.com/paritytech/parity/pull/1369)
|
||||
- Fix lock order when updating sealing [#1364](https://github.com/paritytech/parity/pull/1364)
|
||||
- Update sealing on new transactions [#1365](https://github.com/paritytech/parity/pull/1365)
|
||||
- Fixed panic on aborted connection [#1370](https://github.com/paritytech/parity/pull/1370)
|
||||
- importing presale wallet [#1368](https://github.com/paritytech/parity/pull/1368)
|
||||
- Set default database file size large enough [#1363](https://github.com/paritytech/parity/pull/1363)
|
||||
- Reserved peers rpc API [#1360](https://github.com/paritytech/parity/pull/1360)
|
||||
- Fixing replacing transaction with lower gas_price result. [#1343](https://github.com/paritytech/parity/pull/1343)
|
||||
- fixed migration of empty pruning dir [#1362](https://github.com/paritytech/parity/pull/1362)
|
||||
- Transaction processing queue [#1335](https://github.com/paritytech/parity/pull/1335)
|
||||
- Fixing last nonce values in case transaction is replaced [#1359](https://github.com/paritytech/parity/pull/1359)
|
||||
- docopt is an optional dependency of ethkey and ethstore [#1358](https://github.com/paritytech/parity/pull/1358)
|
||||
- Fixing clippy warnings [#1354](https://github.com/paritytech/parity/pull/1354)
|
||||
- Reduce locking when syncing [#1357](https://github.com/paritytech/parity/pull/1357)
|
||||
- removed unnecessary logs [#1356](https://github.com/paritytech/parity/pull/1356)
|
||||
- Updating parity-dapps [#1353](https://github.com/paritytech/parity/pull/1353)
|
||||
- moved keystore tests files from util to ethstore [#1352](https://github.com/paritytech/parity/pull/1352)
|
||||
- removed redundant bigint deps [#1351](https://github.com/paritytech/parity/pull/1351)
|
||||
- Reopen "reserved peers and reserved-only flag" [#1350](https://github.com/paritytech/parity/pull/1350)
|
||||
- Configurable rocksdb cache size [#1348](https://github.com/paritytech/parity/pull/1348)
|
||||
- Fixing future order and errors when reaching limit. [#1346](https://github.com/paritytech/parity/pull/1346)
|
||||
- Removing priority on local transactions [#1342](https://github.com/paritytech/parity/pull/1342)
|
||||
- Revert "Reserved peers, reserved-only flag" [#1349](https://github.com/paritytech/parity/pull/1349)
|
||||
- Sync attack defense: Deactivate peers on invalid block bodies [#1345](https://github.com/paritytech/parity/pull/1345)
|
||||
- Reserved peers, reserved-only flag [#1347](https://github.com/paritytech/parity/pull/1347)
|
||||
- CI for ethkey and ethstore [#1341](https://github.com/paritytech/parity/pull/1341)
|
||||
- Fixed empty block body composition [#1340](https://github.com/paritytech/parity/pull/1340)
|
||||
- Provide a signer UI token by default. [#1334](https://github.com/paritytech/parity/pull/1334)
|
||||
- docker uses rustup, fixes #1337 [#1344](https://github.com/paritytech/parity/pull/1344)
|
||||
- Fixed network service dispose [#1339](https://github.com/paritytech/parity/pull/1339)
|
||||
- Sync: Cache last sync round block parents [#1331](https://github.com/paritytech/parity/pull/1331)
|
||||
- secret store separated from util [#1304](https://github.com/paritytech/parity/pull/1304)
|
||||
- --geth prevent getTransactionReceipt from using pending. [#1325](https://github.com/paritytech/parity/pull/1325)
|
||||
- Fixing locks order in miner. [#1328](https://github.com/paritytech/parity/pull/1328)
|
||||
- Update default gas limit, rename field [#1324](https://github.com/paritytech/parity/pull/1324)
|
||||
- Use constants for DatabaseConfig [#1318](https://github.com/paritytech/parity/pull/1318)
|
||||
- Fixing clippy warnings [#1321](https://github.com/paritytech/parity/pull/1321)
|
||||
- Bumping topbar. Fixing ws server closing when suspending [#1312](https://github.com/paritytech/parity/pull/1312)
|
||||
- Syncing fix [#1320](https://github.com/paritytech/parity/pull/1320)
|
||||
- Filling-in optional fields of TransactionRequest... [#1305](https://github.com/paritytech/parity/pull/1305)
|
||||
- Removing MakerOTC and DAO dapps [#1319](https://github.com/paritytech/parity/pull/1319)
|
||||
- Disabling ethcore_set* APIs by default (+ Status page update) [#1315](https://github.com/paritytech/parity/pull/1315)
|
||||
- fixed #1180 [#1282](https://github.com/paritytech/parity/pull/1282)
|
||||
- Network start/stop [#1313](https://github.com/paritytech/parity/pull/1313)
|
||||
- Additional logging for own transactions in queue [#1311](https://github.com/paritytech/parity/pull/1311)
|
||||
- DAO Rescue soft fork [#1309](https://github.com/paritytech/parity/pull/1309)
|
||||
- Appveyor config for windows build+installer [#1302](https://github.com/paritytech/parity/pull/1302)
|
||||
- Key load avoid warning [#1303](https://github.com/paritytech/parity/pull/1303)
|
||||
- More meaningful errors when sending transaction [#1290](https://github.com/paritytech/parity/pull/1290)
|
||||
- Gas price statistics. [#1291](https://github.com/paritytech/parity/pull/1291)
|
||||
- Fix read-ahead bug. [#1298](https://github.com/paritytech/parity/pull/1298)
|
||||
- firewall rules for windows installer [#1297](https://github.com/paritytech/parity/pull/1297)
|
||||
- x64 program files path for installer [#1296](https://github.com/paritytech/parity/pull/1296)
|
||||
- Fixed losing peers on incoming connections. [#1293](https://github.com/paritytech/parity/pull/1293)
|
||||
- fixed #1261, overflow when calculating work [#1283](https://github.com/paritytech/parity/pull/1283)
|
||||
- snappy and minor block compression [#1286](https://github.com/paritytech/parity/pull/1286)
|
||||
- clarify build instructions [#1287](https://github.com/paritytech/parity/pull/1287)
|
||||
- fixed #1255 [#1280](https://github.com/paritytech/parity/pull/1280)
|
||||
- bump rust-crypto [#1289](https://github.com/paritytech/parity/pull/1289)
|
||||
- Security audit issues fixed [#1279](https://github.com/paritytech/parity/pull/1279)
|
||||
- Fixing origin/host validation [#1273](https://github.com/paritytech/parity/pull/1273)
|
||||
- windows installer + parity start ui cli option [#1284](https://github.com/paritytech/parity/pull/1284)
|
||||
- ipc lib version bump [#1285](https://github.com/paritytech/parity/pull/1285)
|
||||
- Syncing improvements [#1274](https://github.com/paritytech/parity/pull/1274)
|
||||
- removed redundant if condition [#1270](https://github.com/paritytech/parity/pull/1270)
|
||||
- Naive chunk creation, snapshotting [#1263](https://github.com/paritytech/parity/pull/1263)
|
||||
- Fixing generating new token while another parity instance is running. [#1272](https://github.com/paritytech/parity/pull/1272)
|
||||
- README: rustup and windows instructions [#1266](https://github.com/paritytech/parity/pull/1266)
|
||||
- Windows build [#1253](https://github.com/paritytech/parity/pull/1253)
|
||||
- removed try_seal from MiningBlockChainClient [#1262](https://github.com/paritytech/parity/pull/1262)
|
||||
- simplified block opening [#1232](https://github.com/paritytech/parity/pull/1232)
|
||||
- Clippy bump [#1259](https://github.com/paritytech/parity/pull/1259)
|
||||
- Fixing uint ASM macros compilation [#1258](https://github.com/paritytech/parity/pull/1258)
|
||||
- Signer port returned from RPC + Topbar showing count of unconfirmed transactions. [#1252](https://github.com/paritytech/parity/pull/1252)
|
||||
- codegen - avoid unwraps leading to compilation crash [#1250](https://github.com/paritytech/parity/pull/1250)
|
||||
- Dapps bump [#1257](https://github.com/paritytech/parity/pull/1257)
|
||||
- Windows named pipes [#1254](https://github.com/paritytech/parity/pull/1254)
|
||||
- remove unsafety from util/hash.rs and util/bigint/uint.rs [#1236](https://github.com/paritytech/parity/pull/1236)
|
||||
- Fixing CORS settings for special values: * & null. [#1247](https://github.com/paritytech/parity/pull/1247)
|
||||
- JSONRPC test strings avoid using \ char [#1246](https://github.com/paritytech/parity/pull/1246)
|
||||
- Tests for JSON serialisation of statediff/vmtrace [#1241](https://github.com/paritytech/parity/pull/1241)
|
||||
- Bumping Dapps & TopBar to newest version. [#1245](https://github.com/paritytech/parity/pull/1245)
|
||||
- keys import [#1240](https://github.com/paritytech/parity/pull/1240)
|
||||
- Splitting RPC Apis into more fine-grained sets [#1234](https://github.com/paritytech/parity/pull/1234)
|
||||
- Refactor triedb constructors to error on invalid state root [#1230](https://github.com/paritytech/parity/pull/1230)
|
||||
- Signer RPC method to check if signer is enabled [#1238](https://github.com/paritytech/parity/pull/1238)
|
||||
- Fixing signer behaviour when confirming transaction with wrong password. [#1237](https://github.com/paritytech/parity/pull/1237)
|
||||
- SystemUIs authorization [#1233](https://github.com/paritytech/parity/pull/1233)
|
||||
- IPC path for testnet with --geth compatibility [#1231](https://github.com/paritytech/parity/pull/1231)
|
||||
- Transaction tracing for eth_call [#1210](https://github.com/paritytech/parity/pull/1210)
|
||||
- Removing compilation warnings [#1227](https://github.com/paritytech/parity/pull/1227)
|
||||
- Allowing connections only from chrome-extension and self-hosted client [#1226](https://github.com/paritytech/parity/pull/1226)
|
||||
- Clippy bump & fixing warnings [#1219](https://github.com/paritytech/parity/pull/1219)
|
||||
- Bumping serde & syntex [#1216](https://github.com/paritytech/parity/pull/1216)
|
||||
- Minimal Signer UI (System UI) exposed over websockets. [#1211](https://github.com/paritytech/parity/pull/1211)
|
||||
- Switch RPC namespace from ethcore_ to trace_ [#1208](https://github.com/paritytech/parity/pull/1208)
|
||||
- Verify the state root exists before creating a State [#1217](https://github.com/paritytech/parity/pull/1217)
|
||||
- Integrate state diffing into the ethcore JSONRPC [#1206](https://github.com/paritytech/parity/pull/1206)
|
||||
- Updating topbar to latest version [#1220](https://github.com/paritytech/parity/pull/1220)
|
||||
- Loading local Dapps from FS. [#1214](https://github.com/paritytech/parity/pull/1214)
|
||||
- Ipc serialization & protocol fixes [#1188](https://github.com/paritytech/parity/pull/1188)
|
||||
- Have Ext::ret take self by value [#1187](https://github.com/paritytech/parity/pull/1187)
|
||||
- Simple WebSockets notification about new request [#1202](https://github.com/paritytech/parity/pull/1202)
|
||||
- Removing leftovers of ethminer [#1207](https://github.com/paritytech/parity/pull/1207)
|
||||
- fixed #1204 [#1205](https://github.com/paritytech/parity/pull/1205)
|
||||
- VM tracing and JSON RPC endpoint for it. [#1169](https://github.com/paritytech/parity/pull/1169)
|
||||
- devtools helpers extended [#1186](https://github.com/paritytech/parity/pull/1186)
|
||||
- Networking refactoring [#1172](https://github.com/paritytech/parity/pull/1172)
|
||||
- Client & Miner refactoring [#1195](https://github.com/paritytech/parity/pull/1195)
|
||||
- update readme [#1201](https://github.com/paritytech/parity/pull/1201)
|
||||
- Simple signing queue, confirmation APIs exposed in signer WebSockets. [#1182](https://github.com/paritytech/parity/pull/1182)
|
||||
- Using ordered hashmap to keep the order of dapps on home screen [#1199](https://github.com/paritytech/parity/pull/1199)
|
||||
- Disabling `ethcore` by default, adding x-frame-options header to dapps. [#1197](https://github.com/paritytech/parity/pull/1197)
|
||||
- transaction count verifier tests [#1196](https://github.com/paritytech/parity/pull/1196)
|
||||
- expunge x! and xx! from the codebase [#1192](https://github.com/paritytech/parity/pull/1192)
|
||||
- Database service upgrade (from the ipc branch) [#1185](https://github.com/paritytech/parity/pull/1185)
|
||||
- stop eth_syncing from returning true forever [#1181](https://github.com/paritytech/parity/pull/1181)
|
||||
- Sync fixes and tweaks [#1164](https://github.com/paritytech/parity/pull/1164)
|
||||
- Exposing RPC over Signer WebSockets [#1167](https://github.com/paritytech/parity/pull/1167)
|
||||
- implement missing rpc methods and tests [#1171](https://github.com/paritytech/parity/pull/1171)
|
||||
- json ipc server version bump [#1170](https://github.com/paritytech/parity/pull/1170)
|
||||
- Updated dependencies for windows build [#1173](https://github.com/paritytech/parity/pull/1173)
|
||||
- Framework for improved RPC unit tests [#1141](https://github.com/paritytech/parity/pull/1141)
|
||||
- remove all possible unsafe code in crypto [#1168](https://github.com/paritytech/parity/pull/1168)
|
||||
- Base for Signer Websockets server [#1158](https://github.com/paritytech/parity/pull/1158)
|
||||
- Write queue to speed-up db ipc [#1160](https://github.com/paritytech/parity/pull/1160)
|
||||
- Fixing few clippy warnings [#1163](https://github.com/paritytech/parity/pull/1163)
|
||||
- Change eth_signAndSendTransaction to personal_SignAndSendTransaction [#1154](https://github.com/paritytech/parity/pull/1154)
|
||||
- Support "earliest" and specific block parameters in RPC where possible [#1149](https://github.com/paritytech/parity/pull/1149)
|
||||
- migration fixes [#1155](https://github.com/paritytech/parity/pull/1155)
|
||||
- Empty trusted signer crate with its general purpose described. [#1150](https://github.com/paritytech/parity/pull/1150)
|
||||
- More bootnodes for morden. [#1153](https://github.com/paritytech/parity/pull/1153)
|
||||
- move existing rpc tests into mocked module [#1151](https://github.com/paritytech/parity/pull/1151)
|
||||
- Bloomchain [#1014](https://github.com/paritytech/parity/pull/1014)
|
||||
- Renaming dapps repos. Updating dapps [#1142](https://github.com/paritytech/parity/pull/1142)
|
||||
- fixed pending transactions [#1147](https://github.com/paritytech/parity/pull/1147)
|
||||
- Basic benches to provide metrics for ipc optimizations [#1145](https://github.com/paritytech/parity/pull/1145)
|
||||
- Fixing clippy warnings [#1148](https://github.com/paritytech/parity/pull/1148)
|
||||
- correct signature of SecTrieDB::raw_mut [#1143](https://github.com/paritytech/parity/pull/1143)
|
||||
- Merge to master and start hypervisor for import/export [#1138](https://github.com/paritytech/parity/pull/1138)
|
||||
- Bumping clippy. Fixing warnings [#1139](https://github.com/paritytech/parity/pull/1139)
|
||||
- Display progress when importing [#1136](https://github.com/paritytech/parity/pull/1136)
|
||||
- foundation of simple db migration [#1128](https://github.com/paritytech/parity/pull/1128)
|
||||
- Fixpending [#1074](https://github.com/paritytech/parity/pull/1074)
|
||||
- Sync: Propagate uncles and fix status reporting [#1134](https://github.com/paritytech/parity/pull/1134)
|
||||
- Coloured, padding logging. [#1133](https://github.com/paritytech/parity/pull/1133)
|
||||
- Importing [#1132](https://github.com/paritytech/parity/pull/1132)
|
||||
- Have `die_with_error` use `fmt::Display` rather than Debug [#1116](https://github.com/paritytech/parity/pull/1116)
|
||||
- Exporting [#1129](https://github.com/paritytech/parity/pull/1129)
|
||||
- Sign and send transaction [#1124](https://github.com/paritytech/parity/pull/1124)
|
||||
- Fixing unused imports warnings [#1125](https://github.com/paritytech/parity/pull/1125)
|
||||
- Adding info messages on mined blocks [#1127](https://github.com/paritytech/parity/pull/1127)
|
||||
- Fix styling - don't mix spaces with tabs!!! [#1123](https://github.com/paritytech/parity/pull/1123)
|
||||
- Fix is_syncing so it's false as long as the update is trivial. [#1122](https://github.com/paritytech/parity/pull/1122)
|
||||
- Relock unlocked accounts after first use [#1120](https://github.com/paritytech/parity/pull/1120)
|
||||
- Avoid importing keys into wrong place. [#1119](https://github.com/paritytech/parity/pull/1119)
|
||||
- Implement receipt's gasUsed field [#1118](https://github.com/paritytech/parity/pull/1118)
|
||||
- New dapps & query parameter handling [#1113](https://github.com/paritytech/parity/pull/1113)
|
||||
- pretty print trace error [#1098](https://github.com/paritytech/parity/pull/1098)
|
||||
- New syncing strategy [#1095](https://github.com/paritytech/parity/pull/1095)
|
||||
- ethcore-db crate [#1097](https://github.com/paritytech/parity/pull/1097)
|
||||
- Fix the default for pruning. [#1107](https://github.com/paritytech/parity/pull/1107)
|
||||
- Make Id/ID and db/Db/DB usage consistent [#1105](https://github.com/paritytech/parity/pull/1105)
|
||||
- Miner holds its own copy of spec/engine [#1091](https://github.com/paritytech/parity/pull/1091)
|
||||
- Apps listing API & Home webapp. [#1101](https://github.com/paritytech/parity/pull/1101)
|
||||
- CLI option for using JITEVM [#1103](https://github.com/paritytech/parity/pull/1103)
|
||||
- Fix up the seal fields in RPC output [#1096](https://github.com/paritytech/parity/pull/1096)
|
||||
- Fixing some warnings [#1102](https://github.com/paritytech/parity/pull/1102)
|
||||
- fixed incorrect decoding of header seal_fields. added tests. #1090 [#1094](https://github.com/paritytech/parity/pull/1094)
|
||||
- Bumping Clippy [#1093](https://github.com/paritytech/parity/pull/1093)
|
||||
- Injectable topbar support. [#1092](https://github.com/paritytech/parity/pull/1092)
|
||||
- New syncing part 1: Block collection [#1088](https://github.com/paritytech/parity/pull/1088)
|
||||
- Moving all Client public API types to separate mod & binary serialization codegen for that mod [#1051](https://github.com/paritytech/parity/pull/1051)
|
||||
- Subdomains support in content server (webapps server). [#1082](https://github.com/paritytech/parity/pull/1082)
|
||||
- Fix uncle getter [#1087](https://github.com/paritytech/parity/pull/1087)
|
||||
- Provide fallback for usd-per-eth option when offline. [#1085](https://github.com/paritytech/parity/pull/1085)
|
||||
- path centralized [#1083](https://github.com/paritytech/parity/pull/1083)
|
||||
- Limiting result of the execution to execution-specific errors [#1071](https://github.com/paritytech/parity/pull/1071)
|
||||
- Configurable keys security [#1080](https://github.com/paritytech/parity/pull/1080)
|
||||
- comma delimiting multiple cors headers [#1078](https://github.com/paritytech/parity/pull/1078)
|
||||
- Update error message [#1081](https://github.com/paritytech/parity/pull/1081)
|
||||
- Updating dapp-wallet [#1076](https://github.com/paritytech/parity/pull/1076)
|
||||
- Fixed connecting to local nodes on startup [#1070](https://github.com/paritytech/parity/pull/1070)
|
||||
- Validate signature in Tx queue [#1068](https://github.com/paritytech/parity/pull/1068)
|
||||
- moving deps to ethcore/hyper and bumping jsonrpc-http-server version [#1067](https://github.com/paritytech/parity/pull/1067)
|
||||
- Updating status page. Bringing back wallet [#1064](https://github.com/paritytech/parity/pull/1064)
|
||||
- Fix --geth IPC for MacOS. [#1062](https://github.com/paritytech/parity/pull/1062)
|
||||
- Fixing formatter for defaultExtraData [#1060](https://github.com/paritytech/parity/pull/1060)
|
||||
- --geth IPC compatibility [#1059](https://github.com/paritytech/parity/pull/1059)
|
||||
- Moving dependencies to ethcore & uniforming syntax libs through all crates [#1050](https://github.com/paritytech/parity/pull/1050)
|
||||
- update hyper branch mio [#1054](https://github.com/paritytech/parity/pull/1054)
|
||||
- IPC lib update [#1047](https://github.com/paritytech/parity/pull/1047)
|
||||
- Updating hyper-mio revision [#1048](https://github.com/paritytech/parity/pull/1048)
|
||||
- Bump ipc-lib version [#1046](https://github.com/paritytech/parity/pull/1046)
|
||||
- Tidy up CLI options and make JSONRPC & webapps on by default. [#1045](https://github.com/paritytech/parity/pull/1045)
|
||||
- Fixing clippy warnings [#1044](https://github.com/paritytech/parity/pull/1044)
|
||||
- Fixing RPC modules compatibility [#1041](https://github.com/paritytech/parity/pull/1041)
|
||||
- Fixing hyper-mio revision [#1043](https://github.com/paritytech/parity/pull/1043)
|
||||
- Updating locations of webapp stuff [#1040](https://github.com/paritytech/parity/pull/1040)
|
||||
- JSON-RPC over IPC [#1039](https://github.com/paritytech/parity/pull/1039)
|
||||
- Update nix/mio for ARM [#1036](https://github.com/paritytech/parity/pull/1036)
|
||||
- Basic Authority [#991](https://github.com/paritytech/parity/pull/991)
|
||||
- Prioritizing of local transaction [#1023](https://github.com/paritytech/parity/pull/1023)
|
||||
- Version 1.2 [#1030](https://github.com/paritytech/parity/pull/1030)
|
||||
- Bumping status page [#1033](https://github.com/paritytech/parity/pull/1033)
|
||||
500
docs/CHANGELOG-1.3.md
Normal file
500
docs/CHANGELOG-1.3.md
Normal file
@@ -0,0 +1,500 @@
|
||||
## Parity [v1.3.15](https://github.com/paritytech/parity/releases/tag/v1.3.15) (2016-12-10)
|
||||
|
||||
This patch release fixes an issue with syncing on the Ropsten test network.
|
||||
|
||||
- Backporting to stable [#3793](https://github.com/paritytech/parity/pull/3793)
|
||||
|
||||
## Parity [v1.3.14](https://github.com/paritytech/parity/releases/tag/v1.3.14) (2016-11-25)
|
||||
|
||||
Parity 1.3.14 fixes a few stability issues and adds support for the Ropsten testnet.
|
||||
|
||||
- Backporting to stable [#3616](https://github.com/paritytech/parity/pull/3616)
|
||||
|
||||
## Parity [v1.3.13](https://github.com/paritytech/parity/releases/tag/v1.3.13) (2016-11-18)
|
||||
|
||||
This release fixes an issue with EIP-155 transactions being allowed into the transaction pool.
|
||||
|
||||
- [stable] Check tx signatures before adding to the queue. [#3521](https://github.com/paritytech/parity/pull/3521)
|
||||
- Fix Stable Docker Build [#3479](https://github.com/paritytech/parity/pull/3479)
|
||||
|
||||
## Parity [v1.3.12](https://github.com/paritytech/parity/releases/tag/v1.3.12) (2016-11-16)
|
||||
|
||||
This stable release enables EIP-155/160/161/170 hardfork at block 2675000 (1885000 for test network).
|
||||
|
||||
- [stable] EIP-170 [#3462](https://github.com/paritytech/parity/pull/3462)
|
||||
- #3035 Backport to stable [#3441](https://github.com/paritytech/parity/pull/3441)
|
||||
|
||||
## Parity [v1.3.11](https://github.com/paritytech/parity/releases/tag/v1.3.11) (2016-11-11)
|
||||
|
||||
This is a maintenance release for the stable series to delay the EIP-155/160/161 hard fork transition. **Update from 1.3.10 is mandatory**. It also deprecates and disables the old Parity UI.
|
||||
|
||||
- [stable] Disable HF and UI [#3372](https://github.com/paritytech/parity/pull/3372)
|
||||
- [stable] EIP-155 update with Vitalik's new test vectors (#3166) [#3190](https://github.com/paritytech/parity/pull/3190)
|
||||
- Backport EIP-150 to stable [#2672](https://github.com/paritytech/parity/pull/2672)
|
||||
- Create gitlab-ci.yml for stable [#2517](https://github.com/paritytech/parity/pull/2517)
|
||||
|
||||
## Parity [v1.3.10](https://github.com/paritytech/parity/releases/tag/v1.3.10) (2016-11-04)
|
||||
|
||||
The latest 1.3 series release, now considered stable.
|
||||
|
||||
This includes several additional optimisations and fixes together with provisional support for the upcoming hard fork for EIP155/160/161.
|
||||
|
||||
- Stable branch reset to 1.3.10 [#3156](https://github.com/paritytech/parity/pull/3156)
|
||||
- Backporting to beta [#3149](https://github.com/paritytech/parity/pull/3149)
|
||||
- apply post-consolidation migrations after consolidating (BETA) [#3048](https://github.com/paritytech/parity/pull/3048)
|
||||
- [beta] Fix the brainwallet functionality. (#2994) [#3005](https://github.com/paritytech/parity/pull/3005)
|
||||
- Bumping json-ipc-server [#2989](https://github.com/paritytech/parity/pull/2989)
|
||||
- Backports for 1.3.10 [#2987](https://github.com/paritytech/parity/pull/2987)
|
||||
|
||||
## Parity [v1.3.9](https://github.com/paritytech/parity/releases/tag/v1.3.9) (2016-10-21)
|
||||
|
||||
This release enables EIP-150 hard fork for Ethereum Classic chain and resolves a few stability and performance issues, such as:
|
||||
- Interrupted syncing on the test network.
|
||||
- Block import delays caused by a large number of incoming transactions. A full re-sync is recommended for performance improvement to take effect.
|
||||
|
||||
Full changes:
|
||||
- [beta] Resolve morden fork [#2776](https://github.com/paritytech/parity/pull/2776)
|
||||
- Fixing botched merge [#2767](https://github.com/paritytech/parity/pull/2767)
|
||||
- Backports for beta [#2764](https://github.com/paritytech/parity/pull/2764)
|
||||
- Introduce EIP150 hardfork block for ETC [#2736](https://github.com/paritytech/parity/pull/2736)
|
||||
- [beta] fix issues with no test dir present (#2659) [#2724](https://github.com/paritytech/parity/pull/2724)
|
||||
- [beta] Bumping jsonrpc-http-server [#2715](https://github.com/paritytech/parity/pull/2715)
|
||||
- [beta] Fix migration system, better errors [#2661](https://github.com/paritytech/parity/pull/2661)
|
||||
|
||||
## Parity [v1.3.8](https://github.com/paritytech/parity/releases/tag/v1.3.8) (2016-10-15)
|
||||
|
||||
Parity 1.3.8 is our EIP150 hard-fork compliant release.
|
||||
|
||||
Running this will enact a mild change of the protocol at block number 2,463,000 which should occur on Tuesday 18th October 2016 at approximately 12:20 London time (BST). This change alters the gas prices for a number of operations, mainly centring around i/o intensive Merkle trie lookups (`BALANCE`, `EXTCODESIZE` &c.) and state-trie polluters (`SUICIDE`, `CREATE` and `CALL`). These operations were heavily underpriced, an oversight which led to the recent degradation of network service. The full details of the alteration are specified in [EIP-150](https://github.com/ethereum/EIPs/issues/150).
|
||||
|
||||
Additionally several issues have been fixed including:
|
||||
- a transaction queue limitation leading to dropped transactions;
|
||||
- a synchronisation issue leading to stalls when syncing;
|
||||
|
||||
And some small features including database performance improvements and additional logging.
|
||||
|
||||
#### Upgrading private chain specification files.
|
||||
|
||||
All the chain specification files now have EIP-150 rules enabled by default. To continue using the chain add the `eip150Transition` key under `Engine/ethash/params` and set it to a future transition block as shown in [this example](https://github.com/paritytech/parity/blob/85eeb3ea6e5e21ad8e5644241edf82eb8069f536/ethcore/res/ethereum/morden.json#L13).
|
||||
|
||||
The key related to homestead transition has been renamed from `frontierCompatibilityModeLimit` to `homesteadTransition`.
|
||||
|
||||
#### Full changes
|
||||
|
||||
- [beta] EIP150.1c [#2599](https://github.com/paritytech/parity/pull/2599)
|
||||
- Remove count limit for local transactions [#2634](https://github.com/paritytech/parity/pull/2634)
|
||||
- Tweak DB and mining defaults [#2598](https://github.com/paritytech/parity/pull/2598)
|
||||
- Revert "Bloom upgrade in beta" [#2635](https://github.com/paritytech/parity/pull/2635)
|
||||
- Bloom upgrade in beta [#2609](https://github.com/paritytech/parity/pull/2609)
|
||||
- Backports to beta [#2628](https://github.com/paritytech/parity/pull/2628)
|
||||
|
||||
## Parity [v1.3.7](https://github.com/paritytech/parity/releases/tag/v1.3.7) (2016-10-12)
|
||||
|
||||
This release contains fixes to reduce memory usage under the DoS attack and improve transaction relay.
|
||||
|
||||
- Configurable history size in beta [#2587](https://github.com/paritytech/parity/pull/2587)
|
||||
- Backports to beta [#2592](https://github.com/paritytech/parity/pull/2592)
|
||||
|
||||
|
||||
## Parity [v1.3.6](https://github.com/paritytech/parity/releases/tag/v1.3.6) (2016-10-11)
|
||||
|
||||
Parity 1.3.6 is another hotfix release to address transaction spam and deal with stability issues. With this release transaction pool gas limit no longer applies to local transactions. Full list of changes is available here:
|
||||
|
||||
- Backports to beta v1.3.6 [#2571](https://github.com/paritytech/parity/pull/2571)
|
||||
- Use global state cache when mining [#2529](https://github.com/paritytech/parity/pull/2529)
|
||||
- Transaction queue limited by gas [#2528](https://github.com/paritytech/parity/pull/2528)
|
||||
|
||||
## Parity [v1.3.5](https://github.com/paritytech/parity/releases/tag/v1.3.5) (2016-10-08)
|
||||
|
||||
1.3.5 is a hotfix release for the transaction propagation issue. Transaction pool limit is now calculated based on the block gas limit.
|
||||
|
||||
- Update appveyor rustc [beta] [#2521](https://github.com/paritytech/parity/pull/2521)
|
||||
- Increase size of transaction queue by default [#2519](https://github.com/paritytech/parity/pull/2519)
|
||||
|
||||
## Parity [v1.3.4](https://github.com/paritytech/parity/releases/tag/v1.3.4) (2016-10-07)
|
||||
|
||||
Parity 1.3.4 release contains more optimizations to internal caching as well as stability improvements.
|
||||
|
||||
It also introduces an ability for miners to choose a transaction ordering strategy:
|
||||
|
||||
--tx-queue-strategy S Prioritization strategy used to order transactions
|
||||
in the queue. S may be:
|
||||
gas - Prioritize txs with low gas limit;
|
||||
gas_price - Prioritize txs with high gas price;
|
||||
gas_factor - Prioritize txs using gas price
|
||||
and gas limit ratio [default: gas_factor].
|
||||
|
||||
- Backport to beta [#2518](https://github.com/paritytech/parity/pull/2518)
|
||||
- [beta] Fixing RPC Filter conversion to EthFilter [#2501](https://github.com/paritytech/parity/pull/2501)
|
||||
- [beta] Using pending block only if is not old [#2515](https://github.com/paritytech/parity/pull/2515)
|
||||
- Backports into beta [#2512](https://github.com/paritytech/parity/pull/2512)
|
||||
- CLI to specify queue ordering strategy [#2494](https://github.com/paritytech/parity/pull/2494)
|
||||
- Fix ethstore opening all key files in the directory at once (BETA) [#2472](https://github.com/paritytech/parity/pull/2472)
|
||||
- Beta backports [#2465](https://github.com/paritytech/parity/pull/2465)
|
||||
- IPC-library dependency fork & bump for beta [#2455](https://github.com/paritytech/parity/pull/2455)
|
||||
|
||||
## Parity [v1.3.3](https://github.com/paritytech/parity/releases/tag/v1.3.3) (2016-10-04)
|
||||
|
||||
1.3.3 is another hotfix release for the DoS attack
|
||||
|
||||
- Jumptable cache [#2435](https://github.com/paritytech/parity/pull/2435)
|
||||
- fix broken beta compilation (backport to beta) [#2414](https://github.com/paritytech/parity/pull/2414)
|
||||
- Run inplace upgrades after version update [#2411](https://github.com/paritytech/parity/pull/2411)
|
||||
|
||||
## Parity [v1.3.2](https://github.com/paritytech/parity/releases/tag/v1.3.2) (2016-09-29)
|
||||
|
||||
This is a hotfix release to address stability and performance issues uncovered during the network DoS attack. Full list of changes is available [here](https://github.com/paritytech/parity/compare/v1.3.1...v1.3.2)
|
||||
|
||||
- Beta Backports [#2396](https://github.com/paritytech/parity/pull/2396)
|
||||
- Fixing penalization in future [#2493](https://github.com/paritytech/parity/pull/2493)
|
||||
- A quick fix for missing tree route blocks [#2400](https://github.com/paritytech/parity/pull/2400)
|
||||
- Cache the fork block header after snapshot restoration [#2391](https://github.com/paritytech/parity/pull/2391)
|
||||
- correct sync memory usage calculation (BETA) [#2386](https://github.com/paritytech/parity/pull/2386)
|
||||
- Accounts bloom [#2357](https://github.com/paritytech/parity/pull/2357)
|
||||
- Disable colors when generating signer token. [#2379](https://github.com/paritytech/parity/pull/2379)
|
||||
- Fixing jit feature compilation [#2376](https://github.com/paritytech/parity/pull/2376)
|
||||
- Clear state cache on sealed block import [#2377](https://github.com/paritytech/parity/pull/2377)
|
||||
- DIV optimization (beta) [#2353](https://github.com/paritytech/parity/pull/2353)
|
||||
- Canonical state cache [#2308](https://github.com/paritytech/parity/pull/2308)
|
||||
- Reorder transaction_by_hash to favour canon search [#2331](https://github.com/paritytech/parity/pull/2331)
|
||||
- Lenient bytes deserialization [#2340](https://github.com/paritytech/parity/pull/2340)
|
||||
- Penalize transactions with gas above gas limit [#2271](https://github.com/paritytech/parity/pull/2271)
|
||||
- Peek transaction queue via RPC [#2270](https://github.com/paritytech/parity/pull/2270)
|
||||
- Handle RLP to string UTF-8 decoding errors (#2217) [#2226](https://github.com/paritytech/parity/pull/2226)
|
||||
- Fixing compilation without default features [beta] [#2207](https://github.com/paritytech/parity/pull/2207)
|
||||
- Avoid cloning clean stuff [beta backport] [#2173](https://github.com/paritytech/parity/pull/2173)
|
||||
- v1.3.2 in beta [#2200](https://github.com/paritytech/parity/pull/2200)
|
||||
|
||||
## Parity [v1.3.1](https://github.com/paritytech/parity/releases/tag/v1.3.1) (2016-09-11)
|
||||
|
||||
1.3.1 includes many [bugfixes](https://github.com/paritytech/parity/commit/2a82fa0a47b00bedfec520a2fdd3cc31aa4ccd8c). Critical ones:
|
||||
- **Chain reorganisation fix** Transaction receipts / traces were sometimes linked with incorrect block hash. Fixed in https://github.com/paritytech/parity/commit/a9587f8965a32c84973c35ce1c8d51d07044143f
|
||||
- **Trace overflow fix** Overflow which occurred during tracing. Fixed in https://github.com/paritytech/parity/pull/1979
|
||||
|
||||
- Backports to beta [#2068](https://github.com/paritytech/parity/pull/2068)
|
||||
- Fixing serde overflow error (#1977) [#2030](https://github.com/paritytech/parity/pull/2030)
|
||||
- Simplified db pruning detection in beta [#1924](https://github.com/paritytech/parity/pull/1924)
|
||||
- Backports to beta [#1919](https://github.com/paritytech/parity/pull/1919)
|
||||
|
||||
## Parity [v1.3.0: "Acuity"](https://github.com/paritytech/parity/releases/tag/v1.3.0) (2016-08-12)
|
||||
|
||||
As well as many bug fixes, 1.3.0 includes a number of important improvements including:
|
||||
- **Optimisations** Heavily optimised block/transaction processing core - up to 2x faster than 1.2 series.
|
||||
- **Database compression** Databases take as much as 30% less storage than before.
|
||||
- **State snapshotting** An installation synchronised from scratch in 1-2 minutes can be made after downloading the 140MB state snapshot. See [the wiki](https://github.com/paritytech/parity/wiki/Getting-Synced) for more information.
|
||||
- **Process isolation** The networking/chain-synchronisation is now a fully independent process.
|
||||
|
||||
Incremental improvements include:
|
||||
- Additional [RPCs](https://github.com/paritytech/parity/wiki/JSONRPC) for transaction tracing, state diffing, VM tracing, asynchronous transaction posting, accounts metadata and message signing.
|
||||
- Improved logging, including for chain reorganisations.
|
||||
- Added a `--fast-and-loose` option for additional speed-ups which can compromise integrity on a dirty shutdown.
|
||||
- Column families to ensure maximal inter-database integrity.
|
||||
- Key naming includes date/time of creation.
|
||||
- Various improvements to networking robustness and performance.
|
||||
- Solidity compilation supported through RPC if `solc` is available.
|
||||
- Various improvements to the miner including [HTTP push work notification](https://github.com/paritytech/parity/wiki/Mining#starting-it).
|
||||
|
||||
Full changes:
|
||||
- Bumping Parity UI [#1920](https://github.com/paritytech/parity/pull/1920)
|
||||
- Adding entrypoints to docker images [#1909](https://github.com/paritytech/parity/pull/1909)
|
||||
- Save nodes removed from backing_overlay until commit [#1917](https://github.com/paritytech/parity/pull/1917)
|
||||
- RPC for importing geth keys [#1916](https://github.com/paritytech/parity/pull/1916)
|
||||
- Peers RPC + UI displaying active/connected/max peers [#1915](https://github.com/paritytech/parity/pull/1915)
|
||||
- RPC for deriving address from phrase. [#1912](https://github.com/paritytech/parity/pull/1912)
|
||||
- adjust polling & connection timeouts for ipc [#1910](https://github.com/paritytech/parity/pull/1910)
|
||||
- Don't return deleted nodes that are not yet flushed [#1908](https://github.com/paritytech/parity/pull/1908)
|
||||
- Wallet rpcs [#1898](https://github.com/paritytech/parity/pull/1898)
|
||||
- Fix binary serialization bug [#1907](https://github.com/paritytech/parity/pull/1907)
|
||||
- fixed #1889, .DS_Store is no longer treated as key file [#1892](https://github.com/paritytech/parity/pull/1892)
|
||||
- Purging .derefs, fixing clippy warnings. [#1890](https://github.com/paritytech/parity/pull/1890)
|
||||
- RocksDB version bump [#1904](https://github.com/paritytech/parity/pull/1904)
|
||||
- Fix ipc compilation and add ipc feature to test targets [#1902](https://github.com/paritytech/parity/pull/1902)
|
||||
- Autocreating geth dir if none and geth mode on [#1896](https://github.com/paritytech/parity/pull/1896)
|
||||
- v1.4.0 in master [#1886](https://github.com/paritytech/parity/pull/1886)
|
||||
- Adding more details to miner log [#1891](https://github.com/paritytech/parity/pull/1891)
|
||||
- moved hash.rs to bigint library [#1827](https://github.com/paritytech/parity/pull/1827)
|
||||
- fixed cache_manager lock order [#1877](https://github.com/paritytech/parity/pull/1877)
|
||||
- Fixing miner deadlock [#1885](https://github.com/paritytech/parity/pull/1885)
|
||||
- Updating WS + Increasing token validity [#1882](https://github.com/paritytech/parity/pull/1882)
|
||||
- take snapshot at specified block and slightly better informants [#1873](https://github.com/paritytech/parity/pull/1873)
|
||||
- RPC errors & logs [#1845](https://github.com/paritytech/parity/pull/1845)
|
||||
- Reduce max open files [#1876](https://github.com/paritytech/parity/pull/1876)
|
||||
- Send new block hashes to all peers [#1875](https://github.com/paritytech/parity/pull/1875)
|
||||
- Use UntrustedRlp for block verification [#1872](https://github.com/paritytech/parity/pull/1872)
|
||||
- Update cache usage on committing block info [#1871](https://github.com/paritytech/parity/pull/1871)
|
||||
- Validating conversion U256->usize when doing gas calculation (for 32bits) [#1870](https://github.com/paritytech/parity/pull/1870)
|
||||
- Sync to peers with confirmed fork block only [#1863](https://github.com/paritytech/parity/pull/1863)
|
||||
- miner and client take spec reference [#1853](https://github.com/paritytech/parity/pull/1853)
|
||||
- Unlock account with timeout for geth compatibility [#1854](https://github.com/paritytech/parity/pull/1854)
|
||||
- Fixed reported max height and transaction propagation [#1852](https://github.com/paritytech/parity/pull/1852)
|
||||
- Snapshot creation and restoration [#1679](https://github.com/paritytech/parity/pull/1679)
|
||||
- fix deprecated typo [#1850](https://github.com/paritytech/parity/pull/1850)
|
||||
- Split IO and network crates [#1828](https://github.com/paritytech/parity/pull/1828)
|
||||
- updated classic JSON spec with classic bootnodes, fixes #1842 [#1847](https://github.com/paritytech/parity/pull/1847)
|
||||
- protect unsafety in plainhasher; get more unique hashes [#1841](https://github.com/paritytech/parity/pull/1841)
|
||||
- use mutex in dbtransaction [#1843](https://github.com/paritytech/parity/pull/1843)
|
||||
- Fix state not using "account_starting_nonce" [#1830](https://github.com/paritytech/parity/pull/1830)
|
||||
- Supporting blockid in eth_call and trace_call/trace_raw [#1837](https://github.com/paritytech/parity/pull/1837)
|
||||
- eth_checkTransaction renamed to eth_checkRequest [#1817](https://github.com/paritytech/parity/pull/1817)
|
||||
- Bump json-ipc-server again [#1839](https://github.com/paritytech/parity/pull/1839)
|
||||
- Fixing another deadlock in trace db [#1833](https://github.com/paritytech/parity/pull/1833)
|
||||
- Fix up the VM trace. [#1829](https://github.com/paritytech/parity/pull/1829)
|
||||
- fixed parsing export params, fixes #1826 [#1834](https://github.com/paritytech/parity/pull/1834)
|
||||
- More performance optimizations [#1814](https://github.com/paritytech/parity/pull/1814)
|
||||
- Bumping clippy & fixing warnings [#1823](https://github.com/paritytech/parity/pull/1823)
|
||||
- removed unused code from util and unnecessary dependency of FixedHash [#1824](https://github.com/paritytech/parity/pull/1824)
|
||||
- Remove (almost all) panickers from trie module [#1776](https://github.com/paritytech/parity/pull/1776)
|
||||
- Fixing account naming [#1810](https://github.com/paritytech/parity/pull/1810)
|
||||
- JournalDB inject [#1806](https://github.com/paritytech/parity/pull/1806)
|
||||
- No block number in get work while in geth-compat mode. [#1821](https://github.com/paritytech/parity/pull/1821)
|
||||
- Import wallet fix [#1820](https://github.com/paritytech/parity/pull/1820)
|
||||
- Supporting eth_sign in Signer [#1787](https://github.com/paritytech/parity/pull/1787)
|
||||
- Fixing cache update after chain reorg [#1816](https://github.com/paritytech/parity/pull/1816)
|
||||
- Development mode for Signer UI [#1788](https://github.com/paritytech/parity/pull/1788)
|
||||
- Miner tweaks [#1797](https://github.com/paritytech/parity/pull/1797)
|
||||
- Util & ipc cleanup [#1807](https://github.com/paritytech/parity/pull/1807)
|
||||
- Fixing unlock parsing [#1802](https://github.com/paritytech/parity/pull/1802)
|
||||
- fixed importing presale wallet with encseed longer than 96 bytes [#1801](https://github.com/paritytech/parity/pull/1801)
|
||||
- DRYing build scripts [#1795](https://github.com/paritytech/parity/pull/1795)
|
||||
- Allow code from spec json [#1790](https://github.com/paritytech/parity/pull/1790)
|
||||
- nano-tests (ipc transport) to the CI [#1793](https://github.com/paritytech/parity/pull/1793)
|
||||
- Commit best block after closing transaction [#1791](https://github.com/paritytech/parity/pull/1791)
|
||||
- Place thread name in the log output [#1792](https://github.com/paritytech/parity/pull/1792)
|
||||
- Fix ipc tests and bring to CI [#1789](https://github.com/paritytech/parity/pull/1789)
|
||||
- dynamic keys pickup [#1779](https://github.com/paritytech/parity/pull/1779)
|
||||
- ipc version bump [#1783](https://github.com/paritytech/parity/pull/1783)
|
||||
- Prevent deadlock on trace GC [#1780](https://github.com/paritytech/parity/pull/1780)
|
||||
- fixed trace_transaction crash when block contained suicide [#1781](https://github.com/paritytech/parity/pull/1781)
|
||||
- Fix block body migration [#1777](https://github.com/paritytech/parity/pull/1777)
|
||||
- cache manager and clearing tracing cache [#1769](https://github.com/paritytech/parity/pull/1769)
|
||||
- Return storage as H256 from RPC. [#1774](https://github.com/paritytech/parity/pull/1774)
|
||||
- Instant sealing engine [#1767](https://github.com/paritytech/parity/pull/1767)
|
||||
- fix state unsafety with a mostly-guaranteed handle [#1755](https://github.com/paritytech/parity/pull/1755)
|
||||
- Gas for mem optimization [#1768](https://github.com/paritytech/parity/pull/1768)
|
||||
- Min and Max peers setting [#1771](https://github.com/paritytech/parity/pull/1771)
|
||||
- Disable WAL [#1765](https://github.com/paritytech/parity/pull/1765)
|
||||
- Add new line when printing start strings [#1766](https://github.com/paritytech/parity/pull/1766)
|
||||
- Log tweak [#1764](https://github.com/paritytech/parity/pull/1764)
|
||||
- Remove update_sealing call on importing own block [#1762](https://github.com/paritytech/parity/pull/1762)
|
||||
- Single DB [#1741](https://github.com/paritytech/parity/pull/1741)
|
||||
- Tweak format of log so it's not so verbose. [#1758](https://github.com/paritytech/parity/pull/1758)
|
||||
- Combine mining queue and enabled into single locked datum [#1749](https://github.com/paritytech/parity/pull/1749)
|
||||
- Collect consensus/null engines into a single module [#1754](https://github.com/paritytech/parity/pull/1754)
|
||||
- Fix failing deserialization test [#1756](https://github.com/paritytech/parity/pull/1756)
|
||||
- Stackoverflow fix [#1742](https://github.com/paritytech/parity/pull/1742)
|
||||
- compaction profile used during migration, fixes #1750 [#1751](https://github.com/paritytech/parity/pull/1751)
|
||||
- Splitting documentation into separate build job [#1752](https://github.com/paritytech/parity/pull/1752)
|
||||
- handle keys deserialization errors, fixes #1592 [#1701](https://github.com/paritytech/parity/pull/1701)
|
||||
- add gitlab-ci yaml [#1753](https://github.com/paritytech/parity/pull/1753)
|
||||
- Better handling of multiple migrations [#1747](https://github.com/paritytech/parity/pull/1747)
|
||||
- Disconnect peers on a fork [#1738](https://github.com/paritytech/parity/pull/1738)
|
||||
- Add RPC & client call to replay a transaction. [#1734](https://github.com/paritytech/parity/pull/1734)
|
||||
- another version bump for jsonrpc-ipc [#1744](https://github.com/paritytech/parity/pull/1744)
|
||||
- Trace other types of calls [#1727](https://github.com/paritytech/parity/pull/1727)
|
||||
- Fixing compilation on latest nightly [#1736](https://github.com/paritytech/parity/pull/1736)
|
||||
- Blocks and snapshot compression [#1687](https://github.com/paritytech/parity/pull/1687)
|
||||
- bump json-ipc-server version [#1739](https://github.com/paritytech/parity/pull/1739)
|
||||
- Use std::sync::Condvar [#1732](https://github.com/paritytech/parity/pull/1732)
|
||||
- Bump json-ipc-server version [#1733](https://github.com/paritytech/parity/pull/1733)
|
||||
- bump json-ipc-server version [#1731](https://github.com/paritytech/parity/pull/1731)
|
||||
- Fixing some clippy warnings [#1728](https://github.com/paritytech/parity/pull/1728)
|
||||
- Bumping Parity UI [#1682](https://github.com/paritytech/parity/pull/1682)
|
||||
- Various improvements to tracing & diagnostics. [#1707](https://github.com/paritytech/parity/pull/1707)
|
||||
- Fixed reading chunked EIP8 handshake [#1712](https://github.com/paritytech/parity/pull/1712)
|
||||
- Fix for importing blocks from a pipe file [#1724](https://github.com/paritytech/parity/pull/1724)
|
||||
- Proper errors for binary serializer [#1714](https://github.com/paritytech/parity/pull/1714)
|
||||
- Use a transaction for writing blocks [#1718](https://github.com/paritytech/parity/pull/1718)
|
||||
- Exclude generated code from coverage [#1720](https://github.com/paritytech/parity/pull/1720)
|
||||
- Use single binary for ipc modules [#1710](https://github.com/paritytech/parity/pull/1710)
|
||||
- Log a chain-reorg. [#1715](https://github.com/paritytech/parity/pull/1715)
|
||||
- Restore new block informant message [#1716](https://github.com/paritytech/parity/pull/1716)
|
||||
- Parallel block body download [#1659](https://github.com/paritytech/parity/pull/1659)
|
||||
- Rotate blockchain cache [#1709](https://github.com/paritytech/parity/pull/1709)
|
||||
- Fix broken internal names. [#1711](https://github.com/paritytech/parity/pull/1711)
|
||||
- cli overhaul [#1600](https://github.com/paritytech/parity/pull/1600)
|
||||
- Key files include timestamp in name. [#1700](https://github.com/paritytech/parity/pull/1700)
|
||||
- Fixing warnings [#1705](https://github.com/paritytech/parity/pull/1705)
|
||||
- Ethereum classic [#1706](https://github.com/paritytech/parity/pull/1706)
|
||||
- Docker Arguments [#1703](https://github.com/paritytech/parity/pull/1703)
|
||||
- Informant tidyup. [#1699](https://github.com/paritytech/parity/pull/1699)
|
||||
- Name and meta in accounts [#1695](https://github.com/paritytech/parity/pull/1695)
|
||||
- Stackoverflow #1686 [#1698](https://github.com/paritytech/parity/pull/1698)
|
||||
- filtering transactions toAddress includes contract creation [#1697](https://github.com/paritytech/parity/pull/1697)
|
||||
- Prevent syncing to ancient blocks [#1693](https://github.com/paritytech/parity/pull/1693)
|
||||
- Enable WAL and disable DB repair [#1696](https://github.com/paritytech/parity/pull/1696)
|
||||
- Returning error when transaction is rejected (for consistency) [#1667](https://github.com/paritytech/parity/pull/1667)
|
||||
- Disabling signer when in geth-compatibility mode [#1676](https://github.com/paritytech/parity/pull/1676)
|
||||
- Suicides tracing [#1688](https://github.com/paritytech/parity/pull/1688)
|
||||
- small cleanup of substate.rs [#1685](https://github.com/paritytech/parity/pull/1685)
|
||||
- resolve #411: remove install scripts [#1684](https://github.com/paritytech/parity/pull/1684)
|
||||
- IPC (feature-gated) [#1654](https://github.com/paritytech/parity/pull/1654)
|
||||
- Bumping JSONRPC-http-server [#1678](https://github.com/paritytech/parity/pull/1678)
|
||||
- Fixing hash deserialisation [#1674](https://github.com/paritytech/parity/pull/1674)
|
||||
- Ping discovery nodes gradually [#1671](https://github.com/paritytech/parity/pull/1671)
|
||||
- Fixing the deadlock on incoming connection [#1672](https://github.com/paritytech/parity/pull/1672)
|
||||
- Fixing errors returned by sendTransaction* method family [#1665](https://github.com/paritytech/parity/pull/1665)
|
||||
- Moved syncing log out of the client [#1670](https://github.com/paritytech/parity/pull/1670)
|
||||
- Host validation (again) [#1666](https://github.com/paritytech/parity/pull/1666)
|
||||
- Update install-deps.sh [ci skip] [#1664](https://github.com/paritytech/parity/pull/1664)
|
||||
- fix typos [#1644](https://github.com/paritytech/parity/pull/1644)
|
||||
- Size for blocks [#1668](https://github.com/paritytech/parity/pull/1668)
|
||||
- Revert "Validating Host headers in RPC requests" [#1663](https://github.com/paritytech/parity/pull/1663)
|
||||
- Validating Host headers in RPC requests [#1658](https://github.com/paritytech/parity/pull/1658)
|
||||
- fixed failing master [#1662](https://github.com/paritytech/parity/pull/1662)
|
||||
- Fixing clippy warnings [#1660](https://github.com/paritytech/parity/pull/1660)
|
||||
- Don't ping all nodes on start [#1656](https://github.com/paritytech/parity/pull/1656)
|
||||
- More performance optimizations [#1649](https://github.com/paritytech/parity/pull/1649)
|
||||
- Removing unused client code [#1645](https://github.com/paritytech/parity/pull/1645)
|
||||
- Asynchronous transactions (polling based for now). [#1652](https://github.com/paritytech/parity/pull/1652)
|
||||
- Sync stand-alone binary and feature-gated dependencies refactoring [#1637](https://github.com/paritytech/parity/pull/1637)
|
||||
- Re-enabling Parity UI [#1627](https://github.com/paritytech/parity/pull/1627)
|
||||
- Blockchain repair on missing state root [#1646](https://github.com/paritytech/parity/pull/1646)
|
||||
- Multi-mode logging. [#1643](https://github.com/paritytech/parity/pull/1643)
|
||||
- Pro paths [#1650](https://github.com/paritytech/parity/pull/1650)
|
||||
- Performance optimizations [#1642](https://github.com/paritytech/parity/pull/1642)
|
||||
- Removed DAO soft fork traces [#1639](https://github.com/paritytech/parity/pull/1639)
|
||||
- Compiler version update for windows [#1638](https://github.com/paritytech/parity/pull/1638)
|
||||
- Delete values immediately from DB overlay [#1631](https://github.com/paritytech/parity/pull/1631)
|
||||
- DAO hard-fork [#1483](https://github.com/paritytech/parity/pull/1483)
|
||||
- fix network_start regression [#1629](https://github.com/paritytech/parity/pull/1629)
|
||||
- Die if the DB is newer than the one supported. [#1630](https://github.com/paritytech/parity/pull/1630)
|
||||
- Cleanup of colour code. Use is_a_tty. [#1621](https://github.com/paritytech/parity/pull/1621)
|
||||
- don't batch best block for branches [#1623](https://github.com/paritytech/parity/pull/1623)
|
||||
- In-memory trie operations [#1408](https://github.com/paritytech/parity/pull/1408)
|
||||
- Fix "pending" parameter on RPC block requests [#1602](https://github.com/paritytech/parity/pull/1602)
|
||||
- Allow RPC to use solc to compile solidity [#1607](https://github.com/paritytech/parity/pull/1607)
|
||||
- IPC RPC deriving for traits [#1599](https://github.com/paritytech/parity/pull/1599)
|
||||
- Utilize cached kcov if exists [#1619](https://github.com/paritytech/parity/pull/1619)
|
||||
- Fixing no-ui feature [#1618](https://github.com/paritytech/parity/pull/1618)
|
||||
- Couple of rocksdb optimizations [#1614](https://github.com/paritytech/parity/pull/1614)
|
||||
- Miner tests [#1597](https://github.com/paritytech/parity/pull/1597)
|
||||
- Sync IPC interface [#1584](https://github.com/paritytech/parity/pull/1584)
|
||||
- Make sure reserved peers are in the node table [#1616](https://github.com/paritytech/parity/pull/1616)
|
||||
- Fix bloomchain on blockchain repair [#1610](https://github.com/paritytech/parity/pull/1610)
|
||||
- fixed broken tracing [#1615](https://github.com/paritytech/parity/pull/1615)
|
||||
- fix benchmark compilation [#1612](https://github.com/paritytech/parity/pull/1612)
|
||||
- Updating jsonrpc-http-server [#1611](https://github.com/paritytech/parity/pull/1611)
|
||||
- replace synchronization primitives with those from parking_lot [#1593](https://github.com/paritytech/parity/pull/1593)
|
||||
- ui compilation feature [#1604](https://github.com/paritytech/parity/pull/1604)
|
||||
- is_zero() and pow() optimisations for uint [#1608](https://github.com/paritytech/parity/pull/1608)
|
||||
- Optimizing & Cleaning the build [#1591](https://github.com/paritytech/parity/pull/1591)
|
||||
- Fix logging [#1590](https://github.com/paritytech/parity/pull/1590)
|
||||
- remove unnecessary mutex in logging [#1601](https://github.com/paritytech/parity/pull/1601)
|
||||
- Using streamlined parity-ui repository [#1566](https://github.com/paritytech/parity/pull/1566)
|
||||
- Optimizing InstructionInfo access. [#1595](https://github.com/paritytech/parity/pull/1595)
|
||||
- V7 Migration progress indicator [#1594](https://github.com/paritytech/parity/pull/1594)
|
||||
- bring snapshotting work into master [#1577](https://github.com/paritytech/parity/pull/1577)
|
||||
- Bump clippy [#1587](https://github.com/paritytech/parity/pull/1587)
|
||||
- refactoring of handshake messages serialization in ipc [#1586](https://github.com/paritytech/parity/pull/1586)
|
||||
- expunge &Vec<T> pattern [#1579](https://github.com/paritytech/parity/pull/1579)
|
||||
- EVM gas for memory tiny optimization [#1578](https://github.com/paritytech/parity/pull/1578)
|
||||
- cleaned up parity/signer [#1551](https://github.com/paritytech/parity/pull/1551)
|
||||
- Major sync <-> client interactions refactoring [#1572](https://github.com/paritytech/parity/pull/1572)
|
||||
- failing test with overlayrecent pruning [#1567](https://github.com/paritytech/parity/pull/1567)
|
||||
- Enable state queries for OverlayRecent DB [#1575](https://github.com/paritytech/parity/pull/1575)
|
||||
- have AccountDB use address hash for uniqueness [#1533](https://github.com/paritytech/parity/pull/1533)
|
||||
- Very basic EVM binary. [#1574](https://github.com/paritytech/parity/pull/1574)
|
||||
- Some obvious evm & uint optimizations [#1576](https://github.com/paritytech/parity/pull/1576)
|
||||
- Fixing clippy warnings [#1568](https://github.com/paritytech/parity/pull/1568)
|
||||
- Miner's gas price gets updated dynamically [#1570](https://github.com/paritytech/parity/pull/1570)
|
||||
- bringing hypervisor as a crate in ipc dir [#1565](https://github.com/paritytech/parity/pull/1565)
|
||||
- Init public interface with IO message [#1573](https://github.com/paritytech/parity/pull/1573)
|
||||
- Uncommenting simple Miner tests [#1571](https://github.com/paritytech/parity/pull/1571)
|
||||
- Kill lock unwraps [#1558](https://github.com/paritytech/parity/pull/1558)
|
||||
- Fixing deadlock in miner [#1569](https://github.com/paritytech/parity/pull/1569)
|
||||
- Idealpeers in log [#1563](https://github.com/paritytech/parity/pull/1563)
|
||||
- Simple style fix. [#1561](https://github.com/paritytech/parity/pull/1561)
|
||||
- Enum variants serialisation test&fix [#1559](https://github.com/paritytech/parity/pull/1559)
|
||||
- Supporting /api/ping for dapps server [#1543](https://github.com/paritytech/parity/pull/1543)
|
||||
- Client IPC Interface [#1493](https://github.com/paritytech/parity/pull/1493)
|
||||
- Kill timers when removing IO handler [#1554](https://github.com/paritytech/parity/pull/1554)
|
||||
- Fix and add info messages [#1552](https://github.com/paritytech/parity/pull/1552)
|
||||
- Fix indent of #1541 [#1555](https://github.com/paritytech/parity/pull/1555)
|
||||
- Update sealing just once when externally importing many blocks [#1541](https://github.com/paritytech/parity/pull/1541)
|
||||
- Remove soft-fork stuff. [#1548](https://github.com/paritytech/parity/pull/1548)
|
||||
- fix codegen warning [#1550](https://github.com/paritytech/parity/pull/1550)
|
||||
- Extend migration framework [#1546](https://github.com/paritytech/parity/pull/1546)
|
||||
- Refactoring dapps to support API endpoints. [#1542](https://github.com/paritytech/parity/pull/1542)
|
||||
- serde is no longer util dependency [#1534](https://github.com/paritytech/parity/pull/1534)
|
||||
- mention wiki in README [#1549](https://github.com/paritytech/parity/pull/1549)
|
||||
- Skipping transactions with invalid nonces when pushing to block. [#1545](https://github.com/paritytech/parity/pull/1545)
|
||||
- Silent running operating modes [#1477](https://github.com/paritytech/parity/pull/1477)
|
||||
- util cleanup [#1474](https://github.com/paritytech/parity/pull/1474)
|
||||
- Calculating gas using usize (if supplied gaslimit fits in usize) [#1518](https://github.com/paritytech/parity/pull/1518)
|
||||
- add owning NibbleVec [#1536](https://github.com/paritytech/parity/pull/1536)
|
||||
- Attempt to fix blockchain/extras DBs sync [#1538](https://github.com/paritytech/parity/pull/1538)
|
||||
- Client API refactoring - limiting errors to crate-level error types [#1525](https://github.com/paritytech/parity/pull/1525)
|
||||
- IPC codegen enhancement - allow void methods [#1540](https://github.com/paritytech/parity/pull/1540)
|
||||
- Fixing serving nested files for dapps. [#1539](https://github.com/paritytech/parity/pull/1539)
|
||||
- Fixed public address config [#1537](https://github.com/paritytech/parity/pull/1537)
|
||||
- Fixing compilation&clippy warnings [#1531](https://github.com/paritytech/parity/pull/1531)
|
||||
- creating ethereum dir while in geth mode [#1530](https://github.com/paritytech/parity/pull/1530)
|
||||
- Bumping clippy [#1532](https://github.com/paritytech/parity/pull/1532)
|
||||
- Make signer default as long as --unlock isn't used. [#1524](https://github.com/paritytech/parity/pull/1524)
|
||||
- add client timeout when requesting usd price for gas [#1526](https://github.com/paritytech/parity/pull/1526)
|
||||
- Fix gitter-url link in README.md [#1528](https://github.com/paritytech/parity/pull/1528)
|
||||
- Fix error message. [#1527](https://github.com/paritytech/parity/pull/1527)
|
||||
- BTreeMap binary serialization [#1489](https://github.com/paritytech/parity/pull/1489)
|
||||
- Save block reference in the queue on notification [#1501](https://github.com/paritytech/parity/pull/1501)
|
||||
- bigint tests to run on CI [#1522](https://github.com/paritytech/parity/pull/1522)
|
||||
- Client api cleaning - uncles are returned as rlp [#1516](https://github.com/paritytech/parity/pull/1516)
|
||||
- Fatdb integration with CLI [#1464](https://github.com/paritytech/parity/pull/1464)
|
||||
- Optimizing/simplifying shr [#1517](https://github.com/paritytech/parity/pull/1517)
|
||||
- change IPC codegen to allow attributes [#1500](https://github.com/paritytech/parity/pull/1500)
|
||||
- Fix warnings [#1514](https://github.com/paritytech/parity/pull/1514)
|
||||
- FatDB [#1452](https://github.com/paritytech/parity/pull/1452)
|
||||
- Fix the reseal mechanism. [#1513](https://github.com/paritytech/parity/pull/1513)
|
||||
- Update Dockerfile ubuntu-aarch64 [#1509](https://github.com/paritytech/parity/pull/1509)
|
||||
- Update Ubuntu-arm Dockerfile [#1510](https://github.com/paritytech/parity/pull/1510)
|
||||
- Update Ubuntu-jit Dockerfile [#1511](https://github.com/paritytech/parity/pull/1511)
|
||||
- Update Ubuntu Dockerfile [#1512](https://github.com/paritytech/parity/pull/1512)
|
||||
- Update CentOS Dockerfile [#1508](https://github.com/paritytech/parity/pull/1508)
|
||||
- bump status page v0.5.1 [#1502](https://github.com/paritytech/parity/pull/1502)
|
||||
- Update CentOS Dockerfile [#1507](https://github.com/paritytech/parity/pull/1507)
|
||||
- Update Dockerfile ubuntu-aarch64 [#1506](https://github.com/paritytech/parity/pull/1506)
|
||||
- Update Ubuntu-arm Dockerfile [#1505](https://github.com/paritytech/parity/pull/1505)
|
||||
- Update Ubuntu-jit Dockerfile [#1504](https://github.com/paritytech/parity/pull/1504)
|
||||
- Update Ubuntu Dockerfile [#1503](https://github.com/paritytech/parity/pull/1503)
|
||||
- Optionally clone block behind work-package [#1497](https://github.com/paritytech/parity/pull/1497)
|
||||
- Fix no colour on windows. [#1498](https://github.com/paritytech/parity/pull/1498)
|
||||
- Workaround for hyper panic [#1495](https://github.com/paritytech/parity/pull/1495)
|
||||
- Colourful notification on mine [#1488](https://github.com/paritytech/parity/pull/1488)
|
||||
- Quick fix for max open files error [#1494](https://github.com/paritytech/parity/pull/1494)
|
||||
- Work notification over HTTP [#1491](https://github.com/paritytech/parity/pull/1491)
|
||||
- Sealed block importing and propagation optimization [#1478](https://github.com/paritytech/parity/pull/1478)
|
||||
- vm factory to mining client [#1487](https://github.com/paritytech/parity/pull/1487)
|
||||
- topbar dialog fix [#1479](https://github.com/paritytech/parity/pull/1479)
|
||||
- Minor additions to allow resetting of code. [#1482](https://github.com/paritytech/parity/pull/1482)
|
||||
- Introduce options for fine-grained management of work queue. [#1484](https://github.com/paritytech/parity/pull/1484)
|
||||
- Snapshot state restoration [#1308](https://github.com/paritytech/parity/pull/1308)
|
||||
- Merge master into pv64 branch [#1486](https://github.com/paritytech/parity/pull/1486)
|
||||
- Ensure we don't reject our own transactions for gasprice. [#1485](https://github.com/paritytech/parity/pull/1485)
|
||||
- Signing parity executable & windows installer in appveyor [#1481](https://github.com/paritytech/parity/pull/1481)
|
||||
- Rearrange fork CLI options. [#1476](https://github.com/paritytech/parity/pull/1476)
|
||||
- give appveyor some breath [#1475](https://github.com/paritytech/parity/pull/1475)
|
||||
- Ensure we always get the latest work when mining on submitted. [#1469](https://github.com/paritytech/parity/pull/1469)
|
||||
- Tests for views [#1471](https://github.com/paritytech/parity/pull/1471)
|
||||
- json ipc version bump [#1470](https://github.com/paritytech/parity/pull/1470)
|
||||
- verifier is no longer a template type of client [#1467](https://github.com/paritytech/parity/pull/1467)
|
||||
- Allow configuration of when to reseal blocks. [#1460](https://github.com/paritytech/parity/pull/1460)
|
||||
- removed unsafe code [#1466](https://github.com/paritytech/parity/pull/1466)
|
||||
- WS bump + Adding default for value [#1465](https://github.com/paritytech/parity/pull/1465)
|
||||
- Attempt DB repair if corrupted [#1461](https://github.com/paritytech/parity/pull/1461)
|
||||
- Database configuration extended [#1454](https://github.com/paritytech/parity/pull/1454)
|
||||
- Updating WS-RS server [#1459](https://github.com/paritytech/parity/pull/1459)
|
||||
- Reduced IO messages; removed panics on IO notifications [#1457](https://github.com/paritytech/parity/pull/1457)
|
||||
- Handle errors when starting parity --signer [#1451](https://github.com/paritytech/parity/pull/1451)
|
||||
- Fixed losing queued blocks on error [#1453](https://github.com/paritytech/parity/pull/1453)
|
||||
- Updated to latest hyper with patched mio [#1450](https://github.com/paritytech/parity/pull/1450)
|
||||
- Retweak BASE and MULTIPLIER in rocksdb config. [#1445](https://github.com/paritytech/parity/pull/1445)
|
||||
- Removing Miner::default. [#1410](https://github.com/paritytech/parity/pull/1410)
|
||||
- Don't mine without --author [#1436](https://github.com/paritytech/parity/pull/1436)
|
||||
- Revert the rescuedao extradata. [#1437](https://github.com/paritytech/parity/pull/1437)
|
||||
- More conservative settings for rocksdb. [#1440](https://github.com/paritytech/parity/pull/1440)
|
||||
- v1.3.0 in master [#1421](https://github.com/paritytech/parity/pull/1421)
|
||||
- Update Ubuntu-arm Dockerfile [#1429](https://github.com/paritytech/parity/pull/1429)
|
||||
- Create Dockerfile ubuntu-aarch64 [#1430](https://github.com/paritytech/parity/pull/1430)
|
||||
- Update CentOS Dockerfile [#1424](https://github.com/paritytech/parity/pull/1424)
|
||||
- Update Ubuntu Dockerfile [#1426](https://github.com/paritytech/parity/pull/1426)
|
||||
- Update Ubuntu-jit Dockerfile [#1427](https://github.com/paritytech/parity/pull/1427)
|
||||
- Update SF blocknumber to 1800000. [#1418](https://github.com/paritytech/parity/pull/1418)
|
||||
744
docs/CHANGELOG-1.4.md
Normal file
744
docs/CHANGELOG-1.4.md
Normal file
@@ -0,0 +1,744 @@
|
||||
## Parity [v1.4.12](https://github.com/paritytech/parity/releases/tag/v1.4.12) (2017-02-22)
|
||||
|
||||
This stable release fixes an issue with block uncle validation. Parity now allows uncle headers to have a timestamp set to an arbitrary future value.
|
||||
|
||||
- Stable Backporting ([#4633](https://github.com/paritytech/parity/pull/4633)) [#4642](https://github.com/paritytech/parity/pull/4642)
|
||||
- Tweak some checks.
|
||||
- Fixed build and added a difficulty test
|
||||
- Bump to v1.4.12
|
||||
- Add missing maxCodeSize [#4585](https://github.com/paritytech/parity/pull/4585)
|
||||
|
||||
## Parity [v1.4.11](https://github.com/paritytech/parity/releases/tag/v1.4.11) (2017-02-17)
|
||||
|
||||
This release corrects the Ropsten chain specification file.
|
||||
|
||||
- Bump to v1.4.11 [#4587](https://github.com/paritytech/parity/pull/4587)
|
||||
- Fixing etherscan price parsing ([#4202](https://github.com/paritytech/parity/pull/4202)) [#4209](https://github.com/paritytech/parity/pull/4209)
|
||||
- Fixing etherscan price parsing
|
||||
- Handling all errors
|
||||
- Removed pdbs
|
||||
- Add missing maxCodeSize [#4585](https://github.com/paritytech/parity/pull/4585)
|
||||
|
||||
## Parity [v1.4.10](https://github.com/paritytech/parity/releases/tag/v1.4.10) (2017-01-18)
|
||||
|
||||
Parity 1.4.10 is the first stable release of the 1.4.x series. It includes a few minor networking fixes.
|
||||
|
||||
- Gas_limit for blocks, mined by Parity will be divisible by 37 (#4154) [#4179](https://github.com/paritytech/parity/pull/4179)
|
||||
- gas_limit for new blocks will divide evenly by 13
|
||||
- increased PARITY_GAS_LIMIT_DETERMINANT to 37
|
||||
- separate method for marking mined block
|
||||
- debug_asserts(gas_limit within protocol range)
|
||||
- round_block_gas_limit method is now static
|
||||
- made round_block_gas_limit free-function
|
||||
- multiplier->multiple
|
||||
- Backporting to 1.4.10-stable [#4110](https://github.com/paritytech/parity/pull/4110)
|
||||
- Bump to v1.4.10
|
||||
- No reorg limit for ancient blocks
|
||||
- Update registration after every write
|
||||
|
||||
## Parity [v1.4.9](https://github.com/paritytech/parity/releases/tag/v1.4.9) (2017-01-09)
|
||||
|
||||
This fixes an issue introduced in 1.4.8 that causes Parity to panic on propagating transactions in some cases.
|
||||
|
||||
- v1.4.9 in beta [#4097](https://github.com/paritytech/parity/pull/4097)
|
||||
- Bump to v1.4.9
|
||||
- Disable armv6 build
|
||||
- beta Fix queue deadlock [#4095](https://github.com/paritytech/parity/pull/4095)
|
||||
- Fix rebroadcast panic beta [#4085](https://github.com/paritytech/parity/pull/4085)
|
||||
- fix compile
|
||||
- fix backport
|
||||
- clean up old method
|
||||
- remove unnecessary reference
|
||||
- simplify
|
||||
- Fixing 'simplify'
|
||||
|
||||
## Parity [v1.4.8](https://github.com/paritytech/parity/releases/tag/v1.4.8) (2017-01-06)
|
||||
|
||||
Ethereum Classic Hard Fork ready release containing various bugfixes:
|
||||
|
||||
- Fix for excessive transactions propagation
|
||||
- Fix for inconsistent `logIndex` in transaction receipts
|
||||
|
||||
See [full list of changes](https://github.com/paritytech/parity/compare/v1.4.7...v1.4.8):
|
||||
|
||||
- Beta backports [#4067](https://github.com/paritytech/parity/pull/4067)
|
||||
- Re-broadcast transactions to few random peers on each new block. (#4054) [#4061](https://github.com/paritytech/parity/pull/4061)
|
||||
- Tolerate errors in user_defaults [#4060](https://github.com/paritytech/parity/pull/4060)
|
||||
- ETC Config change backport [#4056](https://github.com/paritytech/parity/pull/4056)
|
||||
- [beta] Avoid re-broadcasting transactions on each block [#4047](https://github.com/paritytech/parity/pull/4047)
|
||||
- Beta Backports [#4012](https://github.com/paritytech/parity/pull/4012)
|
||||
|
||||
## Parity [v1.4.7](https://github.com/paritytech/parity/releases/tag/v1.4.7) (2016-12-27)
|
||||
|
||||
This maintenance release fixes an issue with sync falling behind occasionally.
|
||||
|
||||
- Backporting to beta [#3980](https://github.com/paritytech/parity/pull/3980)
|
||||
- [beta] enforce gas limit falls within engine bounds [#3816](https://github.com/paritytech/parity/pull/3816)
|
||||
|
||||
|
||||
## Parity [v1.4.6](https://github.com/paritytech/parity/releases/tag/v1.4.6) (2016-12-05)
|
||||
|
||||
This patch release fixes an issue with syncing on the Ropsten test network.
|
||||
|
||||
- Backporting to beta [#3718](https://github.com/paritytech/parity/pull/3718)
|
||||
- [beta] scrollable contract deploy & execute modals [#3656](https://github.com/paritytech/parity/pull/3656)
|
||||
|
||||
## Parity [v1.4.5](https://github.com/paritytech/parity/releases/tag/v1.4.5) (2016-11-26)
|
||||
|
||||
The 1.4.5 release fixes a number of issues, notably:
|
||||
- High CPU usage when idle.
|
||||
- Key recovery phrases generated on windows now can be imported.
|
||||
|
||||
#### Configuration changes
|
||||
- `--usd-per-tx` is now set to 0.0025 by default.
|
||||
|
||||
#### New features
|
||||
- Support for Ropsten test network is introduced with `--chain=ropsten` or `--testnet`. Morden network is still available via `--chain=morden`
|
||||
|
||||
#### Full changes
|
||||
- [beta] Pin package versions for React [#3628](https://github.com/paritytech/parity/pull/3628)
|
||||
- Backporting to beta [#3623](https://github.com/paritytech/parity/pull/3623)
|
||||
- [beta] Ropsten chain for UI [#3622](https://github.com/paritytech/parity/pull/3622)
|
||||
|
||||
## Parity [v1.4.4](https://github.com/paritytech/parity/releases/tag/v1.4.4) (2016-11-18)
|
||||
|
||||
This is a maintenance release that fixes an issue with EIP-155 transactions being added to the transaction pool. It also improves syncing stability and resolves a number of UI issues.
|
||||
Full changelog is available [here.](https://github.com/paritytech/parity/commit/3e0d033eaf789cfdf517f4a97effc500f1f9263b)
|
||||
|
||||
- [beta] apps typo fix [#3533](https://github.com/paritytech/parity/pull/3533)
|
||||
- Backporting to beta [#3525](https://github.com/paritytech/parity/pull/3525)
|
||||
|
||||
## Parity [v1.4.3](https://github.com/paritytech/parity/releases/tag/v1.4.3) (2016-11-16)
|
||||
|
||||
This release includes memory footprint optimization as well as a few fixes in the UI.
|
||||
EIP-155/160/161/170 hardfork is enabled at block 2675000 (1885000 for test network).
|
||||
Full changelog is available [here.](https://github.com/paritytech/parity/compare/v1.4.2...v1.4.3)
|
||||
|
||||
- [beta] EIP-170 [#3464](https://github.com/paritytech/parity/pull/3464)
|
||||
- Backports to beta [#3465](https://github.com/paritytech/parity/pull/3465)
|
||||
- Backport: additional fields on transaction and receipt [#3463](https://github.com/paritytech/parity/pull/3463)
|
||||
- v1.4.3 in beta [#3424](https://github.com/paritytech/parity/pull/3424)
|
||||
|
||||
## Parity [v1.4.2](https://github.com/paritytech/parity/releases/tag/v1.4.2) (2016-11-10)
|
||||
|
||||
This release fixes a few additional issues:
|
||||
- Parity now correctly handles external `--dapps-interface` and `--ui-interface` in the UI.
|
||||
- Crash in `eth_getUncle*` has been fixed.
|
||||
- macOS installer now includes an uninstall script.
|
||||
- Security token input UI has been fixed.
|
||||
- Correct display for tokens with minimum decimals.
|
||||
|
||||
And some additional minor changes. Full changelog is [available](https://github.com/paritytech/parity/compare/v1.4.1...v1.4.2)
|
||||
- Backporting to beta [#3344](https://github.com/paritytech/parity/pull/3344)
|
||||
- Backporting to beta [#3324](https://github.com/paritytech/parity/pull/3324)
|
||||
|
||||
## Parity [v1.4.1](https://github.com/paritytech/parity/releases/tag/v1.4.1) (2016-11-09)
|
||||
|
||||
This is a hotfix release to address a couple of issues with 1.4.0:
|
||||
|
||||
- UI token is requested instead of being supplied automatically.
|
||||
- Running with `--geth` results in an error.
|
||||
|
||||
- Backporting to beta [#3293](https://github.com/paritytech/parity/pull/3293)
|
||||
|
||||
## Parity [v1.4.0](https://github.com/paritytech/parity/releases/tag/v1.4.0) (2016-11-07)
|
||||
|
||||
First beta release of the 1.4 series.
|
||||
|
||||
This includes the new Parity Wallet and Warp-Sync synchronisation as well as several optimisations and fixes.
|
||||
|
||||
- Add secure flag back [#3246](https://github.com/paritytech/parity/pull/3246)
|
||||
- [BETA] verify chunk hashes in cli restore [#3242](https://github.com/paritytech/parity/pull/3242)
|
||||
- Backporting to beta [#3239](https://github.com/paritytech/parity/pull/3239)
|
||||
- UI fixes backporting [#3234](https://github.com/paritytech/parity/pull/3234)
|
||||
- Backporting to beta [#3229](https://github.com/paritytech/parity/pull/3229)
|
||||
- Beta branch cleanup [#3226](https://github.com/paritytech/parity/pull/3226)
|
||||
- [beta] Set passive mode for first run only (#3214) [#3216](https://github.com/paritytech/parity/pull/3216)
|
||||
- Mode configuration backported to beta [#3213](https://github.com/paritytech/parity/pull/3213)
|
||||
- Backporting [#3198](https://github.com/paritytech/parity/pull/3198)
|
||||
- [beta] EIP-155 update with Vitalik's new test vectors (#3166) [#3189](https://github.com/paritytech/parity/pull/3189)
|
||||
- Backporting to beta [#3176](https://github.com/paritytech/parity/pull/3176)
|
||||
- parity-ui-precompiled pinned to beta [#3168](https://github.com/paritytech/parity/pull/3168)
|
||||
- EIP-155 update with Vitalik's new test vectors [#3166](https://github.com/paritytech/parity/pull/3166)
|
||||
- Push precompiled for beta/stable, npm only master [#3163](https://github.com/paritytech/parity/pull/3163)
|
||||
- Back to real root after npm publish [#3178](https://github.com/paritytech/parity/pull/3178)
|
||||
- Remove extra cd js [#3177](https://github.com/paritytech/parity/pull/3177)
|
||||
- Fixes Gas price selection bug [#3175](https://github.com/paritytech/parity/pull/3175)
|
||||
- Exposing state root and logsBloom in RPC receipts [#3174](https://github.com/paritytech/parity/pull/3174)
|
||||
- Exposing v,r,s from transaction signature in RPC [#3172](https://github.com/paritytech/parity/pull/3172)
|
||||
- Enabling personal RPC over IPC by default [#3165](https://github.com/paritytech/parity/pull/3165)
|
||||
- Gitlab CI badge [#3164](https://github.com/paritytech/parity/pull/3164)
|
||||
- Dependencies in README [#3162](https://github.com/paritytech/parity/pull/3162)
|
||||
- Make the footer a bit less ugly. [#3160](https://github.com/paritytech/parity/pull/3160)
|
||||
- Linux build case sensitivity fix [#3161](https://github.com/paritytech/parity/pull/3161)
|
||||
- abbreviated enode, `CopyToClipboard` component [#3131](https://github.com/paritytech/parity/pull/3131)
|
||||
- EIPs 155, 160, 161 [#2976](https://github.com/paritytech/parity/pull/2976)
|
||||
- beta reset to 1.4.0 [#3157](https://github.com/paritytech/parity/pull/3157)
|
||||
- Fix histogram [#3150](https://github.com/paritytech/parity/pull/3150)
|
||||
- Remove network label from TabBar [#3142](https://github.com/paritytech/parity/pull/3142)
|
||||
- Speed up unresponsive Contract events & Account transactions [#3145](https://github.com/paritytech/parity/pull/3145)
|
||||
- Better windows shortcut [#3147](https://github.com/paritytech/parity/pull/3147)
|
||||
- Redirect content to the same address as requested [#3133](https://github.com/paritytech/parity/pull/3133)
|
||||
- Fixed peer ping timeout [#3137](https://github.com/paritytech/parity/pull/3137)
|
||||
- Fix for windows build [#3125](https://github.com/paritytech/parity/pull/3125)
|
||||
- Fix AddessInput icon position [#3132](https://github.com/paritytech/parity/pull/3132)
|
||||
- Fixed not scrollable accounts in tokenreg dapp [#3128](https://github.com/paritytech/parity/pull/3128)
|
||||
- Returning cache headers for network content [#3123](https://github.com/paritytech/parity/pull/3123)
|
||||
- Optimise contract events display [#3120](https://github.com/paritytech/parity/pull/3120)
|
||||
- Add basic validation for contract execute values [#3118](https://github.com/paritytech/parity/pull/3118)
|
||||
- Dapps errors embeddable on signer [#3115](https://github.com/paritytech/parity/pull/3115)
|
||||
- Use enode RPC in UI [#3108](https://github.com/paritytech/parity/pull/3108)
|
||||
- Windows tray app [#3103](https://github.com/paritytech/parity/pull/3103)
|
||||
- Displaying CLI errors on stderr [#3116](https://github.com/paritytech/parity/pull/3116)
|
||||
- new InputAddressSelect component [#3071](https://github.com/paritytech/parity/pull/3071)
|
||||
- Bump mio [#3117](https://github.com/paritytech/parity/pull/3117)
|
||||
- Minor typo fixed. [#3110](https://github.com/paritytech/parity/pull/3110)
|
||||
- Sort by ETH balance and contract by date [#3107](https://github.com/paritytech/parity/pull/3107)
|
||||
- Add RPC enode lookup [#3096](https://github.com/paritytech/parity/pull/3096)
|
||||
- Initializing logger for each command [#3090](https://github.com/paritytech/parity/pull/3090)
|
||||
- Allow registration of content bundles in GitHubHint [#3094](https://github.com/paritytech/parity/pull/3094)
|
||||
- Add read-only inputs to UI plus Copy to Clipboard buttons [#3095](https://github.com/paritytech/parity/pull/3095)
|
||||
- Allow boolean dropdowns for contract deploy [#3077](https://github.com/paritytech/parity/pull/3077)
|
||||
- Add mac installer files [#2995](https://github.com/paritytech/parity/pull/2995)
|
||||
- Fixing dapps sorting [#3086](https://github.com/paritytech/parity/pull/3086)
|
||||
- Add a Gitter chat badge to README.md [#3092](https://github.com/paritytech/parity/pull/3092)
|
||||
- Fixes webpack HTML loader [#3089](https://github.com/paritytech/parity/pull/3089)
|
||||
- Redirecting /home to new UI [#3084](https://github.com/paritytech/parity/pull/3084)
|
||||
- Allow GitHubHint content owner to update url [#3083](https://github.com/paritytech/parity/pull/3083)
|
||||
- Remove token assets (moved to ethcore/dapps-assets) [#3082](https://github.com/paritytech/parity/pull/3082)
|
||||
- Goodbye Gavcoin, Hello Gavcoin [#3080](https://github.com/paritytech/parity/pull/3080)
|
||||
- Load network dapps [#3078](https://github.com/paritytech/parity/pull/3078)
|
||||
- Swap account phrase input to normal (non-multiline) [#3060](https://github.com/paritytech/parity/pull/3060)
|
||||
- Fix minor typo in informant [#3056](https://github.com/paritytech/parity/pull/3056)
|
||||
- Warp sync status display [#3045](https://github.com/paritytech/parity/pull/3045)
|
||||
- Enhance address input [#3065](https://github.com/paritytech/parity/pull/3065)
|
||||
- Go to Accounts Page if Tooltips are displayed [#3063](https://github.com/paritytech/parity/pull/3063)
|
||||
- Change contract Execute bool values & query bool value display [#3024](https://github.com/paritytech/parity/pull/3024)
|
||||
- Update Parity logo [#3036](https://github.com/paritytech/parity/pull/3036)
|
||||
- settings: replace background patterns (inline) [#3047](https://github.com/paritytech/parity/pull/3047)
|
||||
- Multiple line description for dapps [#3058](https://github.com/paritytech/parity/pull/3058)
|
||||
- Fix status log order [#3062](https://github.com/paritytech/parity/pull/3062)
|
||||
- Graphical gas price selection [#2898](https://github.com/paritytech/parity/pull/2898)
|
||||
- [Registry dApp] Actions not available before selecting accounts [#3032](https://github.com/paritytech/parity/pull/3032)
|
||||
- apply post-consolidation migrations after consolidating [#3020](https://github.com/paritytech/parity/pull/3020)
|
||||
- fix chain badge padding [#3046](https://github.com/paritytech/parity/pull/3046)
|
||||
- Don't delete Tags input on blur (eg. tab) [#3044](https://github.com/paritytech/parity/pull/3044)
|
||||
- Fixing last hashes for ethcall [#3043](https://github.com/paritytech/parity/pull/3043)
|
||||
- Remove signer icons [#3039](https://github.com/paritytech/parity/pull/3039)
|
||||
- execute periodic snapshot in new thread [#3029](https://github.com/paritytech/parity/pull/3029)
|
||||
- fix background of embedded signer [#3026](https://github.com/paritytech/parity/pull/3026)
|
||||
- registry dapp: fix reducer [#3028](https://github.com/paritytech/parity/pull/3028)
|
||||
- Replace Execute by Query in contract button [#3031](https://github.com/paritytech/parity/pull/3031)
|
||||
- Fixing GavCoin dApp overflow issues [#3030](https://github.com/paritytech/parity/pull/3030)
|
||||
- execute contract function: validate address [#3013](https://github.com/paritytech/parity/pull/3013)
|
||||
- Align tag inputs with other input boxes [#2965](https://github.com/paritytech/parity/pull/2965)
|
||||
- Sweep panickers from IO and network [#3018](https://github.com/paritytech/parity/pull/3018)
|
||||
- Terms & Conditions [#3019](https://github.com/paritytech/parity/pull/3019)
|
||||
- open column families after reparing db corruption [#3017](https://github.com/paritytech/parity/pull/3017)
|
||||
- Snapshot sync and block gap info in `eth_syncing` [#2948](https://github.com/paritytech/parity/pull/2948)
|
||||
- personal_ RPCs to AutoArgs [#3000](https://github.com/paritytech/parity/pull/3000)
|
||||
- RPCs for mode change [#3002](https://github.com/paritytech/parity/pull/3002)
|
||||
- Fix a test sensitive to slow execution. [#3014](https://github.com/paritytech/parity/pull/3014)
|
||||
- Fixes search filtering issues [#3011](https://github.com/paritytech/parity/pull/3011)
|
||||
- Restart sync if no more peers with snapshots [#3007](https://github.com/paritytech/parity/pull/3007)
|
||||
- Allow empty/non-existant input arrays for ABIs in contract view [#3001](https://github.com/paritytech/parity/pull/3001)
|
||||
- Allow operation when no registry is available [#2980](https://github.com/paritytech/parity/pull/2980)
|
||||
- Make JS lint & test run on Travis [#2894](https://github.com/paritytech/parity/pull/2894)
|
||||
- Update account dropdowns [#2959](https://github.com/paritytech/parity/pull/2959)
|
||||
- Modify gas price statistics [#2947](https://github.com/paritytech/parity/pull/2947)
|
||||
- Fixes pending/mined transactions in registry dApp [#3004](https://github.com/paritytech/parity/pull/3004)
|
||||
- Prevent connecting to self [#2997](https://github.com/paritytech/parity/pull/2997)
|
||||
- Disable verbose in gitlab CI [#2999](https://github.com/paritytech/parity/pull/2999)
|
||||
- Allow warnings in gitlab [#2998](https://github.com/paritytech/parity/pull/2998)
|
||||
- Fix the brainwallet functionality. [#2994](https://github.com/paritytech/parity/pull/2994)
|
||||
- Provided gas description update [#2993](https://github.com/paritytech/parity/pull/2993)
|
||||
- Print messages to stderr [#2991](https://github.com/paritytech/parity/pull/2991)
|
||||
- Networking and syncing tweaks [#2990](https://github.com/paritytech/parity/pull/2990)
|
||||
- Allow build warnings [#2985](https://github.com/paritytech/parity/pull/2985)
|
||||
- Display network status for finished Signer requests [#2983](https://github.com/paritytech/parity/pull/2983)
|
||||
- Fixed rejecting transactions [#2984](https://github.com/paritytech/parity/pull/2984)
|
||||
- mio version bump [#2982](https://github.com/paritytech/parity/pull/2982)
|
||||
- Publish parity.js to npmjs registry [#2978](https://github.com/paritytech/parity/pull/2978)
|
||||
- Import raw private key [#2945](https://github.com/paritytech/parity/pull/2945)
|
||||
- refactor etherscan.io links [#2896](https://github.com/paritytech/parity/pull/2896)
|
||||
- Use separate lock for code cache [#2977](https://github.com/paritytech/parity/pull/2977)
|
||||
- Add favicon [#2974](https://github.com/paritytech/parity/pull/2974)
|
||||
- Align password change dialog with create dialog ordering [#2970](https://github.com/paritytech/parity/pull/2970)
|
||||
- WS bump [#2973](https://github.com/paritytech/parity/pull/2973)
|
||||
- Discovery performance optimization [#2972](https://github.com/paritytech/parity/pull/2972)
|
||||
- Pass gas & gasPrice to token transfers [#2964](https://github.com/paritytech/parity/pull/2964)
|
||||
- Updating ws-rs [#2962](https://github.com/paritytech/parity/pull/2962)
|
||||
- Run cargo with verbose flag when testing [#2943](https://github.com/paritytech/parity/pull/2943)
|
||||
- Fixing clippy warnings take two [#2961](https://github.com/paritytech/parity/pull/2961)
|
||||
- Snapshot sync improvements [#2960](https://github.com/paritytech/parity/pull/2960)
|
||||
- Gavcoin event display updates [#2956](https://github.com/paritytech/parity/pull/2956)
|
||||
- Eslint fixes [#2957](https://github.com/paritytech/parity/pull/2957)
|
||||
- Add import of raw private key RPCs [#2942](https://github.com/paritytech/parity/pull/2942)
|
||||
- Bring in styling queues from original Gavcoin [#2936](https://github.com/paritytech/parity/pull/2936)
|
||||
- Validating minimal required gas for a transaction [#2937](https://github.com/paritytech/parity/pull/2937)
|
||||
- Even more snapshot validity checks [#2935](https://github.com/paritytech/parity/pull/2935)
|
||||
- Shared code cache [#2921](https://github.com/paritytech/parity/pull/2921)
|
||||
- Updating bootnodes for ETC [#2938](https://github.com/paritytech/parity/pull/2938)
|
||||
- More bootnodes [#2926](https://github.com/paritytech/parity/pull/2926)
|
||||
- Revert hash updates until testable [#2925](https://github.com/paritytech/parity/pull/2925)
|
||||
- Release.sh verbose output [#2924](https://github.com/paritytech/parity/pull/2924)
|
||||
- additional release.sh debugging info [#2922](https://github.com/paritytech/parity/pull/2922)
|
||||
- Pass the js-precompiled commit hash to cargo update [#2920](https://github.com/paritytech/parity/pull/2920)
|
||||
- Next nonce RPC [#2917](https://github.com/paritytech/parity/pull/2917)
|
||||
- Get rid of duplicated code in EVM [#2915](https://github.com/paritytech/parity/pull/2915)
|
||||
- Transaction Queue banning [#2524](https://github.com/paritytech/parity/pull/2524)
|
||||
- Revert to gas price ordering [#2919](https://github.com/paritytech/parity/pull/2919)
|
||||
- Personal split [#2879](https://github.com/paritytech/parity/pull/2879)
|
||||
- Fixing config values for pruning_history [#2918](https://github.com/paritytech/parity/pull/2918)
|
||||
- Apply pending block details on commit [#2254](https://github.com/paritytech/parity/pull/2254)
|
||||
- Fixed GetNodeData output [#2892](https://github.com/paritytech/parity/pull/2892)
|
||||
- New sync protocol ID [#2912](https://github.com/paritytech/parity/pull/2912)
|
||||
- Clippy bump [#2877](https://github.com/paritytech/parity/pull/2877)
|
||||
- iconomi token images [#2906](https://github.com/paritytech/parity/pull/2906)
|
||||
- Fixes too long description and Token balance value in Dapps/Accounts [#2902](https://github.com/paritytech/parity/pull/2902)
|
||||
- Add missing images for local dapps [#2890](https://github.com/paritytech/parity/pull/2890)
|
||||
- Fix Webpack, again [#2895](https://github.com/paritytech/parity/pull/2895)
|
||||
- Enable suicide json test [#2893](https://github.com/paritytech/parity/pull/2893)
|
||||
- More snapshot fixes and optimizations [#2883](https://github.com/paritytech/parity/pull/2883)
|
||||
- Fixes CI JS precompiled build [#2886](https://github.com/paritytech/parity/pull/2886)
|
||||
- Fix empty tags modification [#2884](https://github.com/paritytech/parity/pull/2884)
|
||||
- Fix up informant. [#2865](https://github.com/paritytech/parity/pull/2865)
|
||||
- Get rid of MemoryDB denote [#2881](https://github.com/paritytech/parity/pull/2881)
|
||||
- Add inject to "bundle everything" list [#2871](https://github.com/paritytech/parity/pull/2871)
|
||||
- Fixes signer and MUI errors throwing [#2876](https://github.com/paritytech/parity/pull/2876)
|
||||
- Fix failing tests after log parsing updates [#2878](https://github.com/paritytech/parity/pull/2878)
|
||||
- Sweep some more panics [#2848](https://github.com/paritytech/parity/pull/2848)
|
||||
- Make GitLab js-precompiled really update Cargo.toml in main repo [#2869](https://github.com/paritytech/parity/pull/2869)
|
||||
- IPC version bump [#2870](https://github.com/paritytech/parity/pull/2870)
|
||||
- Snapshot sync fixes and optimizations [#2863](https://github.com/paritytech/parity/pull/2863)
|
||||
- Add Check and Change Password for an Account [#2861](https://github.com/paritytech/parity/pull/2861)
|
||||
- Output git fetch/push to log files [#2862](https://github.com/paritytech/parity/pull/2862)
|
||||
- Align contract event log l&f with transactions [#2812](https://github.com/paritytech/parity/pull/2812)
|
||||
- Nicer port in use errors [#2859](https://github.com/paritytech/parity/pull/2859)
|
||||
- Remove personal_* calls from dapps [#2860](https://github.com/paritytech/parity/pull/2860)
|
||||
- Token sorting, zero-ETH transfer & token decimals [#2805](https://github.com/paritytech/parity/pull/2805)
|
||||
- Don't fail badly when no transactions in last 100 blocks. [#2856](https://github.com/paritytech/parity/pull/2856)
|
||||
- Fixing home.parity address for new signer [#2851](https://github.com/paritytech/parity/pull/2851)
|
||||
- Enabling UI build back [#2853](https://github.com/paritytech/parity/pull/2853)
|
||||
- Remove eventName in unsubscribe API arguments [#2844](https://github.com/paritytech/parity/pull/2844)
|
||||
- Don't return empty names as clickable titles [#2809](https://github.com/paritytech/parity/pull/2809)
|
||||
- Auto-bump js-precompiled on release [#2828](https://github.com/paritytech/parity/pull/2828)
|
||||
- Remove ethcore::common re-export module [#2792](https://github.com/paritytech/parity/pull/2792)
|
||||
- Prevent database corruption on OOM [#2832](https://github.com/paritytech/parity/pull/2832)
|
||||
- Download/Export Addressbook [#2847](https://github.com/paritytech/parity/pull/2847)
|
||||
- Snapshot and blockchain stability improvements [#2843](https://github.com/paritytech/parity/pull/2843)
|
||||
- Extended network options [#2845](https://github.com/paritytech/parity/pull/2845)
|
||||
- fix failing master test build [#2846](https://github.com/paritytech/parity/pull/2846)
|
||||
- Local dapps embeddable on signer port [#2815](https://github.com/paritytech/parity/pull/2815)
|
||||
- Trigger accounts/contracts search on search input change [#2838](https://github.com/paritytech/parity/pull/2838)
|
||||
- Move snapshot sync to a subprotocol [#2820](https://github.com/paritytech/parity/pull/2820)
|
||||
- fix node log being reversed [#2839](https://github.com/paritytech/parity/pull/2839)
|
||||
- Fixes currency symbol font size in Shapeshift modal [#2840](https://github.com/paritytech/parity/pull/2840)
|
||||
- Disable personal APIs by default for security reasons [#2834](https://github.com/paritytech/parity/pull/2834)
|
||||
- Clear cached content [#2833](https://github.com/paritytech/parity/pull/2833)
|
||||
- Add ethcore_[dapps|signer]Port APIs [#2821](https://github.com/paritytech/parity/pull/2821)
|
||||
- CLI option to skip seal check when importing [#2842](https://github.com/paritytech/parity/pull/2842)
|
||||
- Fix case error in Dapps import [#2837](https://github.com/paritytech/parity/pull/2837)
|
||||
- Double click on address in account detail view should select it [#2841](https://github.com/paritytech/parity/pull/2841)
|
||||
- Bump js-precompiled to 20161022-223915 UTC [#2826](https://github.com/paritytech/parity/pull/2826)
|
||||
- Adjust paths to handle CORS changes [#2816](https://github.com/paritytech/parity/pull/2816)
|
||||
- RPC for dapps port and signer port [#2819](https://github.com/paritytech/parity/pull/2819)
|
||||
- Update build to working version on pre-compiled repo [#2825](https://github.com/paritytech/parity/pull/2825)
|
||||
- Adjust network name badge colours (darker) [#2823](https://github.com/paritytech/parity/pull/2823)
|
||||
- Removing submodule in favour of rust crate [#2756](https://github.com/paritytech/parity/pull/2756)
|
||||
- Return old-ish content even when syncing [#2757](https://github.com/paritytech/parity/pull/2757)
|
||||
- fix Signer UI [#2750](https://github.com/paritytech/parity/pull/2750)
|
||||
- USG, GBP, Euro & Yuan updates [#2818](https://github.com/paritytech/parity/pull/2818)
|
||||
- Make locally installed apps available again (Fixes #2771) [#2808](https://github.com/paritytech/parity/pull/2808)
|
||||
- Additional RPCs for password management [#2779](https://github.com/paritytech/parity/pull/2779)
|
||||
- flush DB changes on drop [#2795](https://github.com/paritytech/parity/pull/2795)
|
||||
- rename State::snapshot to checkpoint to avoid confusion [#2796](https://github.com/paritytech/parity/pull/2796)
|
||||
- Missing changes required to make new UI work [#2793](https://github.com/paritytech/parity/pull/2793)
|
||||
- Cleanup method decoding (Fixes #2811) [#2810](https://github.com/paritytech/parity/pull/2810)
|
||||
- Use trace API for decentralized transaction list [#2784](https://github.com/paritytech/parity/pull/2784)
|
||||
- Automatic compaction selection on Linux [#2785](https://github.com/paritytech/parity/pull/2785)
|
||||
- Update token images [#2804](https://github.com/paritytech/parity/pull/2804)
|
||||
- Hackergold token images [#2801](https://github.com/paritytech/parity/pull/2801)
|
||||
- Additional token images [#2800](https://github.com/paritytech/parity/pull/2800)
|
||||
- Additional token images [#2798](https://github.com/paritytech/parity/pull/2798)
|
||||
- Resolve morden fork [#2773](https://github.com/paritytech/parity/pull/2773)
|
||||
- Using SipHashes from crates.io [#2778](https://github.com/paritytech/parity/pull/2778)
|
||||
- Fixed issues on Searchable Addresses [#2790](https://github.com/paritytech/parity/pull/2790)
|
||||
- Currency icons [#2788](https://github.com/paritytech/parity/pull/2788)
|
||||
- Update token images [#2783](https://github.com/paritytech/parity/pull/2783)
|
||||
- Fix warning in master [#2775](https://github.com/paritytech/parity/pull/2775)
|
||||
- Add empty account existence test from beta. [#2769](https://github.com/paritytech/parity/pull/2769)
|
||||
- Update name of basiccoin manager [#2768](https://github.com/paritytech/parity/pull/2768)
|
||||
- sweep most unwraps from ethcore crate, dapps crate [#2762](https://github.com/paritytech/parity/pull/2762)
|
||||
- Check queue to determine major importing [#2763](https://github.com/paritytech/parity/pull/2763)
|
||||
- Trace filtering fix [#2760](https://github.com/paritytech/parity/pull/2760)
|
||||
- Update js precompiled to 20161020-141636 [#2761](https://github.com/paritytech/parity/pull/2761)
|
||||
- Incrementally calculate verification queue heap size [#2749](https://github.com/paritytech/parity/pull/2749)
|
||||
- Don't add empty accounts to bloom [#2753](https://github.com/paritytech/parity/pull/2753)
|
||||
- fix contract deployments not showing up [#2759](https://github.com/paritytech/parity/pull/2759)
|
||||
- Fixes a positioning issue in Address Selection component [#2754](https://github.com/paritytech/parity/pull/2754)
|
||||
- fix linting issues [#2758](https://github.com/paritytech/parity/pull/2758)
|
||||
- Making Trie.iter non-recursive [#2733](https://github.com/paritytech/parity/pull/2733)
|
||||
- Block import optimization [#2748](https://github.com/paritytech/parity/pull/2748)
|
||||
- Update js-precompiled to 20161020-110858 [#2752](https://github.com/paritytech/parity/pull/2752)
|
||||
- Fixing small files fetching [#2742](https://github.com/paritytech/parity/pull/2742)
|
||||
- Fixing stalled sync [#2747](https://github.com/paritytech/parity/pull/2747)
|
||||
- refactor signer components [#2691](https://github.com/paritytech/parity/pull/2691)
|
||||
- Png images with backgrounds (original svg) [#2740](https://github.com/paritytech/parity/pull/2740)
|
||||
- Make address selection searchable [#2739](https://github.com/paritytech/parity/pull/2739)
|
||||
- very basic dapp add/remove interface [#2721](https://github.com/paritytech/parity/pull/2721)
|
||||
- Frontport commits from beta to master [#2743](https://github.com/paritytech/parity/pull/2743)
|
||||
- Implements Trace API Formatter [#2732](https://github.com/paritytech/parity/pull/2732)
|
||||
- bump parking_lot to 0.3.x series [#2702](https://github.com/paritytech/parity/pull/2702)
|
||||
- Unify major syncing detection [#2699](https://github.com/paritytech/parity/pull/2699)
|
||||
- Fixes gas/gasPrice change not reflected in transaction modal [#2735](https://github.com/paritytech/parity/pull/2735)
|
||||
- Fixing build UI stuff along with Rust [#2726](https://github.com/paritytech/parity/pull/2726)
|
||||
- Fixed Snackbar not showing and/or behind transactions (#2730) [#2731](https://github.com/paritytech/parity/pull/2731)
|
||||
- Updating json tests to latest develop commit [#2728](https://github.com/paritytech/parity/pull/2728)
|
||||
- dapps: show errors [#2727](https://github.com/paritytech/parity/pull/2727)
|
||||
- node logs: break lines [#2722](https://github.com/paritytech/parity/pull/2722)
|
||||
- Bumping JSON-RPC http server [#2714](https://github.com/paritytech/parity/pull/2714)
|
||||
- Add ability to copy address to the clipboard [#2716](https://github.com/paritytech/parity/pull/2716)
|
||||
- Sort tags when displaying ; use AND for search results [#2720](https://github.com/paritytech/parity/pull/2720)
|
||||
- allow-same-origin for iframe [#2711](https://github.com/paritytech/parity/pull/2711)
|
||||
- Update Registry address (mainnet) [#2713](https://github.com/paritytech/parity/pull/2713)
|
||||
- Allow tags for Accounts, Addresses and Contracts [#2712](https://github.com/paritytech/parity/pull/2712)
|
||||
- Correct parameters for eth_sign [#2703](https://github.com/paritytech/parity/pull/2703)
|
||||
- Bump js-precompiled to 20161018-161705 [#2698](https://github.com/paritytech/parity/pull/2698)
|
||||
- Add inject.js (for web3 exposed) [#2692](https://github.com/paritytech/parity/pull/2692)
|
||||
- Remove obsolete dapps and update security headers [#2694](https://github.com/paritytech/parity/pull/2694)
|
||||
- Snapshot sync part 2 [#2098](https://github.com/paritytech/parity/pull/2098)
|
||||
- Fix issues with no ethereum test dir present (2382) [#2659](https://github.com/paritytech/parity/pull/2659)
|
||||
- Apply UI PRs after master merge [#2690](https://github.com/paritytech/parity/pull/2690)
|
||||
- Fix importing traces for non-canon blocks [#2683](https://github.com/paritytech/parity/pull/2683)
|
||||
- Fixing random test failures [#2577](https://github.com/paritytech/parity/pull/2577)
|
||||
- Disable IPC in default build for 1.4 [#2657](https://github.com/paritytech/parity/pull/2657)
|
||||
- use pruning history in CLI snapshots [#2658](https://github.com/paritytech/parity/pull/2658)
|
||||
- Fixing --no-default-features again and evmbin [#2670](https://github.com/paritytech/parity/pull/2670)
|
||||
- Settings > Proxy for proxy.pac setup instructions [#2678](https://github.com/paritytech/parity/pull/2678)
|
||||
- Re-instate transitions to allow updating busy indicator [#2682](https://github.com/paritytech/parity/pull/2682)
|
||||
- signer: remove reject counter [#2685](https://github.com/paritytech/parity/pull/2685)
|
||||
- Initial new UI source code import [#2607](https://github.com/paritytech/parity/pull/2607)
|
||||
- Additional dapp logo images [#2677](https://github.com/paritytech/parity/pull/2677)
|
||||
- Redirect from :8080 to :8180 [#2676](https://github.com/paritytech/parity/pull/2676)
|
||||
- script to update js-precompiled [#2673](https://github.com/paritytech/parity/pull/2673)
|
||||
- Styling in FF is not 100% [#2669](https://github.com/paritytech/parity/pull/2669)
|
||||
- Don't allow gavcoin transfer with no balances [#2667](https://github.com/paritytech/parity/pull/2667)
|
||||
- fix signer rejections [#2666](https://github.com/paritytech/parity/pull/2666)
|
||||
- better text on unique background pattern [#2664](https://github.com/paritytech/parity/pull/2664)
|
||||
- Adjust z-index for error overlay [#2662](https://github.com/paritytech/parity/pull/2662)
|
||||
- Fix address selection for contract deployment [#2660](https://github.com/paritytech/parity/pull/2660)
|
||||
- Add additional contract images [#2655](https://github.com/paritytech/parity/pull/2655)
|
||||
- Update /api/* to point to :8080/api/* (first generation interface) [#2612](https://github.com/paritytech/parity/pull/2612)
|
||||
- Initial import of new UI (compiled JS code) [#2220](https://github.com/paritytech/parity/pull/2220)
|
||||
- Fixing evmbin compilation [#2652](https://github.com/paritytech/parity/pull/2652)
|
||||
- Fix up ETC EIP-150 transition to 2,500,000. [#2636](https://github.com/paritytech/parity/pull/2636)
|
||||
- Fixing compilation without default features [#2638](https://github.com/paritytech/parity/pull/2638)
|
||||
- [frontport] CLI to specify queue ordering strategy (#2494) [#2623](https://github.com/paritytech/parity/pull/2623)
|
||||
- Support for decryption in Signer [#2421](https://github.com/paritytech/parity/pull/2421)
|
||||
- EIP150.1c [#2591](https://github.com/paritytech/parity/pull/2591)
|
||||
- Release merge with origin with ours strategy [#2631](https://github.com/paritytech/parity/pull/2631)
|
||||
- Adjust build output directories [#2630](https://github.com/paritytech/parity/pull/2630)
|
||||
- cater for txhash returning null/empty object [#2629](https://github.com/paritytech/parity/pull/2629)
|
||||
- snapshot: single byte for empty accounts [#2625](https://github.com/paritytech/parity/pull/2625)
|
||||
- Configurable history size in master [#2606](https://github.com/paritytech/parity/pull/2606)
|
||||
- Database performance tweaks [#2619](https://github.com/paritytech/parity/pull/2619)
|
||||
- Enable suicide json test [#2626](https://github.com/paritytech/parity/pull/2626)
|
||||
- Split journaldb commit into two functions: journal_under and mark_canonical [#2329](https://github.com/paritytech/parity/pull/2329)
|
||||
- Fixed tx queue limit for local transactions [#2616](https://github.com/paritytech/parity/pull/2616)
|
||||
- Additional logs when transactions is removed from queue [#2617](https://github.com/paritytech/parity/pull/2617)
|
||||
- mitigate refcell conflict in state diffing [#2601](https://github.com/paritytech/parity/pull/2601)
|
||||
- Fix tests [#2611](https://github.com/paritytech/parity/pull/2611)
|
||||
- small styling updates [#2610](https://github.com/paritytech/parity/pull/2610)
|
||||
- Remove web3 from Signer, bring in parity.js API [#2604](https://github.com/paritytech/parity/pull/2604)
|
||||
- Mostly configurable canonical cache size [#2516](https://github.com/paritytech/parity/pull/2516)
|
||||
- Added peers details to ethcore_netPeers RPC [#2580](https://github.com/paritytech/parity/pull/2580)
|
||||
- Display account password hint when available [#2596](https://github.com/paritytech/parity/pull/2596)
|
||||
- Fix gas estimation on transfer when data supplied [#2593](https://github.com/paritytech/parity/pull/2593)
|
||||
- remove unused npm packages [#2590](https://github.com/paritytech/parity/pull/2590)
|
||||
- Bundle fonts as part of the build process [#2588](https://github.com/paritytech/parity/pull/2588)
|
||||
- Contract constructor params [#2586](https://github.com/paritytech/parity/pull/2586)
|
||||
- Update json test suite [#2574](https://github.com/paritytech/parity/pull/2574)
|
||||
- Filter apps that has been replaced for the local list [#2583](https://github.com/paritytech/parity/pull/2583)
|
||||
- Display local apps listed by Parity [#2581](https://github.com/paritytech/parity/pull/2581)
|
||||
- Network-specific nodes file [#2569](https://github.com/paritytech/parity/pull/2569)
|
||||
- Don't close when block is known to be invalid [#2572](https://github.com/paritytech/parity/pull/2572)
|
||||
- deny compiler warnings in CI [#2570](https://github.com/paritytech/parity/pull/2570)
|
||||
- adjust alignment of queries [#2573](https://github.com/paritytech/parity/pull/2573)
|
||||
- update ethcore-bigint crate to 0.1.1 [#2562](https://github.com/paritytech/parity/pull/2562)
|
||||
- Registry dapp uses setAddress to actually set addresses now [#2568](https://github.com/paritytech/parity/pull/2568)
|
||||
- Add the new EIP150 test. [#2563](https://github.com/paritytech/parity/pull/2563)
|
||||
- fix failing tests [#2567](https://github.com/paritytech/parity/pull/2567)
|
||||
- ΞTH -> ETH [#2566](https://github.com/paritytech/parity/pull/2566)
|
||||
- Ensure polling is only done when connected [#2565](https://github.com/paritytech/parity/pull/2565)
|
||||
- Fixed race condition in trace import [#2555](https://github.com/paritytech/parity/pull/2555)
|
||||
- Disable misbehaving peers while seeking for best block [#2537](https://github.com/paritytech/parity/pull/2537)
|
||||
- TX queue gas limit config and allow local transactions over the gas limit [#2553](https://github.com/paritytech/parity/pull/2553)
|
||||
- standard component for address -> name mappings (consistent use everywhere) [#2557](https://github.com/paritytech/parity/pull/2557)
|
||||
- Remove unwrap from client module [#2554](https://github.com/paritytech/parity/pull/2554)
|
||||
- Removing panickers from sync module [#2551](https://github.com/paritytech/parity/pull/2551)
|
||||
- Address images (tokens, dapps) as registered via contentHash (when available) [#2526](https://github.com/paritytech/parity/pull/2526)
|
||||
- TokenReg set & get images working [#2540](https://github.com/paritytech/parity/pull/2540)
|
||||
- adjust app_id where /api/content/<hash> is called, fixes #2541 [#2543](https://github.com/paritytech/parity/pull/2543)
|
||||
- connection dialog now shows up in dapps as well, closes #2538 [#2550](https://github.com/paritytech/parity/pull/2550)
|
||||
- display account uuid (where available), closes #2546 [#2549](https://github.com/paritytech/parity/pull/2549)
|
||||
- create accounts via recovery phrase [#2545](https://github.com/paritytech/parity/pull/2545)
|
||||
- Build ethcore/js-precompiled on GitLab [#2522](https://github.com/paritytech/parity/pull/2522)
|
||||
- Return errors from eth_call RPC [#2498](https://github.com/paritytech/parity/pull/2498)
|
||||
- registry dapp: manage records [#2323](https://github.com/paritytech/parity/pull/2323)
|
||||
- Print backtrace on panic [#2535](https://github.com/paritytech/parity/pull/2535)
|
||||
- GitHubHint dapp [#2531](https://github.com/paritytech/parity/pull/2531)
|
||||
- Backports to master [#2530](https://github.com/paritytech/parity/pull/2530)
|
||||
- Handle reorganizations in the state cache [#2490](https://github.com/paritytech/parity/pull/2490)
|
||||
- Hypervisor: terminate hanging modules [#2513](https://github.com/paritytech/parity/pull/2513)
|
||||
- signer & node connection prompts/indicators [#2504](https://github.com/paritytech/parity/pull/2504)
|
||||
- Using pending block only if is not old [#2514](https://github.com/paritytech/parity/pull/2514)
|
||||
- More caching optimizations [#2505](https://github.com/paritytech/parity/pull/2505)
|
||||
- Fixed possible panic in the networking [#2495](https://github.com/paritytech/parity/pull/2495)
|
||||
- Trim password from file [#2503](https://github.com/paritytech/parity/pull/2503)
|
||||
- Fixing RPC Filter conversion to EthFilter [#2500](https://github.com/paritytech/parity/pull/2500)
|
||||
- Fixing error message for transactions [#2496](https://github.com/paritytech/parity/pull/2496)
|
||||
- Adjustable stack size for EVM [#2483](https://github.com/paritytech/parity/pull/2483)
|
||||
- [master] Fixing penalization in future [#2499](https://github.com/paritytech/parity/pull/2499)
|
||||
- Preserve cache on reverting the snapshot [#2488](https://github.com/paritytech/parity/pull/2488)
|
||||
- RocksDB version bump [#2492](https://github.com/paritytech/parity/pull/2492)
|
||||
- Increase default size of transaction queue [#2489](https://github.com/paritytech/parity/pull/2489)
|
||||
- basiccoin v1 available [#2491](https://github.com/paritytech/parity/pull/2491)
|
||||
- Small EVM optimization [#2487](https://github.com/paritytech/parity/pull/2487)
|
||||
- Track dirty accounts in the state [#2461](https://github.com/paritytech/parity/pull/2461)
|
||||
- fix signature lookup address [#2480](https://github.com/paritytech/parity/pull/2480)
|
||||
- update registrar test with generic non-empty test [#2476](https://github.com/paritytech/parity/pull/2476)
|
||||
- Derive IPC interface only when ipc feature is on [#2463](https://github.com/paritytech/parity/pull/2463)
|
||||
- Fix ethstore opening all key files in the directory at once [#2471](https://github.com/paritytech/parity/pull/2471)
|
||||
- contract api event log fixes [#2469](https://github.com/paritytech/parity/pull/2469)
|
||||
- basiccoin base functionality in-place [#2468](https://github.com/paritytech/parity/pull/2468)
|
||||
- Merge IPC codegen attributes into one [#2460](https://github.com/paritytech/parity/pull/2460)
|
||||
- Close after importing keys from geth [#2464](https://github.com/paritytech/parity/pull/2464)
|
||||
- Port a couple more RPC APIs to the new auto args [#2325](https://github.com/paritytech/parity/pull/2325)
|
||||
- update rustc for appveyor to 1.12.0 [#2423](https://github.com/paritytech/parity/pull/2423)
|
||||
- dapp basiccoin send operations [#2456](https://github.com/paritytech/parity/pull/2456)
|
||||
- Better EVM informant & Slow transactions warning [#2436](https://github.com/paritytech/parity/pull/2436)
|
||||
- Fixing Signer token RPC API [#2437](https://github.com/paritytech/parity/pull/2437)
|
||||
- Fixed FatDB check [#2443](https://github.com/paritytech/parity/pull/2443)
|
||||
- dapp basiccoin structure [#2444](https://github.com/paritytech/parity/pull/2444)
|
||||
- Accounts bloom in master [#2426](https://github.com/paritytech/parity/pull/2426)
|
||||
- Polishing Actually enable fat db pr (#1974) [#2048](https://github.com/paritytech/parity/pull/2048)
|
||||
- Jumptable cache [#2427](https://github.com/paritytech/parity/pull/2427)
|
||||
- signaturereg registered, remove hardcoding [#2431](https://github.com/paritytech/parity/pull/2431)
|
||||
- tokenreg dapp fixes for non-null returns [#2430](https://github.com/paritytech/parity/pull/2430)
|
||||
- update ABI json to latest deployed versions [#2428](https://github.com/paritytech/parity/pull/2428)
|
||||
- update Morden registry address [#2417](https://github.com/paritytech/parity/pull/2417)
|
||||
- Make migration api more friendly [#2420](https://github.com/paritytech/parity/pull/2420)
|
||||
- Journaling bloom filter crate in util [#2395](https://github.com/paritytech/parity/pull/2395)
|
||||
- move abis from js/json to contracts/abi [#2418](https://github.com/paritytech/parity/pull/2418)
|
||||
- Fixing logs-receipt matching [#2403](https://github.com/paritytech/parity/pull/2403)
|
||||
- fix broken beta compilation [#2405](https://github.com/paritytech/parity/pull/2405)
|
||||
- registry dapp: transfer names [#2335](https://github.com/paritytech/parity/pull/2335)
|
||||
- manage firstRun better [#2398](https://github.com/paritytech/parity/pull/2398)
|
||||
- render contract deployment address [#2397](https://github.com/paritytech/parity/pull/2397)
|
||||
- Transaction Queue fix [#2392](https://github.com/paritytech/parity/pull/2392)
|
||||
- contracts abi types & execute value [#2394](https://github.com/paritytech/parity/pull/2394)
|
||||
- update styling with ParityBar overlay [#2390](https://github.com/paritytech/parity/pull/2390)
|
||||
- application Signer popup window [#2388](https://github.com/paritytech/parity/pull/2388)
|
||||
- Fixing Delegate Call in JIT [#2378](https://github.com/paritytech/parity/pull/2378)
|
||||
- Prioritizing re-imported transactions [#2372](https://github.com/paritytech/parity/pull/2372)
|
||||
- Revert #2172, pretty much. [#2387](https://github.com/paritytech/parity/pull/2387)
|
||||
- correct sync memory usage calculation [#2385](https://github.com/paritytech/parity/pull/2385)
|
||||
- fix migration system for post-consolidation migrations, better errors [#2334](https://github.com/paritytech/parity/pull/2334)
|
||||
- Fix the traceAddress field in transaction traces. [#2373](https://github.com/paritytech/parity/pull/2373)
|
||||
- Gavcoin utilises the popup box [#2381](https://github.com/paritytech/parity/pull/2381)
|
||||
- registry dapp: support dropping names [#2328](https://github.com/paritytech/parity/pull/2328)
|
||||
- settings view, set background & store views [#2380](https://github.com/paritytech/parity/pull/2380)
|
||||
- Removing extras data from retracted blocks. [#2375](https://github.com/paritytech/parity/pull/2375)
|
||||
- fixed #2263, geth keys with ciphertext shorter than 32 bytes [#2318](https://github.com/paritytech/parity/pull/2318)
|
||||
- Expanse compatibility [#2369](https://github.com/paritytech/parity/pull/2369)
|
||||
- Allow queries of constant functions on contracts [#2360](https://github.com/paritytech/parity/pull/2360)
|
||||
- Auto Open/Close the Signer window on new transaction request [#2362](https://github.com/paritytech/parity/pull/2362)
|
||||
- Specify column cache sizes explicitly; default fallback of 2MB [#2358](https://github.com/paritytech/parity/pull/2358)
|
||||
- Canonical state cache (master) [#2311](https://github.com/paritytech/parity/pull/2311)
|
||||
- method signature lookups, parameter decoding & management [#2313](https://github.com/paritytech/parity/pull/2313)
|
||||
- make block queue into a more generic verification queue and fix block heap size calculation [#2095](https://github.com/paritytech/parity/pull/2095)
|
||||
- Hash Content RPC method [#2355](https://github.com/paritytech/parity/pull/2355)
|
||||
- registry dapp: show reserved events by default [#2359](https://github.com/paritytech/parity/pull/2359)
|
||||
- Display timestamp in Signer requests details [#2324](https://github.com/paritytech/parity/pull/2324)
|
||||
- Reorder transaction_by_hash to favour canon search [#2332](https://github.com/paritytech/parity/pull/2332)
|
||||
- Optimize DIV for some common divisors [#2327](https://github.com/paritytech/parity/pull/2327)
|
||||
- Return error when deserializing invalid hex [#2339](https://github.com/paritytech/parity/pull/2339)
|
||||
- Changed http:// to https:// on some links [#2349](https://github.com/paritytech/parity/pull/2349)
|
||||
- user defaults [#2014](https://github.com/paritytech/parity/pull/2014)
|
||||
- Fixing jit feature compilation [#2310](https://github.com/paritytech/parity/pull/2310)
|
||||
- Tx Queue improvements [#2292](https://github.com/paritytech/parity/pull/2292)
|
||||
- Removing PropTypes on build [#2322](https://github.com/paritytech/parity/pull/2322)
|
||||
- Lenient bytes deserialization [#2036](https://github.com/paritytech/parity/pull/2036)
|
||||
- reverse call data decoding given transaction data & method [#2312](https://github.com/paritytech/parity/pull/2312)
|
||||
- add missing gpl headers to gavcoin dapp [#2317](https://github.com/paritytech/parity/pull/2317)
|
||||
- contract Events, Functions & Queries sub-components as well as Event log visual updates [#2306](https://github.com/paritytech/parity/pull/2306)
|
||||
- webpack config updates (really include babel-polyfill, rename npm steps) [#2305](https://github.com/paritytech/parity/pull/2305)
|
||||
- remove unneeded Form from Account header [#2302](https://github.com/paritytech/parity/pull/2302)
|
||||
- edit of metadata across accounts, addresses & contracts [#2300](https://github.com/paritytech/parity/pull/2300)
|
||||
- Adjust all modals for consistency & css DRY-ness [#2301](https://github.com/paritytech/parity/pull/2301)
|
||||
- update container spacing [#2296](https://github.com/paritytech/parity/pull/2296)
|
||||
- local cache of generated background (no allocation on each re-render) [#2298](https://github.com/paritytech/parity/pull/2298)
|
||||
- fix failing tests [#2290](https://github.com/paritytech/parity/pull/2290)
|
||||
- Respecting standards for tokenreg dapp [#2287](https://github.com/paritytech/parity/pull/2287)
|
||||
- Separate RPC serialization from implementation [#2072](https://github.com/paritytech/parity/pull/2072)
|
||||
- Webpack optimisations - Using DLL [#2264](https://github.com/paritytech/parity/pull/2264)
|
||||
- header background, theme adjustments (not that harsh) [#2273](https://github.com/paritytech/parity/pull/2273)
|
||||
- contract view (developer-centric) [#2259](https://github.com/paritytech/parity/pull/2259)
|
||||
- Add hash as CLI function [#1995](https://github.com/paritytech/parity/pull/1995)
|
||||
- registry: fix mined events showing as pending [#2267](https://github.com/paritytech/parity/pull/2267)
|
||||
- Dapp - Tokenreg ; Query Tokens from TLA or Address [#2266](https://github.com/paritytech/parity/pull/2266)
|
||||
- Fixes to the Token Registration dApp [#2250](https://github.com/paritytech/parity/pull/2250)
|
||||
- remove abi *.json duplication, provide a single version of the truth [#2253](https://github.com/paritytech/parity/pull/2253)
|
||||
- Separate path for ext code size [#2251](https://github.com/paritytech/parity/pull/2251)
|
||||
- Snapshot format changes [#2234](https://github.com/paritytech/parity/pull/2234)
|
||||
- Serving content at /api/content/<hash> [#2248](https://github.com/paritytech/parity/pull/2248)
|
||||
- Fails when deserializing non-hex uints [#2247](https://github.com/paritytech/parity/pull/2247)
|
||||
- registry dapp: add GPL headers [#2252](https://github.com/paritytech/parity/pull/2252)
|
||||
- registry dapp: user-friendly lookup [#2229](https://github.com/paritytech/parity/pull/2229)
|
||||
- registry dapp: show DataChanged events [#2242](https://github.com/paritytech/parity/pull/2242)
|
||||
- fixups for deploys [#2249](https://github.com/paritytech/parity/pull/2249)
|
||||
- Fixing output of eth_call and Bytes deserialization [#2230](https://github.com/paritytech/parity/pull/2230)
|
||||
- Encryption, decryption and public key RPCs. [#1946](https://github.com/paritytech/parity/pull/1946)
|
||||
- limit number of event logs returned [#2231](https://github.com/paritytech/parity/pull/2231)
|
||||
- babel-polyfill [#2239](https://github.com/paritytech/parity/pull/2239)
|
||||
- procedurally generate background based on signer key [#2233](https://github.com/paritytech/parity/pull/2233)
|
||||
- UI fixes [#2238](https://github.com/paritytech/parity/pull/2238)
|
||||
- expose isConnected() from transport [#2225](https://github.com/paritytech/parity/pull/2225)
|
||||
- registry dapp: rename event log [#2227](https://github.com/paritytech/parity/pull/2227)
|
||||
- registry dapp: show pending events [#2223](https://github.com/paritytech/parity/pull/2223)
|
||||
- Handle RLP to string UTF-8 decoding errors [#2217](https://github.com/paritytech/parity/pull/2217)
|
||||
- Use WebSocket transport for all built-in calls [#2216](https://github.com/paritytech/parity/pull/2216)
|
||||
- Remove panickers from trie iterators [#2209](https://github.com/paritytech/parity/pull/2209)
|
||||
- Limit for logs filter. [#2180](https://github.com/paritytech/parity/pull/2180)
|
||||
- Various state copy optimizations [#2172](https://github.com/paritytech/parity/pull/2172)
|
||||
- New signer token RPC & Initial signer connection without token. [#2096](https://github.com/paritytech/parity/pull/2096)
|
||||
- signer ui fixes [#2219](https://github.com/paritytech/parity/pull/2219)
|
||||
- contract deploy ui [#2212](https://github.com/paritytech/parity/pull/2212)
|
||||
- registry dapp: fix propTypes [#2218](https://github.com/paritytech/parity/pull/2218)
|
||||
- registry: fix IdentityIcon in events log [#2206](https://github.com/paritytech/parity/pull/2206)
|
||||
- Fixing evm-debug [#2161](https://github.com/paritytech/parity/pull/2161)
|
||||
- Fix syncing with pv63 peers [#2204](https://github.com/paritytech/parity/pull/2204)
|
||||
- registry: show shortened hashes [#2205](https://github.com/paritytech/parity/pull/2205)
|
||||
- registry dapp: remove owner [#2203](https://github.com/paritytech/parity/pull/2203)
|
||||
- webpack proxy updates for /api* [#2175](https://github.com/paritytech/parity/pull/2175)
|
||||
- simplify personal event publishing, fix delete refresh issues [#2183](https://github.com/paritytech/parity/pull/2183)
|
||||
- fix global & initial states [#2160](https://github.com/paritytech/parity/pull/2160)
|
||||
- Allow selection & saving of available views [#2131](https://github.com/paritytech/parity/pull/2131)
|
||||
- global/contract events with promisy subscribe/unsubscribe [#2136](https://github.com/paritytech/parity/pull/2136)
|
||||
- Token Registry dApp [#2178](https://github.com/paritytech/parity/pull/2178)
|
||||
- re-usable bytesToHex exposed in api.util [#2174](https://github.com/paritytech/parity/pull/2174)
|
||||
- Webpack optimisations [#2179](https://github.com/paritytech/parity/pull/2179)
|
||||
- cleanup on contract event subscriptions [#2104](https://github.com/paritytech/parity/pull/2104)
|
||||
- move utility functions to api.util [#2105](https://github.com/paritytech/parity/pull/2105)
|
||||
- registry dapp [#2077](https://github.com/paritytech/parity/pull/2077)
|
||||
- mui/FlatButton to ui/Button [#2129](https://github.com/paritytech/parity/pull/2129)
|
||||
- address delete functionality [#2128](https://github.com/paritytech/parity/pull/2128)
|
||||
- contract deployment updates [#2106](https://github.com/paritytech/parity/pull/2106)
|
||||
- contract events, indexed string fix [#2108](https://github.com/paritytech/parity/pull/2108)
|
||||
- Bumping jsonrpc-core & jsonrpc-http-server [#2162](https://github.com/paritytech/parity/pull/2162)
|
||||
- gitlab testing & build processes [#2090](https://github.com/paritytech/parity/pull/2090)
|
||||
- Misc small UI fixes (recently broken) [#2101](https://github.com/paritytech/parity/pull/2101)
|
||||
- Bump clippy & Fix warnings [#2109](https://github.com/paritytech/parity/pull/2109)
|
||||
- Import command summary [#2102](https://github.com/paritytech/parity/pull/2102)
|
||||
- check for existence of deprecated ethash file before attempting delete [#2103](https://github.com/paritytech/parity/pull/2103)
|
||||
- shapeshift Promise API library [#2088](https://github.com/paritytech/parity/pull/2088)
|
||||
- fund account via ShapeShift [#2099](https://github.com/paritytech/parity/pull/2099)
|
||||
- Get bigint on crates.io [#2078](https://github.com/paritytech/parity/pull/2078)
|
||||
- Enable sealing if Engine provides internal sealing given author [#2084](https://github.com/paritytech/parity/pull/2084)
|
||||
- Config files [#2070](https://github.com/paritytech/parity/pull/2070)
|
||||
- re-add lodash plugin to babel config [#2092](https://github.com/paritytech/parity/pull/2092)
|
||||
- Remove old cache data [#2081](https://github.com/paritytech/parity/pull/2081)
|
||||
- Logs limit & log_index bug [#2073](https://github.com/paritytech/parity/pull/2073)
|
||||
- flatten store, muiTheme & api providers [#2087](https://github.com/paritytech/parity/pull/2087)
|
||||
- add babel es2016 & es2017 presets [#2083](https://github.com/paritytech/parity/pull/2083)
|
||||
- remove all '<name>/index' imports in API [#2089](https://github.com/paritytech/parity/pull/2089)
|
||||
- add missing GPL headers to all files [#2086](https://github.com/paritytech/parity/pull/2086)
|
||||
- readme cleanups [#2085](https://github.com/paritytech/parity/pull/2085)
|
||||
- gavcoin global import of parity api [#2082](https://github.com/paritytech/parity/pull/2082)
|
||||
- Fixing removal from gas price when moving future->current [#2076](https://github.com/paritytech/parity/pull/2076)
|
||||
- Split internal sealing from work preparation [#2071](https://github.com/paritytech/parity/pull/2071)
|
||||
- ensure the target folder doesn't exist before renaming [#2074](https://github.com/paritytech/parity/pull/2074)
|
||||
- Get rid of 'Dapp is being downloaded' page [#2055](https://github.com/paritytech/parity/pull/2055)
|
||||
- fix failing master build: update tests to new init_restore signature. [#2069](https://github.com/paritytech/parity/pull/2069)
|
||||
- Local snapshot restore [#2058](https://github.com/paritytech/parity/pull/2058)
|
||||
- import: keep informant going until finished [#2065](https://github.com/paritytech/parity/pull/2065)
|
||||
- Add a few tests for the snapshot service [#2059](https://github.com/paritytech/parity/pull/2059)
|
||||
- IPC tweaks [#2046](https://github.com/paritytech/parity/pull/2046)
|
||||
- Update arm* Docker [#2064](https://github.com/paritytech/parity/pull/2064)
|
||||
- Fetching any content-addressed content [#2050](https://github.com/paritytech/parity/pull/2050)
|
||||
- Use proper database configuration in snapshots. [#2052](https://github.com/paritytech/parity/pull/2052)
|
||||
- periodic snapshot tweaks [#2054](https://github.com/paritytech/parity/pull/2054)
|
||||
- ethkey-cli [#2057](https://github.com/paritytech/parity/pull/2057)
|
||||
- Forward ethstore-cli feature [#2056](https://github.com/paritytech/parity/pull/2056)
|
||||
- handling invalid spec jsons properly, additional tests, closes #1840 [#2049](https://github.com/paritytech/parity/pull/2049)
|
||||
- Periodic snapshots [#2044](https://github.com/paritytech/parity/pull/2044)
|
||||
- Snapshot sync [#2047](https://github.com/paritytech/parity/pull/2047)
|
||||
- Nice error pages for Dapps & Signer [#2033](https://github.com/paritytech/parity/pull/2033)
|
||||
- Add a few small snapshot tests [#2038](https://github.com/paritytech/parity/pull/2038)
|
||||
- facelift for traces, added errors [#2042](https://github.com/paritytech/parity/pull/2042)
|
||||
- Fetching content from HTTPS using `rustls` [#2024](https://github.com/paritytech/parity/pull/2024)
|
||||
- Skipping log when no transactions were sent [#2045](https://github.com/paritytech/parity/pull/2045)
|
||||
- rlp as separate crate [#2034](https://github.com/paritytech/parity/pull/2034)
|
||||
- Fixing uint serialization [#2037](https://github.com/paritytech/parity/pull/2037)
|
||||
- Fixing new transactions propagation [#2039](https://github.com/paritytech/parity/pull/2039)
|
||||
- Propagating transactions to peers on timer. [#2035](https://github.com/paritytech/parity/pull/2035)
|
||||
- Remove Populatable and BytesConvertable traits [#2019](https://github.com/paritytech/parity/pull/2019)
|
||||
- fixed #1933 [#1979](https://github.com/paritytech/parity/pull/1979)
|
||||
- Synchronization tweaks for IPC services [#2028](https://github.com/paritytech/parity/pull/2028)
|
||||
- Asynchronous RPC support [#2017](https://github.com/paritytech/parity/pull/2017)
|
||||
- Disable ArchiveDB counter check [#2016](https://github.com/paritytech/parity/pull/2016)
|
||||
- always process trie death row on commit, add more tracing [#2025](https://github.com/paritytech/parity/pull/2025)
|
||||
- fixed transaction addresses mapping, fixes #1971 [#2026](https://github.com/paritytech/parity/pull/2026)
|
||||
- Adding tests for dapps server. [#2021](https://github.com/paritytech/parity/pull/2021)
|
||||
- builtin trait refactoring [#2018](https://github.com/paritytech/parity/pull/2018)
|
||||
- Start parity with systemd [#1967](https://github.com/paritytech/parity/pull/1967)
|
||||
- Control service for IPC [#2013](https://github.com/paritytech/parity/pull/2013)
|
||||
- LRU cache for dapps [#2006](https://github.com/paritytech/parity/pull/2006)
|
||||
- CLI for valid hosts for dapps server [#2005](https://github.com/paritytech/parity/pull/2005)
|
||||
- Make the block header struct's internals private [#2000](https://github.com/paritytech/parity/pull/2000)
|
||||
- Take control of recovered snapshots, start restoration asynchronously [#2010](https://github.com/paritytech/parity/pull/2010)
|
||||
- remove internal locking from DBTransaction [#2003](https://github.com/paritytech/parity/pull/2003)
|
||||
- Snapshot optimizations [#1991](https://github.com/paritytech/parity/pull/1991)
|
||||
- Revert removing ecies [#2009](https://github.com/paritytech/parity/pull/2009)
|
||||
- small blooms optimization [#1998](https://github.com/paritytech/parity/pull/1998)
|
||||
- protection from adding empty traces && assertion in traces db [#1994](https://github.com/paritytech/parity/pull/1994)
|
||||
- Stratum IPC service [#1959](https://github.com/paritytech/parity/pull/1959)
|
||||
- Signature cleanup [#1921](https://github.com/paritytech/parity/pull/1921)
|
||||
- Fixed discovery skipping some nodes [#1996](https://github.com/paritytech/parity/pull/1996)
|
||||
- Trie query recording and AccountDB factory for no mangling [#1944](https://github.com/paritytech/parity/pull/1944)
|
||||
- Validating sha3 of a dapp bundle [#1993](https://github.com/paritytech/parity/pull/1993)
|
||||
- Improve eth_getWork timeout test rpc_get_work_should_timeout [#1992](https://github.com/paritytech/parity/pull/1992)
|
||||
- Resolving URLs from contract [#1964](https://github.com/paritytech/parity/pull/1964)
|
||||
- Add timeout for eth_getWork call [#1975](https://github.com/paritytech/parity/pull/1975)
|
||||
- CLI for Signer interface [#1980](https://github.com/paritytech/parity/pull/1980)
|
||||
- IPC timeout multiplied [#1990](https://github.com/paritytech/parity/pull/1990)
|
||||
- Use relative path for IPC sockets [#1983](https://github.com/paritytech/parity/pull/1983)
|
||||
- Market-orientated transaction pricing [#1963](https://github.com/paritytech/parity/pull/1963)
|
||||
- Bump clippy [#1982](https://github.com/paritytech/parity/pull/1982)
|
||||
- Fixing mutual recursive types serialization [#1977](https://github.com/paritytech/parity/pull/1977)
|
||||
- Fix open on FreeBSD [#1984](https://github.com/paritytech/parity/pull/1984)
|
||||
- Upgrade hyper dependency to 0.9 [#1973](https://github.com/paritytech/parity/pull/1973)
|
||||
- Create network-specific nodes files [#1970](https://github.com/paritytech/parity/pull/1970)
|
||||
- Getting rid of syntex [#1965](https://github.com/paritytech/parity/pull/1965)
|
||||
- Remove binary specification from hypervisor [#1960](https://github.com/paritytech/parity/pull/1960)
|
||||
- Stratum protocol general [#1954](https://github.com/paritytech/parity/pull/1954)
|
||||
- keep track of first block in blockchain [#1937](https://github.com/paritytech/parity/pull/1937)
|
||||
- introduce ethcore/state module [#1953](https://github.com/paritytech/parity/pull/1953)
|
||||
- Apply settings to column families [#1956](https://github.com/paritytech/parity/pull/1956)
|
||||
- move column family constants into db module [#1955](https://github.com/paritytech/parity/pull/1955)
|
||||
- ECIES without MAC [#1948](https://github.com/paritytech/parity/pull/1948)
|
||||
- Fix canny warnings [#1951](https://github.com/paritytech/parity/pull/1951)
|
||||
- Fetchable dapps [#1949](https://github.com/paritytech/parity/pull/1949)
|
||||
- remove impossible panickers related to infallible db transaction [#1947](https://github.com/paritytech/parity/pull/1947)
|
||||
- Minor optimizations [#1943](https://github.com/paritytech/parity/pull/1943)
|
||||
- remove randomness from bigint benches, fix warnings [#1945](https://github.com/paritytech/parity/pull/1945)
|
||||
- Fix several RPCs [#1926](https://github.com/paritytech/parity/pull/1926)
|
||||
- Bump clippy, fix warnings [#1939](https://github.com/paritytech/parity/pull/1939)
|
||||
- DB WAL size limit [#1935](https://github.com/paritytech/parity/pull/1935)
|
||||
- Use explicit global namespaces in codegen [#1928](https://github.com/paritytech/parity/pull/1928)
|
||||
- Fix build on master [#1934](https://github.com/paritytech/parity/pull/1934)
|
||||
- IPC on by default [#1927](https://github.com/paritytech/parity/pull/1927)
|
||||
- fix util benches compilation [#1931](https://github.com/paritytech/parity/pull/1931)
|
||||
- Update gitlab-ci [#1929](https://github.com/paritytech/parity/pull/1929)
|
||||
- ethkey and ethstore use hash structures from bigint [#1851](https://github.com/paritytech/parity/pull/1851)
|
||||
1015
docs/CHANGELOG-1.5.md
Normal file
1015
docs/CHANGELOG-1.5.md
Normal file
File diff suppressed because it is too large
Load Diff
614
docs/CHANGELOG-1.6.md
Normal file
614
docs/CHANGELOG-1.6.md
Normal file
@@ -0,0 +1,614 @@
|
||||
## Parity [v1.6.10](https://github.com/paritytech/parity/releases/tag/v1.6.10) (2017-07-25)
|
||||
|
||||
This is a hotfix release for the stable channel addressing the recent [multi-signature wallet vulnerability](https://blog.parity.io/security-alert-high-2/). Note, upgrading is not mandatory, and all future multi-sig wallets created by any version of Parity are secure.
|
||||
|
||||
All Changes:
|
||||
|
||||
- Backports for stable [#6116](https://github.com/paritytech/parity/pull/6116)
|
||||
- Remove chunk to restore from pending set only upon successful import [#6112](https://github.com/paritytech/parity/pull/6112)
|
||||
- Blacklist bad snapshot manifest hashes upon failure [#5874](https://github.com/paritytech/parity/pull/5874)
|
||||
- Bump snap version and tweak importing detection logic [#6079](https://github.com/paritytech/parity/pull/6079) (modified to work)
|
||||
- Fix docker build for stable [#6118](https://github.com/paritytech/parity/pull/6118)
|
||||
- Update wallet library binaries [#6108](https://github.com/paritytech/parity/pull/6108)
|
||||
- Backported wallet fix [#6104](https://github.com/paritytech/parity/pull/6104)
|
||||
- Fix initialisation bug. ([#6102](https://github.com/paritytech/parity/pull/6102))
|
||||
- Update wallet library modifiers ([#6103](https://github.com/paritytech/parity/pull/6103))
|
||||
- Bump to v1.6.10
|
||||
|
||||
## Parity [v1.6.9](https://github.com/paritytech/parity/releases/tag/v1.6.9) (2017-07-16)
|
||||
|
||||
This is the first stable release of the 1.6 series. It contains a number of minor fixes and introduces the `--reseal-on-uncles` option for miners.
|
||||
|
||||
Full list of changes:
|
||||
|
||||
- Backports [#6061](https://github.com/paritytech/parity/pull/6061)
|
||||
- Ethereum Classic Monetary Policy [#5741](https://github.com/paritytech/parity/pull/5741)
|
||||
- Update rewards for uncle miners for ECIP1017
|
||||
- Fix an off-by-one error in ECIP1017 era calculation
|
||||
- `ecip1017_era_rounds` missing from EthashParams when run in build bot
|
||||
- strip out ecip1017_eras_block_reward function and add unit test
|
||||
- JS precompiled set to stable
|
||||
- Backports [#6060](https://github.com/paritytech/parity/pull/6060)
|
||||
- --reseal-on-uncle [#5940](https://github.com/paritytech/parity/pull/5940)
|
||||
- Optimized uncle check
|
||||
- Additional uncle check
|
||||
- Updated comment
|
||||
- Bump to v1.6.9
|
||||
- CLI: Export error message and less verbose peer counter. [#5870](https://github.com/paritytech/parity/pull/5870)
|
||||
- Removed number of active connections from informant
|
||||
- Print error message when fatdb is required
|
||||
- Remove peers from UI
|
||||
|
||||
## Parity [v1.6.8](https://github.com/paritytech/parity/releases/tag/v1.6.8) (2017-06-08)
|
||||
|
||||
This release addresses:
|
||||
|
||||
- a rare condition where quickly creating a new account was generating an account not matching the recovery phrase.
|
||||
- compressed RLP strings caused wrong/empty transaction receipts on Classic network.
|
||||
- blacklisting the _empty phrase_ account from UI and RPC on non-development chains. See also [this blog post](https://blog.parity.io/restoring-blank-seed-phrase/).
|
||||
- canceling transactions that didn't have a condition.
|
||||
- the updated Expanse fork block and chain ID.
|
||||
|
||||
Full changelog:
|
||||
|
||||
- Backporting to beta [#5791](https://github.com/paritytech/parity/pull/5791)
|
||||
- Bump to v1.6.8
|
||||
- Update expanse json with fork at block 600000 [#5351](https://github.com/paritytech/parity/pull/5351)
|
||||
- Update expanse json with fork at block 600000
|
||||
- Update exp chainID to 2
|
||||
- Bumped mio [#5763](https://github.com/paritytech/parity/pull/5763)
|
||||
- Fixed default UI port for mac installer [#5782](https://github.com/paritytech/parity/pull/5782)
|
||||
- Blacklist empty phrase account. [#5730](https://github.com/paritytech/parity/pull/5730)
|
||||
- Update Cid/multihash/ring/tinykeccak [#5785](https://github.com/paritytech/parity/pull/5785)
|
||||
- Updating ring,multihash,tiny-keccak
|
||||
- Updating CID in ipfs.
|
||||
- Disable compression for RLP strings [#5786](https://github.com/paritytech/parity/pull/5786)
|
||||
- Beta Backports [#5789](https://github.com/paritytech/parity/pull/5789)
|
||||
- Fix local transactions without condition. [#5716](https://github.com/paritytech/parity/pull/5716)
|
||||
- Block invalid account name creation [#5784](https://github.com/paritytech/parity/pull/5784)
|
||||
- Additional non-empty phrase check (fromNew)
|
||||
- Explicit canCreate check in create (not only on UI)
|
||||
- BN instance check (fixes Geth imports)
|
||||
- Fixup tests after better checks
|
||||
- Recover from empty phrase in dev mode [#5698](https://github.com/paritytech/parity/pull/5698)
|
||||
- Add dev chain to isTest
|
||||
- Fix signer
|
||||
- Fix no condition transactions
|
||||
- Fix case: old parity
|
||||
- Fix propTypes.
|
||||
|
||||
## Parity [v1.6.7](https://github.com/paritytech/parity/releases/tag/v1.6.7) (2017-05-18)
|
||||
|
||||
This release addresses:
|
||||
|
||||
- potential usability issues with [import and recovery of existing accounts](https://blog.parity.io/restoring-blank-seed-phrase/).
|
||||
- canceling scheduled transactions via RPC or UI.
|
||||
- warp sync issues with the Kovan network.
|
||||
|
||||
Full changelog:
|
||||
|
||||
- Backporting to beta [#5657](https://github.com/paritytech/parity/pull/5657)
|
||||
- Add CHANGELOG.md [#5513](https://github.com/paritytech/parity/pull/5513)
|
||||
- Reorg into blocks before minimum history [#5558](https://github.com/paritytech/parity/pull/5558)
|
||||
- Bump to v1.6.7
|
||||
- Cancel Transaction [#5656](https://github.com/paritytech/parity/pull/5656)
|
||||
- option to disable persistent txqueue [#5544](https://github.com/paritytech/parity/pull/5544)
|
||||
- Remove transaction RPC [#4949](https://github.com/paritytech/parity/pull/4949)
|
||||
- Cancel tx JS [#4958](https://github.com/paritytech/parity/pull/4958)
|
||||
- Updating documentation for RPCs [#5392](https://github.com/paritytech/parity/pull/5392)
|
||||
- Backport Recover button [#5654](https://github.com/paritytech/parity/pull/5654)
|
||||
- Backport [#5645](https://github.com/paritytech/parity/pull/5645)
|
||||
- Add monotonic step to Kovan [#5630](https://github.com/paritytech/parity/pull/5630)
|
||||
- Add monotonic transition to kovan [#5587](https://github.com/paritytech/parity/pull/5587)
|
||||
- Fix ethsign [#5600](https://github.com/paritytech/parity/pull/5600)
|
||||
- Registry backports [#5445](https://github.com/paritytech/parity/pull/5445)
|
||||
- Fixes to the Registry dapp [#4984](https://github.com/paritytech/parity/pull/4984)
|
||||
- Fix references to api outside of `parity.js` [#4981](https://github.com/paritytech/parity/pull/4981)
|
||||
|
||||
## Parity [v1.6.6](https://github.com/paritytech/parity/releases/tag/v1.6.6) (2017-04-11)
|
||||
|
||||
This release brings warp sync support for the Kovan network.
|
||||
|
||||
- Beta Backports [#5434](https://github.com/paritytech/parity/pull/5434)
|
||||
- Bump to v1.6.6
|
||||
- Strict validation transitions [#4988](https://github.com/paritytech/parity/pull/4988)
|
||||
- Ability to make validation stricter
|
||||
- Fix consensus
|
||||
- Remove logger
|
||||
- Fix eth_sign showing as wallet account [#5309](https://github.com/paritytech/parity/pull/5309)
|
||||
- DefaultProps for account
|
||||
- Pass signing account
|
||||
- Update tests for Connect(...)
|
||||
- Add new seed nodes [#5345](https://github.com/paritytech/parity/pull/5345)
|
||||
- Kovan warp sync fixed
|
||||
- Aura eip155 validation transition [#5363](https://github.com/paritytech/parity/pull/5363)
|
||||
- Add eip155 validation
|
||||
- Add transition block
|
||||
- Default eip155 validation [#5350](https://github.com/paritytech/parity/pull/5350)
|
||||
- Backport syntax libs update [#5316](https://github.com/paritytech/parity/pull/5316)
|
||||
|
||||
## Parity [v1.6.5](https://github.com/paritytech/parity/releases/tag/v1.6.5) (2017-03-28)
|
||||
|
||||
This release contains the following changes:
|
||||
|
||||
- Warp sync snapshot format improvements.
|
||||
- Fix for Firefox UI issues.
|
||||
- Fix for restoring from a file snapshot.
|
||||
- Fix for auto-updater error handling.
|
||||
- Updated configuration for [Ropsten revival](https://github.com/ethereum/ropsten/blob/master/revival.md). Make sure to delete old Ropsten blockchain first with `parity db kill --chain ropsten`. After that you can sync normally with `parity --chain ropsten`.
|
||||
|
||||
Full changes:
|
||||
|
||||
- Beta Backports [#5299](https://github.com/paritytech/parity/pull/5299)
|
||||
- Fix FireFox overflows [#5000](https://github.com/paritytech/parity/pull/5000)
|
||||
- Max width for container
|
||||
- Set min-width
|
||||
- Switching ValidatorSet [#4961](https://github.com/paritytech/parity/pull/4961)
|
||||
- Add multi validator set
|
||||
- Nicer comment
|
||||
- Validate in constructor
|
||||
- Reporting
|
||||
- Avoid clogging up tmp when updater dir has bad permissions. [#5024](https://github.com/paritytech/parity/pull/5024)
|
||||
- Force earliest era set in snapshot restore [#5021](https://github.com/paritytech/parity/pull/5021)
|
||||
- Bump to v1.6.5
|
||||
- Fine grained snapshot chunking
|
||||
- Ropsten revival
|
||||
- Fix validator contract syncing [#4789](https://github.com/paritytech/parity/pull/4789) [#5011](https://github.com/paritytech/parity/pull/5011)
|
||||
- Make validator set aware of various states
|
||||
- Fix updater build
|
||||
- Clean up contract call
|
||||
- Failing sync test
|
||||
- Adjust tests
|
||||
- Nicer indent
|
||||
- Revert bound divisor
|
||||
|
||||
## Parity [v1.6.4](https://github.com/paritytech/parity/releases/tag/v1.6.4) (2017-03-22)
|
||||
|
||||
A number of issues fixed in this release:
|
||||
|
||||
- Ledger device connectivity issues for some users on Windows.
|
||||
- Improved vault usability.
|
||||
- Stratum mining no longer requires `--force-sealing`.
|
||||
- `evm` binary has been renamed to `parity-evm` to avoid conflict with cpp-ethereum package.
|
||||
|
||||
Full Changes:
|
||||
|
||||
- Backporting to beta [#4995](https://github.com/paritytech/parity/pull/4995)
|
||||
- Bump to v1.6.4
|
||||
- Ensure sealing work enabled if notifier registered
|
||||
- Fix condition check
|
||||
- Always send full chunks [#4960](https://github.com/paritytech/parity/pull/4960)
|
||||
- Bump nanomsg [#4965](https://github.com/paritytech/parity/pull/4965)
|
||||
- Renaming evm binary to avoid conflicts. [#4899](https://github.com/paritytech/parity/pull/4899)
|
||||
- Beta UI backports [#4993](https://github.com/paritytech/parity/pull/4993)
|
||||
- Update js-precompiled 20170314-121823
|
||||
- Attach hardware wallets already in addressbook [#4912](https://github.com/paritytech/parity/pull/4912)
|
||||
- Attach hardware wallets already in addressbook
|
||||
- Only set values changed
|
||||
- Add Vaults logic to First Run [#4894](https://github.com/paritytech/parity/issues/4894) [#4914](https://github.com/paritytech/parity/pull/4914)
|
||||
- Add ability to configure Secure API (for [#4885](https://github.com/paritytech/parity/issues/4885)) [#4922](https://github.com/paritytech/parity/pull/4922)
|
||||
- Add z-index to small modals as well [#4923](https://github.com/paritytech/parity/pull/4923)
|
||||
- Eth_sign where account === undefined [#4964](https://github.com/paritytech/parity/pull/4964)
|
||||
- Update for case where account === undefined
|
||||
- Update tests to not mask account === undefined
|
||||
- Default account = {} where undefined (thanks [@tomusdrw](https://github.com/tomusdrw))
|
||||
- Fix Password Dialog forms style issue [#4968](https://github.com/paritytech/parity/pull/4968)
|
||||
|
||||
## Parity [v1.6.3](https://github.com/paritytech/parity/releases/tag/v1.6.3) (2017-03-14)
|
||||
|
||||
This release fixes a compatibility issue with Safari on macOS.
|
||||
|
||||
- Safari fixes [#4902](https://github.com/paritytech/parity/pull/4902)
|
||||
- Add initial max-width to sections
|
||||
- Move background z-index to -1
|
||||
|
||||
## Parity [v1.6.2](https://github.com/paritytech/parity/releases/tag/v1.6.2) (2017-03-13)
|
||||
|
||||
A major release introducing a few new features:
|
||||
|
||||
- Revamped UI.
|
||||
- Account Vaults.
|
||||
- Support for Ledger hardware wallet devices.
|
||||
- Stratum protocol for PoW mining.
|
||||
- A new MacOS installer. Parity for MacOS now includes a Menu Bar icon that allows controlling Parity service.
|
||||
- Disk backed transaction store. Pending transactions are now saved to disk and won't get lost when Parity is restarted.
|
||||
- Improved memory management.
|
||||
|
||||
See the [blog post](https://blog.parity.io/announcing-parity-1-6/) for more details.
|
||||
|
||||
Full Changes:
|
||||
|
||||
- Fix auto-updater beta [#4868](https://github.com/paritytech/parity/pull/4868)
|
||||
- Beta UI backports [#4855](https://github.com/paritytech/parity/pull/4855)
|
||||
- Added React Hot Reload to dapps + TokenDeplpoy fix ([#4846](https://github.com/paritytech/parity/pull/4846))
|
||||
- Fix method decoding ([#4845](https://github.com/paritytech/parity/pull/4845))
|
||||
- Fix contract deployment method decoding in Signer
|
||||
- Linting
|
||||
- Fix TxViewer when no `to` (contract deployment) ([#4847](https://github.com/paritytech/parity/pull/4847))
|
||||
- Added React Hot Reload to dapps + TokenDeplpoy fix
|
||||
- Fixes to the LocalTx dapp
|
||||
- Don't send the nonce for mined transactions
|
||||
- Don't encode empty to values for options
|
||||
- Pull steps from actual available steps ([#4848](https://github.com/paritytech/parity/pull/4848))
|
||||
- Wait for the value to have changed in the input ([#4844](https://github.com/paritytech/parity/pull/4844))
|
||||
- Backport Registry changes from [#4589](https://github.com/paritytech/parity/pull/4589)
|
||||
- Test fixes for [#4589](https://github.com/paritytech/parity/pull/4589)
|
||||
- Beta Simple score [#4852](https://github.com/paritytech/parity/pull/4852)
|
||||
- Simple score
|
||||
- Ignore part of a test
|
||||
- Backporting to beta [#4840](https://github.com/paritytech/parity/pull/4840)
|
||||
- Fixes to the Registry dapp ([#4838](https://github.com/paritytech/parity/pull/4838))
|
||||
- Fix wrong ABI methods
|
||||
- Fix comparison
|
||||
- Bump to v1.6.1
|
||||
- Show token icons on list summary pages ([#4826](https://github.com/paritytech/parity/pull/4826)) [#4827](https://github.com/paritytech/parity/pull/4827)
|
||||
- Adjust balance overlay margins (no jumps)
|
||||
- Img only balances, small verifications
|
||||
- Invalid tests removed
|
||||
- Always wrap display (Thanks [@ngotchac](https://github.com/ngotchac))
|
||||
- Update tests to reflect reality
|
||||
- Beta Engine backports [#4806](https://github.com/paritytech/parity/pull/4806)
|
||||
- Calibrate before rejection
|
||||
- Change flag name
|
||||
- Add eip155
|
||||
- Make network_id default
|
||||
- Beta UI backports [#4823](https://github.com/paritytech/parity/pull/4823)
|
||||
- Better logic for contract deployments ([#4821](https://github.com/paritytech/parity/pull/4821))
|
||||
- Beta UI backports [#4818](https://github.com/paritytech/parity/pull/4818)
|
||||
- Update the key ([#4817](https://github.com/paritytech/parity/pull/4817))
|
||||
- Adjust selection colours/display ([#4811](https://github.com/paritytech/parity/pull/4811))
|
||||
- Adjust selection colours to match with mui
|
||||
- allow -> disable (simplify selections)
|
||||
- Only use top-border
|
||||
- Overlay selection line
|
||||
- Slightly more muted unselected
|
||||
- Restore address icon
|
||||
- Fix default values for contract queries
|
||||
- Beta UI backports [#4809](https://github.com/paritytech/parity/pull/4809)
|
||||
- Update Wallet to new Wallet Code ([#4805](https://github.com/paritytech/parity/pull/4805))
|
||||
- Update Wallet Version
|
||||
- Update Wallet Library
|
||||
- Update Wallets Bytecodes
|
||||
- Typo
|
||||
- Separate Deploy in Contract API
|
||||
- Use the new Wallet ABI // Update wallet code
|
||||
- WIP .// Deploy from Wallet
|
||||
- Update Wallet contract
|
||||
- Contract Deployment for Wallet
|
||||
- Working deployments for Single Owned Wallet contracts
|
||||
- Linting
|
||||
- Create a Wallet from a Wallet
|
||||
- Linting
|
||||
- Fix Signer transactions // Add Gas Used for transactions
|
||||
- Deploy wallet contract fix
|
||||
- Fix too high gas estimate for Wallet Contract Deploys
|
||||
- Final piece ; deploying from Wallet owned by wallet
|
||||
- Update Wallet Code
|
||||
- Updated the Wallet Codes
|
||||
- Fixing Wallet Deployments
|
||||
- Add Support for older wallets
|
||||
- Linting
|
||||
- SMS Faucet ([#4774](https://github.com/paritytech/parity/pull/4774))
|
||||
- Faucet
|
||||
- Remove flakey button-index testing
|
||||
- Only display faucet when sms verified (mainnet)
|
||||
- Simplify availability checks
|
||||
- WIP
|
||||
- Request from verified -> verified
|
||||
- Update endpoint, display response text
|
||||
- Error icon on errors
|
||||
- Parse hash text response
|
||||
- Use /api/:address endpoint
|
||||
- Hash -> data
|
||||
- Adjust sms-certified message
|
||||
- Fix SectionList hovering issue ([#4749](https://github.com/paritytech/parity/pull/4749))
|
||||
- Fix SectionList Items hover when <3 items
|
||||
- Even easier...
|
||||
- Lint (new)
|
||||
- Update ETC bootnodes [#4794](https://github.com/paritytech/parity/pull/4794)
|
||||
- Update comments and reg ABI [#4787](https://github.com/paritytech/parity/pull/4787)
|
||||
- Optimize signature for fallback function. [#4780](https://github.com/paritytech/parity/pull/4780)
|
||||
- Rephrasing token generation screen. [#4777](https://github.com/paritytech/parity/pull/4777)
|
||||
- Etherscan links based on netVersion identifier [#4772](https://github.com/paritytech/parity/pull/4772)
|
||||
- Update README.md [#4762](https://github.com/paritytech/parity/pull/4762)
|
||||
- Fix invalid props to verification code [#4766](https://github.com/paritytech/parity/pull/4766)
|
||||
- Extend authority round consensus test [#4756](https://github.com/paritytech/parity/pull/4756)
|
||||
- Revert last hyper "fix" [#4752](https://github.com/paritytech/parity/pull/4752)
|
||||
- Vault Management UI (round 3) [#4652](https://github.com/paritytech/parity/pull/4652)
|
||||
- Update SelectionList indicators [#4736](https://github.com/paritytech/parity/pull/4736)
|
||||
- Update testnet detection [#4746](https://github.com/paritytech/parity/pull/4746)
|
||||
- Fix Portal in Portal ESC issue [#4745](https://github.com/paritytech/parity/pull/4745)
|
||||
- Update wiki [#4743](https://github.com/paritytech/parity/pull/4743)
|
||||
- Account selector close operations [#4728](https://github.com/paritytech/parity/pull/4728)
|
||||
- Fix Account Selection in Signer [#4744](https://github.com/paritytech/parity/pull/4744)
|
||||
- Support both V1 & V2 DataChanged events in registry [#4734](https://github.com/paritytech/parity/pull/4734)
|
||||
- Add info on forks. [#4733](https://github.com/paritytech/parity/pull/4733)
|
||||
- Add registry addr [#4732](https://github.com/paritytech/parity/pull/4732)
|
||||
- UI support for hardware wallets [#4539](https://github.com/paritytech/parity/pull/4539)
|
||||
- S/delete/forget/ for wallets [#4729](https://github.com/paritytech/parity/pull/4729)
|
||||
- New chains [#4720](https://github.com/paritytech/parity/pull/4720)
|
||||
- Enable --warp by default [#4719](https://github.com/paritytech/parity/pull/4719)
|
||||
- Update Uglify (fix to 2.8.2) to fix binary builds [#4723](https://github.com/paritytech/parity/pull/4723)
|
||||
- Extract i18n strings in modals/* [#4706](https://github.com/paritytech/parity/pull/4706)
|
||||
- Provide uncle size where available in RPC [#4713](https://github.com/paritytech/parity/pull/4713)
|
||||
- EC math functions [#4696](https://github.com/paritytech/parity/pull/4696)
|
||||
- Add registrar fields [#4716](https://github.com/paritytech/parity/pull/4716)
|
||||
- Extract i18n strings in views/* [#4695](https://github.com/paritytech/parity/pull/4695)
|
||||
- Removing network=disable from config files [#4715](https://github.com/paritytech/parity/pull/4715)
|
||||
- Fast in-place migration for adding and removing column families [#4687](https://github.com/paritytech/parity/pull/4687)
|
||||
- Display badges on summary view [#4689](https://github.com/paritytech/parity/pull/4689)
|
||||
- Consistent file uploads [#4699](https://github.com/paritytech/parity/pull/4699)
|
||||
- Rename https://mkr.market -> https://oasisdex.com [#4701](https://github.com/paritytech/parity/pull/4701)
|
||||
- Stop copy & clickthrough from list summaries [#4700](https://github.com/paritytech/parity/pull/4700)
|
||||
- Display ... for address summary overflows [#4691](https://github.com/paritytech/parity/pull/4691)
|
||||
- Less aggressive grayscale/opacity in SelectionList [#4688](https://github.com/paritytech/parity/pull/4688)
|
||||
- Propagate trie errors upwards from State [#4655](https://github.com/paritytech/parity/pull/4655)
|
||||
- Generic state backend [#4632](https://github.com/paritytech/parity/pull/4632)
|
||||
- Enhance dialog layouts (round 1) [#4637](https://github.com/paritytech/parity/pull/4637)
|
||||
- Vault Management UI (round 2) [#4631](https://github.com/paritytech/parity/pull/4631)
|
||||
- Fix Portal broad event stopper [#4674](https://github.com/paritytech/parity/pull/4674)
|
||||
- Custom dev chain presets [#4671](https://github.com/paritytech/parity/pull/4671)
|
||||
- Max gas limit and min gas price [#4661](https://github.com/paritytech/parity/pull/4661)
|
||||
- Align list displays with SectionList (UI consistency) [#4621](https://github.com/paritytech/parity/pull/4621)
|
||||
- Add SelectionList component to DRY up [#4639](https://github.com/paritytech/parity/pull/4639)
|
||||
- I18n NL linting updates [#4662](https://github.com/paritytech/parity/pull/4662)
|
||||
- Misc. small UI fixes [#4657](https://github.com/paritytech/parity/pull/4657)
|
||||
- More CLI settings for IPFS API [#4608](https://github.com/paritytech/parity/pull/4608)
|
||||
- Fix Tendermint deadlock [#4654](https://github.com/paritytech/parity/pull/4654)
|
||||
- Nl translations [#4649](https://github.com/paritytech/parity/pull/4649)
|
||||
- Update transaction condition documentation [#4659](https://github.com/paritytech/parity/pull/4659)
|
||||
- Bump hyper versions [#4645](https://github.com/paritytech/parity/pull/4645)
|
||||
- Sane updater [#4658](https://github.com/paritytech/parity/pull/4658)
|
||||
- Remainder of RPC APIs implemented for the light client [#4594](https://github.com/paritytech/parity/pull/4594)
|
||||
- Preserve vault meta when changing pwd [#4650](https://github.com/paritytech/parity/pull/4650)
|
||||
- Fix Geth account import [#4641](https://github.com/paritytech/parity/pull/4641)
|
||||
- Tweak some checks. [#4633](https://github.com/paritytech/parity/pull/4633)
|
||||
- Attempt to fix subscribeToEvents test [#4638](https://github.com/paritytech/parity/pull/4638)
|
||||
- Fix selection value from RadioButtons [#4636](https://github.com/paritytech/parity/pull/4636)
|
||||
- Convert all remaining Modals to use Portal (UI consistency) [#4625](https://github.com/paritytech/parity/pull/4625)
|
||||
- Default account selection update [#4609](https://github.com/paritytech/parity/pull/4609)
|
||||
- Display ETH balance in overlay account selector [#4588](https://github.com/paritytech/parity/pull/4588)
|
||||
- Fixed minor grammar mistake in readme [#4627](https://github.com/paritytech/parity/pull/4627)
|
||||
- Extract newly available i18n strings [#4623](https://github.com/paritytech/parity/pull/4623)
|
||||
- Save pending local transactions in the database [#4566](https://github.com/paritytech/parity/pull/4566)
|
||||
- Bump CID version to allow compilation on all platforms [#4614](https://github.com/paritytech/parity/pull/4614)
|
||||
- Vault Management UI (first round) [#4446](https://github.com/paritytech/parity/pull/4446)
|
||||
- Let Engine decide if it seals internally [#4613](https://github.com/paritytech/parity/pull/4613)
|
||||
- Show only known accounts/wallets/addresses on Home [#4612](https://github.com/paritytech/parity/pull/4612)
|
||||
- Proper default accounts RPCs [#4580](https://github.com/paritytech/parity/pull/4580)
|
||||
- Hash-fetch errors in case upstream returns non-200 [#4599](https://github.com/paritytech/parity/pull/4599)
|
||||
- Added pending transaction info to eth_getTransactionByHash [#4570](https://github.com/paritytech/parity/pull/4570)
|
||||
- Secret store - initial version [#4567](https://github.com/paritytech/parity/pull/4567)
|
||||
- Handle invalid ABI retrieved from address_book gracefully [#4606](https://github.com/paritytech/parity/pull/4606)
|
||||
- Optimize key directory reloads [#4583](https://github.com/paritytech/parity/pull/4583)
|
||||
- Revert Double Click on Accounts to close in Signer Bar [#4590](https://github.com/paritytech/parity/pull/4590)
|
||||
- IPFS MVP [#4545](https://github.com/paritytech/parity/pull/4545)
|
||||
- Networking fixes [#4563](https://github.com/paritytech/parity/pull/4563)
|
||||
- Remove eth_compile* RPCs [#4577](https://github.com/paritytech/parity/pull/4577)
|
||||
- Ledger wallet signing fixed [#4578](https://github.com/paritytech/parity/pull/4578)
|
||||
- Remove vertx from Webpack config [#4576](https://github.com/paritytech/parity/pull/4576)
|
||||
- Better display of tags [#4564](https://github.com/paritytech/parity/pull/4564)
|
||||
- Added vaults support to `ethstore-cli` [#4532](https://github.com/paritytech/parity/pull/4532)
|
||||
- Fixed font URLs [#4579](https://github.com/paritytech/parity/pull/4579)
|
||||
- Explicitly set seconds to 0 from selector [#4559](https://github.com/paritytech/parity/pull/4559)
|
||||
- Fixes evmbin compilation and adding to standard build. [#4561](https://github.com/paritytech/parity/pull/4561)
|
||||
- Alias for personal_sendTransaction [#4554](https://github.com/paritytech/parity/pull/4554)
|
||||
- Key derivation in ethstore & rpc [#4515](https://github.com/paritytech/parity/pull/4515)
|
||||
- Skip OOG check for simple transfers [#4558](https://github.com/paritytech/parity/pull/4558)
|
||||
- Light Client transaction queue, initial LightDispatcher [#4501](https://github.com/paritytech/parity/pull/4501)
|
||||
- Fixes BadgeReg Middleware [#4556](https://github.com/paritytech/parity/pull/4556)
|
||||
- Fix pasting of value in Input fields [#4555](https://github.com/paritytech/parity/pull/4555)
|
||||
- Tooltips with react-intl [#4549](https://github.com/paritytech/parity/pull/4549)
|
||||
- Close on double-click for Signer Account selection [#4540](https://github.com/paritytech/parity/pull/4540)
|
||||
- Signer provenance [#4477](https://github.com/paritytech/parity/pull/4477)
|
||||
- Fix console dapp [#4544](https://github.com/paritytech/parity/pull/4544)
|
||||
- Extract i18n string into i18n/_defaults (base of translations) [#4514](https://github.com/paritytech/parity/pull/4514)
|
||||
- Fix contract queries bug [#4534](https://github.com/paritytech/parity/pull/4534)
|
||||
- Fixing namespace of couple methods in console. [#4538](https://github.com/paritytech/parity/pull/4538)
|
||||
- Home landing page [#4178](https://github.com/paritytech/parity/pull/4178)
|
||||
- Bump JSON RPC crates versions [#4530](https://github.com/paritytech/parity/pull/4530)
|
||||
- Update rust version in README [#4531](https://github.com/paritytech/parity/pull/4531)
|
||||
- Lower default pruning history and memory [#4528](https://github.com/paritytech/parity/pull/4528)
|
||||
- Serde 0.9 [#4508](https://github.com/paritytech/parity/pull/4508)
|
||||
- Fixes to Token Deploy dapp [#4513](https://github.com/paritytech/parity/pull/4513)
|
||||
- Fixed receipt decoding [#4521](https://github.com/paritytech/parity/pull/4521)
|
||||
- Several fixes to the Wallet in general [#4504](https://github.com/paritytech/parity/pull/4504)
|
||||
- Use the current contract name for Solidity compilation [#4510](https://github.com/paritytech/parity/pull/4510)
|
||||
- Preparation for Light client RPC [#4485](https://github.com/paritytech/parity/pull/4485)
|
||||
- Fix Dutch translation [#4509](https://github.com/paritytech/parity/pull/4509)
|
||||
- Fixed a warning and bumped libusb-sys [#4507](https://github.com/paritytech/parity/pull/4507)
|
||||
- Fix TnC overflows on small screens [#4505](https://github.com/paritytech/parity/pull/4505)
|
||||
- Fix no data sent in TxQueue dapp [#4502](https://github.com/paritytech/parity/pull/4502)
|
||||
- Ledger wallet support [#4486](https://github.com/paritytech/parity/pull/4486)
|
||||
- Add new Componennt for Token Images [#4498](https://github.com/paritytech/parity/pull/4498)
|
||||
- Fix address and accounts links [#4491](https://github.com/paritytech/parity/pull/4491)
|
||||
- Fix Token Reg Dapp issues in Firefox [#4489](https://github.com/paritytech/parity/pull/4489)
|
||||
- Parity.js interfaces for vaults [#4497](https://github.com/paritytech/parity/pull/4497)
|
||||
- Initial Dutch translations [#4484](https://github.com/paritytech/parity/pull/4484)
|
||||
- Fix key.meta.vault for root dir keys && read vault.meta without vault key [#4482](https://github.com/paritytech/parity/pull/4482)
|
||||
- Arbitrary labels for extended keys (u32, H256 built-in) [#4438](https://github.com/paritytech/parity/pull/4438)
|
||||
- Fix ethstore build [#4492](https://github.com/paritytech/parity/pull/4492)
|
||||
- Fixed compilation of ethstore-cli [#4493](https://github.com/paritytech/parity/pull/4493)
|
||||
- Build embedded Parity JS properly and separatly [#4426](https://github.com/paritytech/parity/pull/4426)
|
||||
- Static link for snappy [#4487](https://github.com/paritytech/parity/pull/4487)
|
||||
- Work with string numbers in contract (Fixes #4472) [#4478](https://github.com/paritytech/parity/pull/4478)
|
||||
- Metadata support for vaults [#4475](https://github.com/paritytech/parity/pull/4475)
|
||||
- Sort gas price corpus when hitting genesis [#4470](https://github.com/paritytech/parity/pull/4470)
|
||||
- Fixing CORS headers for parity.web3.site [#4461](https://github.com/paritytech/parity/pull/4461)
|
||||
- Make signing compatible with geth. [#4468](https://github.com/paritytech/parity/pull/4468)
|
||||
- Handle registry not found errors [#4465](https://github.com/paritytech/parity/pull/4465)
|
||||
- Fix Portal scrolling getting stuck [#4455](https://github.com/paritytech/parity/pull/4455)
|
||||
- Fix AccountCard stretch to 100% [#4450](https://github.com/paritytech/parity/pull/4450)
|
||||
- Include total difficulty in CHTs and hide implementation details from consumers [#4428](https://github.com/paritytech/parity/pull/4428)
|
||||
- Fix RLP encoding for types recursively calling `RlpStream::append` [#4362](https://github.com/paritytech/parity/pull/4362)
|
||||
- Open popup without attempting inline [#4440](https://github.com/paritytech/parity/pull/4440)
|
||||
- Fixing histogram again ([#4464](https://github.com/paritytech/parity/issues/4464)) port from beta [#4467](https://github.com/paritytech/parity/pull/4467)
|
||||
- Vaults RPCs [#4366](https://github.com/paritytech/parity/pull/4366)
|
||||
- Ethkey - extended keys [#4377](https://github.com/paritytech/parity/pull/4377)
|
||||
- Use secure websocket from HTTPS clients [#4436](https://github.com/paritytech/parity/pull/4436)
|
||||
- RPC middleware: Informant & Client.keep_alive [#4384](https://github.com/paritytech/parity/pull/4384)
|
||||
- Fix eth_sign/parity_postSign [#4432](https://github.com/paritytech/parity/pull/4432)
|
||||
- Web view with web3.site support [#4313](https://github.com/paritytech/parity/pull/4313)
|
||||
- Extend Portal component with title, buttons & steps (as per Modal) [#4392](https://github.com/paritytech/parity/pull/4392)
|
||||
- Extension installation overlay [#4423](https://github.com/paritytech/parity/pull/4423)
|
||||
- Add block & timestamp conditions to Signer [#4411](https://github.com/paritytech/parity/pull/4411)
|
||||
- Transaction timestamp condition [#4419](https://github.com/paritytech/parity/pull/4419)
|
||||
- Poll for defaultAccount to update dapp & overlay subscriptions [#4417](https://github.com/paritytech/parity/pull/4417)
|
||||
- Validate dapps accounts with address book [#4407](https://github.com/paritytech/parity/pull/4407)
|
||||
- Dapps use defaultAccount instead of own selectors [#4386](https://github.com/paritytech/parity/pull/4386)
|
||||
- Fix lock and rename tracing [#4403](https://github.com/paritytech/parity/pull/4403)
|
||||
- Restarting fetch client every now and then [#4399](https://github.com/paritytech/parity/pull/4399)
|
||||
- Perform a sync between Rust and JS when generating markdown instead of in spec tests [#4408](https://github.com/paritytech/parity/pull/4408)
|
||||
- Registry dapp: make lookup use lower case [#4409](https://github.com/paritytech/parity/pull/4409)
|
||||
- Available Dapp selection alignment with Permissions (Portal) [#4374](https://github.com/paritytech/parity/pull/4374)
|
||||
- More permissive verification process [#4317](https://github.com/paritytech/parity/pull/4317)
|
||||
- Fix ParityBar account selection overflows [#4405](https://github.com/paritytech/parity/pull/4405)
|
||||
- Mac binaries signing [#4397](https://github.com/paritytech/parity/pull/4397)
|
||||
- Revert "remove [ci skip]" [#4398](https://github.com/paritytech/parity/pull/4398)
|
||||
- Registry, s/a the owner/the owner/ [#4391](https://github.com/paritytech/parity/pull/4391)
|
||||
- Fixing invalid address in docs [#4388](https://github.com/paritytech/parity/pull/4388)
|
||||
- Remove [ci skip] [#4381](https://github.com/paritytech/parity/pull/4381)
|
||||
- Fixing estimate gas in case histogram is not available [#4387](https://github.com/paritytech/parity/pull/4387)
|
||||
- Default Account selector in Signer overlay [#4375](https://github.com/paritytech/parity/pull/4375)
|
||||
- Fixing web3 in console [#4382](https://github.com/paritytech/parity/pull/4382)
|
||||
- Add parity_defaultAccount RPC (with subscription) [#4383](https://github.com/paritytech/parity/pull/4383)
|
||||
- Full JSON-RPC docs + sync tests. [#4335](https://github.com/paritytech/parity/pull/4335)
|
||||
- Expose util as Api.util [#4372](https://github.com/paritytech/parity/pull/4372)
|
||||
- Dapp Account Selection & Defaults [#4355](https://github.com/paritytech/parity/pull/4355)
|
||||
- Publish @parity/jsonrpc [#4365](https://github.com/paritytech/parity/pull/4365)
|
||||
- Fix signing [#4363](https://github.com/paritytech/parity/pull/4363)
|
||||
- Fixing embedded bar not closing in chrome extension [#4367](https://github.com/paritytech/parity/pull/4367)
|
||||
- Update AccountCard for re-use [#4350](https://github.com/paritytech/parity/pull/4350)
|
||||
- Add proper event listener to Portal [#4359](https://github.com/paritytech/parity/pull/4359)
|
||||
- Optional from field in Transaction Requests [#4332](https://github.com/paritytech/parity/pull/4332)
|
||||
- Rust 1.14 in README [ci-skip] [#4361](https://github.com/paritytech/parity/pull/4361)
|
||||
- Fix JournalDB::earliest_era on empty database [#4316](https://github.com/paritytech/parity/pull/4316)
|
||||
- Fixed race condition deadlock on fetching enode URL [#4354](https://github.com/paritytech/parity/pull/4354)
|
||||
- Allow Portal to be used as top-level modal [#4338](https://github.com/paritytech/parity/pull/4338)
|
||||
- Fix postsign [#4347](https://github.com/paritytech/parity/pull/4347)
|
||||
- Renaming signAndSendTransaction to sendTransaction [#4351](https://github.com/paritytech/parity/pull/4351)
|
||||
- Add api.util.encodeMethodCall to parity.js [#4330](https://github.com/paritytech/parity/pull/4330)
|
||||
- Initial commit for vaults [#4312](https://github.com/paritytech/parity/pull/4312)
|
||||
- Returning default account as coinbase + allow altering sender in signer [#4323](https://github.com/paritytech/parity/pull/4323)
|
||||
- Persistent tracking of dapps [#4302](https://github.com/paritytech/parity/pull/4302)
|
||||
- Exposing all RPCs over dapps port as CLI option [#4346](https://github.com/paritytech/parity/pull/4346)
|
||||
- New macOS App [#4345](https://github.com/paritytech/parity/pull/4345)
|
||||
- Display QrCode for accounts, addresses & contracts [#4329](https://github.com/paritytech/parity/pull/4329)
|
||||
- Add QrCode & Copy to ShapeShift [#4322](https://github.com/paritytech/parity/pull/4322)
|
||||
- Parity.js api.parity.chainStatus should handle { blockGap: null } [#4327](https://github.com/paritytech/parity/pull/4327)
|
||||
- DeleteAccount & LoadContract modal updates [#4320](https://github.com/paritytech/parity/pull/4320)
|
||||
- Split Tab from TabBar [#4318](https://github.com/paritytech/parity/pull/4318)
|
||||
- Contracts interface expansion [#4307](https://github.com/paritytech/parity/pull/4307)
|
||||
- HistoryStore for tracking relevant routes [#4305](https://github.com/paritytech/parity/pull/4305)
|
||||
- Split Dapp icon into ui/DappIcon (re-use) [#4308](https://github.com/paritytech/parity/pull/4308)
|
||||
- Add a Playground for the UI Components [#4301](https://github.com/paritytech/parity/pull/4301)
|
||||
- Update CreateWallet with FormattedMessage [#4298](https://github.com/paritytech/parity/pull/4298)
|
||||
- Update dates for new PRs missed [#4306](https://github.com/paritytech/parity/pull/4306)
|
||||
- EIP-98: Optional transaction state root [#4296](https://github.com/paritytech/parity/pull/4296)
|
||||
- Fix whitespace [#4299](https://github.com/paritytech/parity/pull/4299)
|
||||
- Attempt to fix console. [#4294](https://github.com/paritytech/parity/pull/4294)
|
||||
- Ui/SectionList component [#4292](https://github.com/paritytech/parity/pull/4292)
|
||||
- Stratum up [#4233](https://github.com/paritytech/parity/pull/4233)
|
||||
- Logging transaction duration [#4297](https://github.com/paritytech/parity/pull/4297)
|
||||
- Generic engine utilities [#4258](https://github.com/paritytech/parity/pull/4258)
|
||||
- JSON-RPC interfaces with documentation [#4276](https://github.com/paritytech/parity/pull/4276)
|
||||
- Dont decode seal fields [#4263](https://github.com/paritytech/parity/pull/4263)
|
||||
- Skip misbehaving test until properly fixed [#4283](https://github.com/paritytech/parity/pull/4283)
|
||||
- Additional logs for own transactions [#4278](https://github.com/paritytech/parity/pull/4278)
|
||||
- Ensure write lock isn't held when calling handlers [#4285](https://github.com/paritytech/parity/pull/4285)
|
||||
- Feature selector [#4074](https://github.com/paritytech/parity/pull/4074)
|
||||
- AccountCreate updates [#3988](https://github.com/paritytech/parity/pull/3988)
|
||||
- Extended JS interface -> Markdown generator [#4275](https://github.com/paritytech/parity/pull/4275)
|
||||
- Added 3 warpnodes for ropsten [#4289](https://github.com/paritytech/parity/pull/4289)
|
||||
- Ledger Communication JS toolkit [#4268](https://github.com/paritytech/parity/pull/4268)
|
||||
- ValidatorSet reporting [#4208](https://github.com/paritytech/parity/pull/4208)
|
||||
- Add support for api.subscribe('parity_accountsInfo') [#4273](https://github.com/paritytech/parity/pull/4273)
|
||||
- Display AccountCard name via IdentityName [#4235](https://github.com/paritytech/parity/pull/4235)
|
||||
- Dapp visibility save/load tests [#4150](https://github.com/paritytech/parity/pull/4150)
|
||||
- Fix wrong output format of peers [#4270](https://github.com/paritytech/parity/pull/4270)
|
||||
- Chain scoring [#4218](https://github.com/paritytech/parity/pull/4218)
|
||||
- Rust 1.14 for windows builds [#4269](https://github.com/paritytech/parity/pull/4269)
|
||||
- Eslint formatting updates [#4234](https://github.com/paritytech/parity/pull/4234)
|
||||
- Embeddable ParityBar [#4222](https://github.com/paritytech/parity/pull/4222)
|
||||
- Update deb-build.sh to fix libssl dependency [#4260](https://github.com/paritytech/parity/pull/4260)
|
||||
- Integration with zgp whitelist contract [#4215](https://github.com/paritytech/parity/pull/4215)
|
||||
- Adjust the location of the signer snippet [#4155](https://github.com/paritytech/parity/pull/4155)
|
||||
- Fix wrong token handling [#4254](https://github.com/paritytech/parity/pull/4254)
|
||||
- Additional building-block UI components [#4239](https://github.com/paritytech/parity/pull/4239)
|
||||
- Bump package.json to 0.3.0 (1.6 track) [#4244](https://github.com/paritytech/parity/pull/4244)
|
||||
- Disable incoming ETH notifications [#4243](https://github.com/paritytech/parity/pull/4243)
|
||||
- Memory-based pruning history size [#4114](https://github.com/paritytech/parity/pull/4114)
|
||||
- Common EngineSigner [#4189](https://github.com/paritytech/parity/pull/4189)
|
||||
- Verification: don't request a code twice [#4221](https://github.com/paritytech/parity/pull/4221)
|
||||
- S/Delete Contract/Forget Contract/ [#4237](https://github.com/paritytech/parity/pull/4237)
|
||||
- Light protocol syncing improvements [#4212](https://github.com/paritytech/parity/pull/4212)
|
||||
- LES Peer Info [#4195](https://github.com/paritytech/parity/pull/4195)
|
||||
- Don't panic on uknown git commit hash [#4231](https://github.com/paritytech/parity/pull/4231)
|
||||
- Cache registry reverses in local storage [#4182](https://github.com/paritytech/parity/pull/4182)
|
||||
- Update version numbers in README [#4223](https://github.com/paritytech/parity/pull/4223)
|
||||
- CHT calculations for full nodes [#4181](https://github.com/paritytech/parity/pull/4181)
|
||||
- Use single source of info for dapp meta (build & display) [#4217](https://github.com/paritytech/parity/pull/4217)
|
||||
- Non-secure API for DappReg [#4216](https://github.com/paritytech/parity/pull/4216)
|
||||
- Console now has admin [#4220](https://github.com/paritytech/parity/pull/4220)
|
||||
- Verification: add mainnet BadgeReg ids [#4190](https://github.com/paritytech/parity/pull/4190)
|
||||
- Fixing minimal transaction queue price [#4204](https://github.com/paritytech/parity/pull/4204)
|
||||
- Remove unnecessary Engine method [#4184](https://github.com/paritytech/parity/pull/4184)
|
||||
- Fixed --base-path on windows [#4193](https://github.com/paritytech/parity/pull/4193)
|
||||
- Fixing etherscan price parsing [#4202](https://github.com/paritytech/parity/pull/4202)
|
||||
- LES: Better timeouts + Track failed requests [#4093](https://github.com/paritytech/parity/pull/4093)
|
||||
- ESLint additional rules [#4186](https://github.com/paritytech/parity/pull/4186)
|
||||
- JsonRPC bump for IPC fix [#4200](https://github.com/paritytech/parity/pull/4200)
|
||||
- Poll for upgrades as part of global status (long) [#4197](https://github.com/paritytech/parity/pull/4197)
|
||||
- Updater fixes [#4196](https://github.com/paritytech/parity/pull/4196)
|
||||
- Prevent duplicate incoming connections [#4180](https://github.com/paritytech/parity/pull/4180)
|
||||
- Minor typo to ensure it updates only when synced. [#4188](https://github.com/paritytech/parity/pull/4188)
|
||||
- Minor refactor for clarity [#4174](https://github.com/paritytech/parity/pull/4174)
|
||||
- Secret - from hash function, also validate data [#4159](https://github.com/paritytech/parity/pull/4159)
|
||||
- Gas_limit for blocks, mined by Parity will be divisible by 37 [#4154](https://github.com/paritytech/parity/pull/4154)
|
||||
- Support HTML5-routed dapps [#4173](https://github.com/paritytech/parity/pull/4173)
|
||||
- Fix subscribeToEvents test [#4166](https://github.com/paritytech/parity/pull/4166)
|
||||
- Fix dapps not loading [#4170](https://github.com/paritytech/parity/pull/4170)
|
||||
- Fix broken token images [#4169](https://github.com/paritytech/parity/pull/4169)
|
||||
- Bumping hyper [#4167](https://github.com/paritytech/parity/pull/4167)
|
||||
- Icarus -> update, increase web timeout. [#4165](https://github.com/paritytech/parity/pull/4165)
|
||||
- Add a password strength component [#4153](https://github.com/paritytech/parity/pull/4153)
|
||||
- Stop flickering + added loader in AddressSelector [#4149](https://github.com/paritytech/parity/pull/4149)
|
||||
- On demand LES request [#4036](https://github.com/paritytech/parity/pull/4036)
|
||||
- Ropsten fork detection [#4163](https://github.com/paritytech/parity/pull/4163)
|
||||
- Pull in console dapp as builtin [#4145](https://github.com/paritytech/parity/pull/4145)
|
||||
- Optimized hash lookups [#4144](https://github.com/paritytech/parity/pull/4144)
|
||||
- UnverifiedTransaction type [#4134](https://github.com/paritytech/parity/pull/4134)
|
||||
- Verification: check if server is running [#4140](https://github.com/paritytech/parity/pull/4140)
|
||||
- Remove onSubmit of current (no auto-change on password edit) [#4151](https://github.com/paritytech/parity/pull/4151)
|
||||
- Trim spaces from InputAddress [#4126](https://github.com/paritytech/parity/pull/4126)
|
||||
- Don't pop-up notifications after network switch [#4076](https://github.com/paritytech/parity/pull/4076)
|
||||
- Use estimateGas error (as per updated implementation) [#4131](https://github.com/paritytech/parity/pull/4131)
|
||||
- Improvements and optimisations to estimate_gas [#4142](https://github.com/paritytech/parity/pull/4142)
|
||||
- New jsonrpc-core with futures and metadata support [#3859](https://github.com/paritytech/parity/pull/3859)
|
||||
- Reenable mainnet update server. [#4137](https://github.com/paritytech/parity/pull/4137)
|
||||
- Temporarily skip failing test [#4138](https://github.com/paritytech/parity/pull/4138)
|
||||
- Refactor VoteCollector [#4101](https://github.com/paritytech/parity/pull/4101)
|
||||
- Another minor estimation fix [#4133](https://github.com/paritytech/parity/pull/4133)
|
||||
- Add proper label to method decoding inputs [#4136](https://github.com/paritytech/parity/pull/4136)
|
||||
- Remove bindActionCreators({}, dispatch) (empty, unneeded) [#4135](https://github.com/paritytech/parity/pull/4135)
|
||||
- Better contract error log reporting & handling [#4128](https://github.com/paritytech/parity/pull/4128)
|
||||
- Fix broken Transfer : total account balance [#4127](https://github.com/paritytech/parity/pull/4127)
|
||||
- Test harness for lightsync [#4109](https://github.com/paritytech/parity/pull/4109)
|
||||
- Fix call/estimate_gas [#4121](https://github.com/paritytech/parity/pull/4121)
|
||||
- Fixing decoding ABI with signatures in names [#4125](https://github.com/paritytech/parity/pull/4125)
|
||||
- Get rid of unsafe code in ethkey, propagate incorrect Secret errors. [#4119](https://github.com/paritytech/parity/pull/4119)
|
||||
- Basic tests for subscribeToEvents [#4115](https://github.com/paritytech/parity/pull/4115)
|
||||
- Auto-detect hex encoded bytes in sha3 [#4108](https://github.com/paritytech/parity/pull/4108)
|
||||
- Use binary chop to estimate gas accurately [#4100](https://github.com/paritytech/parity/pull/4100)
|
||||
- V1.6 in master [#4113](https://github.com/paritytech/parity/pull/4113)
|
||||
- Ignore get_price_info test by default. [#4112](https://github.com/paritytech/parity/pull/4112)
|
||||
- Fix wrong information logging [#4106](https://github.com/paritytech/parity/pull/4106)
|
||||
- Avoid comms with not-yet-active release update server. [#4111](https://github.com/paritytech/parity/pull/4111)
|
||||
- Update Transfer logic + Better logging [#4098](https://github.com/paritytech/parity/pull/4098)
|
||||
- Fix Signer : wrong account on reload [#4104](https://github.com/paritytech/parity/pull/4104)
|
||||
- Cache registry reverses, completion in address selector [#4066](https://github.com/paritytech/parity/pull/4066)
|
||||
- Validator/authority contract [#3937](https://github.com/paritytech/parity/pull/3937)
|
||||
- No reorg limit for ancient blocks [#4099](https://github.com/paritytech/parity/pull/4099)
|
||||
- Update registration after every write [#4102](https://github.com/paritytech/parity/pull/4102)
|
||||
- Default to no auto-update. [#4092](https://github.com/paritytech/parity/pull/4092)
|
||||
- Don't remove out of date local transactions [#4094](https://github.com/paritytech/parity/pull/4094)
|
||||
@@ -1,12 +1,18 @@
|
||||
[package]
|
||||
name = "ethash"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
|
||||
[lib]
|
||||
|
||||
[dependencies]
|
||||
log = "0.3"
|
||||
sha3 = { path = "../util/sha3" }
|
||||
hash = { path = "../util/hash" }
|
||||
primal = "0.2.3"
|
||||
parking_lot = "0.4"
|
||||
crunchy = "0.1.0"
|
||||
memmap = "0.5.2"
|
||||
either = "1.0.0"
|
||||
|
||||
[features]
|
||||
benches = []
|
||||
|
||||
352
ethash/src/cache.rs
Normal file
352
ethash/src/cache.rs
Normal file
@@ -0,0 +1,352 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use compute::Light;
|
||||
use either::Either;
|
||||
use keccak::{H256, keccak_512};
|
||||
use memmap::{Mmap, Protection};
|
||||
use parking_lot::Mutex;
|
||||
use seed_compute::SeedHashCompute;
|
||||
|
||||
use shared::{ETHASH_CACHE_ROUNDS, NODE_BYTES, NODE_DWORDS, Node, epoch, get_cache_size, to_hex};
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::fs;
|
||||
use std::io::{self, Read, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::slice;
|
||||
use std::sync::Arc;
|
||||
|
||||
type Cache = Either<Vec<Node>, Mmap>;
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
|
||||
pub enum OptimizeFor {
|
||||
Cpu,
|
||||
Memory,
|
||||
}
|
||||
|
||||
impl Default for OptimizeFor {
|
||||
fn default() -> Self {
|
||||
OptimizeFor::Cpu
|
||||
}
|
||||
}
|
||||
|
||||
fn byte_size(cache: &Cache) -> usize {
|
||||
use self::Either::{Left, Right};
|
||||
|
||||
match *cache {
|
||||
Left(ref vec) => vec.len() * NODE_BYTES,
|
||||
Right(ref mmap) => mmap.len(),
|
||||
}
|
||||
}
|
||||
|
||||
fn new_buffer(path: &Path, num_nodes: usize, ident: &H256, optimize_for: OptimizeFor) -> Cache {
|
||||
let memmap = match optimize_for {
|
||||
OptimizeFor::Cpu => None,
|
||||
OptimizeFor::Memory => make_memmapped_cache(path, num_nodes, ident).ok(),
|
||||
};
|
||||
|
||||
memmap.map(Either::Right).unwrap_or_else(|| {
|
||||
Either::Left(make_memory_cache(num_nodes, ident))
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct NodeCacheBuilder {
|
||||
// TODO: Remove this locking and just use an `Rc`?
|
||||
seedhash: Arc<Mutex<SeedHashCompute>>,
|
||||
optimize_for: OptimizeFor,
|
||||
}
|
||||
|
||||
// TODO: Abstract the "optimize for" logic
|
||||
pub struct NodeCache {
|
||||
builder: NodeCacheBuilder,
|
||||
cache_dir: Cow<'static, Path>,
|
||||
cache_path: PathBuf,
|
||||
epoch: u64,
|
||||
cache: Cache,
|
||||
}
|
||||
|
||||
impl NodeCacheBuilder {
|
||||
pub fn light(&self, cache_dir: &Path, block_number: u64) -> Light {
|
||||
Light::new_with_builder(self, cache_dir, block_number)
|
||||
}
|
||||
|
||||
pub fn light_from_file(&self, cache_dir: &Path, block_number: u64) -> io::Result<Light> {
|
||||
Light::from_file_with_builder(self, cache_dir, block_number)
|
||||
}
|
||||
|
||||
pub fn new<T: Into<Option<OptimizeFor>>>(optimize_for: T) -> Self {
|
||||
NodeCacheBuilder {
|
||||
seedhash: Arc::new(Mutex::new(SeedHashCompute::new())),
|
||||
optimize_for: optimize_for.into().unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn block_number_to_ident(&self, block_number: u64) -> H256 {
|
||||
self.seedhash.lock().hash_block_number(block_number)
|
||||
}
|
||||
|
||||
fn epoch_to_ident(&self, epoch: u64) -> H256 {
|
||||
self.seedhash.lock().hash_epoch(epoch)
|
||||
}
|
||||
|
||||
pub fn from_file<P: Into<Cow<'static, Path>>>(
|
||||
&self,
|
||||
cache_dir: P,
|
||||
block_number: u64,
|
||||
) -> io::Result<NodeCache> {
|
||||
let cache_dir = cache_dir.into();
|
||||
let ident = self.block_number_to_ident(block_number);
|
||||
|
||||
let path = cache_path(cache_dir.as_ref(), &ident);
|
||||
|
||||
let cache = cache_from_path(&path, self.optimize_for)?;
|
||||
let expected_cache_size = get_cache_size(block_number);
|
||||
|
||||
if byte_size(&cache) == expected_cache_size {
|
||||
Ok(NodeCache {
|
||||
builder: self.clone(),
|
||||
epoch: epoch(block_number),
|
||||
cache_dir: cache_dir,
|
||||
cache_path: path,
|
||||
cache: cache,
|
||||
})
|
||||
} else {
|
||||
Err(io::Error::new(
|
||||
io::ErrorKind::InvalidData,
|
||||
"Node cache is of incorrect size",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_cache<P: Into<Cow<'static, Path>>>(
|
||||
&self,
|
||||
cache_dir: P,
|
||||
block_number: u64,
|
||||
) -> NodeCache {
|
||||
let cache_dir = cache_dir.into();
|
||||
let ident = self.block_number_to_ident(block_number);
|
||||
|
||||
let cache_size = get_cache_size(block_number);
|
||||
|
||||
// We use `debug_assert` since it is impossible for `get_cache_size` to return an unaligned
|
||||
// value with the current implementation. If the implementation changes, CI will catch it.
|
||||
debug_assert!(cache_size % NODE_BYTES == 0, "Unaligned cache size");
|
||||
let num_nodes = cache_size / NODE_BYTES;
|
||||
|
||||
let path = cache_path(cache_dir.as_ref(), &ident);
|
||||
let nodes = new_buffer(&path, num_nodes, &ident, self.optimize_for);
|
||||
|
||||
NodeCache {
|
||||
builder: self.clone(),
|
||||
epoch: epoch(block_number),
|
||||
cache_dir: cache_dir.into(),
|
||||
cache_path: path,
|
||||
cache: nodes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl NodeCache {
|
||||
pub fn cache_path(&self) -> &Path {
|
||||
&self.cache_path
|
||||
}
|
||||
|
||||
pub fn flush(&mut self) -> io::Result<()> {
|
||||
if let Some(last) = self.epoch.checked_sub(2).map(|ep| {
|
||||
cache_path(self.cache_dir.as_ref(), &self.builder.epoch_to_ident(ep))
|
||||
})
|
||||
{
|
||||
fs::remove_file(last).unwrap_or_else(|error| match error.kind() {
|
||||
io::ErrorKind::NotFound => (),
|
||||
_ => warn!("Error removing stale DAG cache: {:?}", error),
|
||||
});
|
||||
}
|
||||
|
||||
consume_cache(&mut self.cache, &self.cache_path)
|
||||
}
|
||||
}
|
||||
|
||||
fn make_memmapped_cache(path: &Path, num_nodes: usize, ident: &H256) -> io::Result<Mmap> {
|
||||
use std::fs::OpenOptions;
|
||||
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.open(&path)?;
|
||||
file.set_len((num_nodes * NODE_BYTES) as _)?;
|
||||
|
||||
let mut memmap = Mmap::open(&file, Protection::ReadWrite)?;
|
||||
|
||||
unsafe { initialize_memory(memmap.mut_ptr() as *mut Node, num_nodes, ident) };
|
||||
|
||||
Ok(memmap)
|
||||
}
|
||||
|
||||
fn make_memory_cache(num_nodes: usize, ident: &H256) -> Vec<Node> {
|
||||
let mut nodes: Vec<Node> = Vec::with_capacity(num_nodes);
|
||||
// Use uninit instead of unnecessarily writing `size_of::<Node>() * num_nodes` 0s
|
||||
unsafe {
|
||||
initialize_memory(nodes.as_mut_ptr(), num_nodes, ident);
|
||||
nodes.set_len(num_nodes);
|
||||
}
|
||||
|
||||
nodes
|
||||
}
|
||||
|
||||
fn cache_path<'a, P: Into<Cow<'a, Path>>>(path: P, ident: &H256) -> PathBuf {
|
||||
let mut buf = path.into().into_owned();
|
||||
buf.push(to_hex(ident));
|
||||
buf
|
||||
}
|
||||
|
||||
fn consume_cache(cache: &mut Cache, path: &Path) -> io::Result<()> {
|
||||
use std::fs::OpenOptions;
|
||||
|
||||
match *cache {
|
||||
Either::Left(ref mut vec) => {
|
||||
let mut file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.open(&path)?;
|
||||
|
||||
let buf = unsafe {
|
||||
slice::from_raw_parts_mut(vec.as_mut_ptr() as *mut u8, vec.len() * NODE_BYTES)
|
||||
};
|
||||
|
||||
file.write_all(buf).map(|_| ())
|
||||
}
|
||||
Either::Right(ref mmap) => {
|
||||
mmap.flush()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cache_from_path(path: &Path, optimize_for: OptimizeFor) -> io::Result<Cache> {
|
||||
let memmap = match optimize_for {
|
||||
OptimizeFor::Cpu => None,
|
||||
OptimizeFor::Memory => Mmap::open_path(path, Protection::ReadWrite).ok(),
|
||||
};
|
||||
|
||||
memmap.map(Either::Right).ok_or(()).or_else(|_| {
|
||||
read_from_path(path).map(Either::Left)
|
||||
})
|
||||
}
|
||||
|
||||
fn read_from_path(path: &Path) -> io::Result<Vec<Node>> {
|
||||
use std::fs::File;
|
||||
use std::mem;
|
||||
|
||||
let mut file = File::open(path)?;
|
||||
|
||||
let mut nodes: Vec<u8> = Vec::with_capacity(file.metadata().map(|m| m.len() as _).unwrap_or(
|
||||
NODE_BYTES * 1_000_000,
|
||||
));
|
||||
file.read_to_end(&mut nodes)?;
|
||||
|
||||
nodes.shrink_to_fit();
|
||||
|
||||
if nodes.len() % NODE_BYTES != 0 || nodes.capacity() % NODE_BYTES != 0 {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Node cache is not a multiple of node size",
|
||||
));
|
||||
}
|
||||
|
||||
let out: Vec<Node> = unsafe {
|
||||
Vec::from_raw_parts(
|
||||
nodes.as_mut_ptr() as *mut _,
|
||||
nodes.len() / NODE_BYTES,
|
||||
nodes.capacity() / NODE_BYTES,
|
||||
)
|
||||
};
|
||||
|
||||
mem::forget(nodes);
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
impl AsRef<[Node]> for NodeCache {
|
||||
fn as_ref(&self) -> &[Node] {
|
||||
match self.cache {
|
||||
Either::Left(ref vec) => vec,
|
||||
Either::Right(ref mmap) => unsafe {
|
||||
let bytes = mmap.ptr();
|
||||
// This isn't a safety issue, so we can keep this a debug lint. We don't care about
|
||||
// people manually messing with the files unless it can cause unsafety, but if we're
|
||||
// generating incorrect files then we want to catch that in CI.
|
||||
debug_assert_eq!(mmap.len() % NODE_BYTES, 0);
|
||||
slice::from_raw_parts(bytes as _, mmap.len() / NODE_BYTES)
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This takes a raw pointer and a counter because `memory` may be uninitialized. `memory` _must_ be
|
||||
// a pointer to the beginning of an allocated but possibly-uninitialized block of
|
||||
// `num_nodes * NODE_BYTES` bytes
|
||||
//
|
||||
// We have to use raw pointers to read/write uninit, using "normal" indexing causes LLVM to freak
|
||||
// out. It counts as a read and causes all writes afterwards to be elided. Yes, really. I know, I
|
||||
// want to refactor this to use less `unsafe` as much as the next rustacean.
|
||||
unsafe fn initialize_memory(memory: *mut Node, num_nodes: usize, ident: &H256) {
|
||||
let dst = memory as *mut u8;
|
||||
|
||||
debug_assert_eq!(ident.len(), 32);
|
||||
keccak_512::unchecked(dst, NODE_BYTES, ident.as_ptr(), ident.len());
|
||||
|
||||
for i in 1..num_nodes {
|
||||
// We use raw pointers here, see above
|
||||
let dst = memory.offset(i as _) as *mut u8;
|
||||
let src = memory.offset(i as isize - 1) as *mut u8;
|
||||
|
||||
keccak_512::unchecked(dst, NODE_BYTES, src, NODE_BYTES);
|
||||
}
|
||||
|
||||
// Now this is initialized, we can treat it as a slice.
|
||||
let nodes: &mut [Node] = slice::from_raw_parts_mut(memory, num_nodes);
|
||||
|
||||
// For `unroll!`, see below. If the literal in `unroll!` is not the same as the RHS here then
|
||||
// these have got out of sync! Don't let this happen!
|
||||
debug_assert_eq!(NODE_DWORDS, 8);
|
||||
|
||||
// This _should_ get unrolled by the compiler, since it's not using the loop variable.
|
||||
for _ in 0..ETHASH_CACHE_ROUNDS {
|
||||
for i in 0..num_nodes {
|
||||
let data_idx = (num_nodes - 1 + i) % num_nodes;
|
||||
let idx = nodes.get_unchecked_mut(i).as_words()[0] as usize % num_nodes;
|
||||
|
||||
let data = {
|
||||
let mut data: Node = nodes.get_unchecked(data_idx).clone();
|
||||
let rhs: &Node = nodes.get_unchecked(idx);
|
||||
|
||||
unroll! {
|
||||
for w in 0..8 {
|
||||
*data.as_dwords_mut().get_unchecked_mut(w) ^=
|
||||
*rhs.as_dwords().get_unchecked(w);
|
||||
}
|
||||
}
|
||||
|
||||
data
|
||||
};
|
||||
|
||||
keccak_512::write(&data.bytes, &mut nodes.get_unchecked_mut(i).bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,30 +19,16 @@
|
||||
|
||||
// TODO: fix endianess for big endian
|
||||
|
||||
use primal::is_prime;
|
||||
use std::cell::Cell;
|
||||
use keccak::{keccak_512, keccak_256, H256};
|
||||
use cache::{NodeCache, NodeCacheBuilder};
|
||||
use seed_compute::SeedHashCompute;
|
||||
use shared::*;
|
||||
use std::io;
|
||||
|
||||
use std::mem;
|
||||
use std::path::Path;
|
||||
use std::ptr;
|
||||
use sha3;
|
||||
use std::slice;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::io::{self, Read, Write};
|
||||
use std::fs::{self, File};
|
||||
|
||||
use parking_lot::Mutex;
|
||||
|
||||
pub const ETHASH_EPOCH_LENGTH: u64 = 30000;
|
||||
pub const ETHASH_CACHE_ROUNDS: usize = 3;
|
||||
pub const ETHASH_MIX_BYTES: usize = 128;
|
||||
pub const ETHASH_ACCESSES: usize = 64;
|
||||
pub const ETHASH_DATASET_PARENTS: u32 = 256;
|
||||
|
||||
const DATASET_BYTES_INIT: u64 = 1 << 30;
|
||||
const DATASET_BYTES_GROWTH: u64 = 1 << 23;
|
||||
const CACHE_BYTES_INIT: u64 = 1 << 24;
|
||||
const CACHE_BYTES_GROWTH: u64 = 1 << 17;
|
||||
const NODE_WORDS: usize = 64 / 4;
|
||||
const NODE_BYTES: usize = 64;
|
||||
const MIX_WORDS: usize = ETHASH_MIX_BYTES / 4;
|
||||
const MIX_NODES: usize = MIX_WORDS / NODE_WORDS;
|
||||
const FNV_PRIME: u32 = 0x01000193;
|
||||
@@ -55,48 +41,24 @@ pub struct ProofOfWork {
|
||||
pub mix_hash: H256,
|
||||
}
|
||||
|
||||
struct Node {
|
||||
bytes: [u8; NODE_BYTES],
|
||||
}
|
||||
|
||||
impl Default for Node {
|
||||
fn default() -> Self {
|
||||
Node { bytes: [0u8; NODE_BYTES] }
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Node {
|
||||
fn clone(&self) -> Self {
|
||||
Node { bytes: *&self.bytes }
|
||||
}
|
||||
}
|
||||
|
||||
impl Node {
|
||||
#[inline]
|
||||
fn as_words(&self) -> &[u32; NODE_WORDS] {
|
||||
unsafe { mem::transmute(&self.bytes) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn as_words_mut(&mut self) -> &mut [u32; NODE_WORDS] {
|
||||
unsafe { mem::transmute(&mut self.bytes) }
|
||||
}
|
||||
}
|
||||
|
||||
pub type H256 = [u8; 32];
|
||||
|
||||
pub struct Light {
|
||||
cache_dir: PathBuf,
|
||||
block_number: u64,
|
||||
cache: Vec<Node>,
|
||||
seed_compute: Mutex<SeedHashCompute>,
|
||||
cache: NodeCache,
|
||||
}
|
||||
|
||||
/// Light cache structure
|
||||
impl Light {
|
||||
/// Create a new light cache for a given block number
|
||||
pub fn new<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> Light {
|
||||
light_new(cache_dir, block_number)
|
||||
pub fn new_with_builder(
|
||||
builder: &NodeCacheBuilder,
|
||||
cache_dir: &Path,
|
||||
block_number: u64,
|
||||
) -> Self {
|
||||
let cache = builder.new_cache(cache_dir.to_path_buf(), block_number);
|
||||
|
||||
Light {
|
||||
block_number: block_number,
|
||||
cache: cache,
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculate the light boundary data
|
||||
@@ -106,139 +68,32 @@ impl Light {
|
||||
light_compute(self, header_hash, nonce)
|
||||
}
|
||||
|
||||
pub fn file_path<T: AsRef<Path>>(cache_dir: T, seed_hash: H256) -> PathBuf {
|
||||
let mut cache_dir = cache_dir.as_ref().to_path_buf();
|
||||
cache_dir.push(to_hex(&seed_hash));
|
||||
cache_dir
|
||||
}
|
||||
|
||||
pub fn from_file<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> io::Result<Light> {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
let path = Light::file_path(&cache_dir, seed_compute.get_seedhash(block_number));
|
||||
let mut file = File::open(path)?;
|
||||
|
||||
let cache_size = get_cache_size(block_number);
|
||||
if file.metadata()?.len() != cache_size as u64 {
|
||||
return Err(io::Error::new(io::ErrorKind::Other, "Cache file size mismatch"));
|
||||
}
|
||||
let num_nodes = cache_size / NODE_BYTES;
|
||||
let mut nodes: Vec<Node> = Vec::with_capacity(num_nodes);
|
||||
nodes.resize(num_nodes, unsafe { mem::uninitialized() });
|
||||
let buf = unsafe { slice::from_raw_parts_mut(nodes.as_mut_ptr() as *mut u8, cache_size) };
|
||||
file.read_exact(buf)?;
|
||||
pub fn from_file_with_builder(
|
||||
builder: &NodeCacheBuilder,
|
||||
cache_dir: &Path,
|
||||
block_number: u64,
|
||||
) -> io::Result<Self> {
|
||||
let cache = builder.from_file(cache_dir.to_path_buf(), block_number)?;
|
||||
Ok(Light {
|
||||
block_number,
|
||||
cache_dir: cache_dir.as_ref().to_path_buf(),
|
||||
cache: nodes,
|
||||
seed_compute: Mutex::new(seed_compute),
|
||||
block_number: block_number,
|
||||
cache: cache,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn to_file(&self) -> io::Result<PathBuf> {
|
||||
let seed_compute = self.seed_compute.lock();
|
||||
let path = Light::file_path(&self.cache_dir, seed_compute.get_seedhash(self.block_number));
|
||||
|
||||
if self.block_number >= ETHASH_EPOCH_LENGTH * 2 {
|
||||
let deprecated = Light::file_path(
|
||||
&self.cache_dir,
|
||||
seed_compute.get_seedhash(self.block_number - ETHASH_EPOCH_LENGTH * 2)
|
||||
);
|
||||
|
||||
if deprecated.exists() {
|
||||
debug!(target: "ethash", "removing: {:?}", &deprecated);
|
||||
fs::remove_file(deprecated)?;
|
||||
}
|
||||
}
|
||||
|
||||
fs::create_dir_all(path.parent().unwrap())?;
|
||||
let mut file = File::create(&path)?;
|
||||
|
||||
let cache_size = self.cache.len() * NODE_BYTES;
|
||||
let buf = unsafe { slice::from_raw_parts(self.cache.as_ptr() as *const u8, cache_size) };
|
||||
file.write(buf)?;
|
||||
Ok(path)
|
||||
pub fn to_file(&mut self) -> io::Result<&Path> {
|
||||
self.cache.flush()?;
|
||||
Ok(self.cache.cache_path())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SeedHashCompute {
|
||||
prev_epoch: Cell<u64>,
|
||||
prev_seedhash: Cell<H256>,
|
||||
}
|
||||
|
||||
impl SeedHashCompute {
|
||||
#[inline]
|
||||
pub fn new() -> SeedHashCompute {
|
||||
SeedHashCompute {
|
||||
prev_epoch: Cell::new(0),
|
||||
prev_seedhash: Cell::new([0u8; 32]),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn reset_cache(&self) {
|
||||
self.prev_epoch.set(0);
|
||||
self.prev_seedhash.set([0u8; 32]);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn get_seedhash(&self, block_number: u64) -> H256 {
|
||||
let epoch = block_number / ETHASH_EPOCH_LENGTH;
|
||||
if epoch < self.prev_epoch.get() {
|
||||
// can't build on previous hash if requesting an older block
|
||||
self.reset_cache();
|
||||
}
|
||||
if epoch > self.prev_epoch.get() {
|
||||
let seed_hash = SeedHashCompute::resume_compute_seedhash(self.prev_seedhash.get(), self.prev_epoch.get(), epoch);
|
||||
self.prev_seedhash.set(seed_hash);
|
||||
self.prev_epoch.set(epoch);
|
||||
}
|
||||
self.prev_seedhash.get()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn resume_compute_seedhash(mut hash: H256, start_epoch: u64, end_epoch: u64) -> H256 {
|
||||
for _ in start_epoch..end_epoch {
|
||||
unsafe { sha3::sha3_256(hash[..].as_mut_ptr(), 32, hash[..].as_ptr(), 32) };
|
||||
}
|
||||
hash
|
||||
}
|
||||
}
|
||||
|
||||
pub fn slow_get_seedhash(block_number: u64) -> H256 {
|
||||
pub fn slow_hash_block_number(block_number: u64) -> H256 {
|
||||
SeedHashCompute::resume_compute_seedhash([0u8; 32], 0, block_number / ETHASH_EPOCH_LENGTH)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn fnv_hash(x: u32, y: u32) -> u32 {
|
||||
return x.wrapping_mul(FNV_PRIME) ^ y;
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn sha3_512(input: &[u8], output: &mut [u8]) {
|
||||
unsafe { sha3::sha3_512(output.as_mut_ptr(), output.len(), input.as_ptr(), input.len()) };
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn get_cache_size(block_number: u64) -> usize {
|
||||
let mut sz: u64 = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
|
||||
sz = sz - NODE_BYTES as u64;
|
||||
while !is_prime(sz / NODE_BYTES as u64) {
|
||||
sz = sz - 2 * NODE_BYTES as u64;
|
||||
}
|
||||
sz as usize
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn get_data_size(block_number: u64) -> usize {
|
||||
let mut sz: u64 = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
|
||||
sz = sz - ETHASH_MIX_BYTES as u64;
|
||||
while !is_prime(sz / ETHASH_MIX_BYTES as u64) {
|
||||
sz = sz - 2 * ETHASH_MIX_BYTES as u64;
|
||||
}
|
||||
sz as usize
|
||||
}
|
||||
|
||||
|
||||
/// Difficulty quick check for POW preverification
|
||||
///
|
||||
/// `header_hash` The hash of the header
|
||||
@@ -246,17 +101,27 @@ fn get_data_size(block_number: u64) -> usize {
|
||||
/// `mix_hash` The mix digest hash
|
||||
/// Boundary recovered from mix hash
|
||||
pub fn quick_get_difficulty(header_hash: &H256, nonce: u64, mix_hash: &H256) -> H256 {
|
||||
let mut buf = [0u8; 64 + 32];
|
||||
unsafe { ptr::copy_nonoverlapping(header_hash.as_ptr(), buf.as_mut_ptr(), 32) };
|
||||
unsafe { ptr::copy_nonoverlapping(mem::transmute(&nonce), buf[32..].as_mut_ptr(), 8) };
|
||||
unsafe {
|
||||
// This is safe - the `keccak_512` call below reads the first 40 bytes (which we explicitly set
|
||||
// with two `copy_nonoverlapping` calls) but writes the first 64, and then we explicitly write
|
||||
// the next 32 bytes before we read the whole thing with `keccak_256`.
|
||||
//
|
||||
// This cannot be elided by the compiler as it doesn't know the implementation of
|
||||
// `keccak_512`.
|
||||
let mut buf: [u8; 64 + 32] = mem::uninitialized();
|
||||
|
||||
unsafe { sha3::sha3_512(buf.as_mut_ptr(), 64, buf.as_ptr(), 40) };
|
||||
unsafe { ptr::copy_nonoverlapping(mix_hash.as_ptr(), buf[64..].as_mut_ptr(), 32) };
|
||||
ptr::copy_nonoverlapping(header_hash.as_ptr(), buf.as_mut_ptr(), 32);
|
||||
ptr::copy_nonoverlapping(mem::transmute(&nonce), buf[32..].as_mut_ptr(), 8);
|
||||
|
||||
let mut hash = [0u8; 32];
|
||||
unsafe { sha3::sha3_256(hash.as_mut_ptr(), hash.len(), buf.as_ptr(), buf.len()) };
|
||||
hash.as_mut_ptr();
|
||||
hash
|
||||
keccak_512::unchecked(buf.as_mut_ptr(), 64, buf.as_ptr(), 40);
|
||||
ptr::copy_nonoverlapping(mix_hash.as_ptr(), buf[64..].as_mut_ptr(), 32);
|
||||
|
||||
// This is initialized in `keccak_256`
|
||||
let mut hash: [u8; 32] = mem::uninitialized();
|
||||
keccak_256::unchecked(hash.as_mut_ptr(), hash.len(), buf.as_ptr(), buf.len());
|
||||
|
||||
hash
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculate the light client data
|
||||
@@ -269,219 +134,279 @@ pub fn light_compute(light: &Light, header_hash: &H256, nonce: u64) -> ProofOfWo
|
||||
}
|
||||
|
||||
fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64) -> ProofOfWork {
|
||||
macro_rules! make_const_array {
|
||||
($n:expr, $value:expr) => {{
|
||||
// We use explicit lifetimes to ensure that val's borrow is invalidated until the
|
||||
// transmuted val dies.
|
||||
unsafe fn make_const_array<'a, T, U>(val: &'a mut [T]) -> &'a mut [U; $n] {
|
||||
use ::std::mem;
|
||||
|
||||
debug_assert_eq!(val.len() * mem::size_of::<T>(), $n * mem::size_of::<U>());
|
||||
mem::transmute(val.as_mut_ptr())
|
||||
}
|
||||
|
||||
make_const_array($value)
|
||||
}}
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
struct MixBuf {
|
||||
half_mix: Node,
|
||||
compress_bytes: [u8; MIX_WORDS],
|
||||
};
|
||||
|
||||
if full_size % MIX_WORDS != 0 {
|
||||
panic!("Unaligned full size");
|
||||
}
|
||||
// pack hash and nonce together into first 40 bytes of s_mix
|
||||
let mut s_mix: [Node; MIX_NODES + 1] = [Node::default(), Node::default(), Node::default()];
|
||||
unsafe { ptr::copy_nonoverlapping(header_hash.as_ptr(), s_mix.get_unchecked_mut(0).bytes.as_mut_ptr(), 32) };
|
||||
unsafe { ptr::copy_nonoverlapping(mem::transmute(&nonce), s_mix.get_unchecked_mut(0).bytes[32..].as_mut_ptr(), 8) };
|
||||
|
||||
// compute sha3-512 hash and replicate across mix
|
||||
unsafe {
|
||||
sha3::sha3_512(s_mix.get_unchecked_mut(0).bytes.as_mut_ptr(), NODE_BYTES, s_mix.get_unchecked(0).bytes.as_ptr(), 40);
|
||||
let (f_mix, mut mix) = s_mix.split_at_mut(1);
|
||||
for w in 0..MIX_WORDS {
|
||||
*mix.get_unchecked_mut(0).as_words_mut().get_unchecked_mut(w) = *f_mix.get_unchecked(0).as_words().get_unchecked(w % NODE_WORDS);
|
||||
}
|
||||
// You may be asking yourself: what in the name of Crypto Jesus is going on here? So: we need
|
||||
// `half_mix` and `compress_bytes` in a single array later down in the code (we hash them
|
||||
// together to create `value`) so that we can hash the full array. However, we do a bunch of
|
||||
// reading and writing to these variables first. We originally allocated two arrays and then
|
||||
// stuck them together with `ptr::copy_nonoverlapping` at the end, but this method is
|
||||
// _significantly_ faster - by my benchmarks, a consistent 3-5%. This is the most ridiculous
|
||||
// optimization I have ever done and I am so sorry. I can only chalk it up to cache locality
|
||||
// improvements, since I can't imagine that 3-5% of our runtime is taken up by catting two
|
||||
// arrays together.
|
||||
let mut buf: MixBuf = MixBuf {
|
||||
half_mix: unsafe {
|
||||
// Pack `header_hash` and `nonce` together
|
||||
// We explicitly write the first 40 bytes, leaving the last 24 as uninitialized. Then
|
||||
// `keccak_512` reads the first 40 bytes (4th parameter) and overwrites the entire array,
|
||||
// leaving it fully initialized.
|
||||
let mut out: [u8; NODE_BYTES] = mem::uninitialized();
|
||||
|
||||
let page_size = 4 * MIX_WORDS;
|
||||
let num_full_pages = (full_size / page_size) as u32;
|
||||
let cache: &[Node] = &light.cache; // deref once for better performance
|
||||
ptr::copy_nonoverlapping(header_hash.as_ptr(), out.as_mut_ptr(), header_hash.len());
|
||||
ptr::copy_nonoverlapping(
|
||||
mem::transmute(&nonce),
|
||||
out[header_hash.len()..].as_mut_ptr(),
|
||||
mem::size_of::<u64>(),
|
||||
);
|
||||
|
||||
for i in 0..(ETHASH_ACCESSES as u32) {
|
||||
let index = fnv_hash(f_mix.get_unchecked(0).as_words().get_unchecked(0) ^ i, *mix.get_unchecked(0).as_words().get_unchecked((i as usize) % MIX_WORDS)) % num_full_pages;
|
||||
for n in 0..MIX_NODES {
|
||||
let tmp_node = calculate_dag_item(index * MIX_NODES as u32 + n as u32, cache);
|
||||
for w in 0..NODE_WORDS {
|
||||
*mix.get_unchecked_mut(n).as_words_mut().get_unchecked_mut(w) = fnv_hash(*mix.get_unchecked(n).as_words().get_unchecked(w), *tmp_node.as_words().get_unchecked(w));
|
||||
// compute keccak-512 hash and replicate across mix
|
||||
keccak_512::unchecked(
|
||||
out.as_mut_ptr(),
|
||||
NODE_BYTES,
|
||||
out.as_ptr(),
|
||||
header_hash.len() + mem::size_of::<u64>(),
|
||||
);
|
||||
|
||||
Node { bytes: out }
|
||||
},
|
||||
// This is fully initialized before being read, see `let mut compress = ...` below
|
||||
compress_bytes: unsafe { mem::uninitialized() },
|
||||
};
|
||||
|
||||
let mut mix: [_; MIX_NODES] = [buf.half_mix.clone(), buf.half_mix.clone()];
|
||||
|
||||
let page_size = 4 * MIX_WORDS;
|
||||
let num_full_pages = (full_size / page_size) as u32;
|
||||
// deref once for better performance
|
||||
let cache: &[Node] = light.cache.as_ref();
|
||||
let first_val = buf.half_mix.as_words()[0];
|
||||
|
||||
debug_assert_eq!(MIX_NODES, 2);
|
||||
debug_assert_eq!(NODE_WORDS, 16);
|
||||
|
||||
for i in 0..ETHASH_ACCESSES as u32 {
|
||||
let index = {
|
||||
// This is trivially safe, but does not work on big-endian. The safety of this is
|
||||
// asserted in debug builds (see the definition of `make_const_array!`).
|
||||
let mix_words: &mut [u32; MIX_WORDS] =
|
||||
unsafe { make_const_array!(MIX_WORDS, &mut mix) };
|
||||
|
||||
fnv_hash(first_val ^ i, mix_words[i as usize % MIX_WORDS]) % num_full_pages
|
||||
};
|
||||
|
||||
unroll! {
|
||||
// MIX_NODES
|
||||
for n in 0..2 {
|
||||
let tmp_node = calculate_dag_item(
|
||||
index * MIX_NODES as u32 + n as u32,
|
||||
cache,
|
||||
);
|
||||
|
||||
unroll! {
|
||||
// NODE_WORDS
|
||||
for w in 0..16 {
|
||||
mix[n].as_words_mut()[w] =
|
||||
fnv_hash(
|
||||
mix[n].as_words()[w],
|
||||
tmp_node.as_words()[w],
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// compress mix
|
||||
for i in 0..(MIX_WORDS / 4) {
|
||||
let w = i * 4;
|
||||
let mut reduction = *mix.get_unchecked(0).as_words().get_unchecked(w + 0);
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ *mix.get_unchecked(0).as_words().get_unchecked(w + 1);
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ *mix.get_unchecked(0).as_words().get_unchecked(w + 2);
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ *mix.get_unchecked(0).as_words().get_unchecked(w + 3);
|
||||
*mix.get_unchecked_mut(0).as_words_mut().get_unchecked_mut(i) = reduction;
|
||||
}
|
||||
let mix_words: [u32; MIX_WORDS] = unsafe { mem::transmute(mix) };
|
||||
|
||||
let mut mix_hash = [0u8; 32];
|
||||
let mut buf = [0u8; 32 + 64];
|
||||
ptr::copy_nonoverlapping(f_mix.get_unchecked_mut(0).bytes.as_ptr(), buf.as_mut_ptr(), 64);
|
||||
ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), buf[64..].as_mut_ptr(), 32);
|
||||
ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), mix_hash.as_mut_ptr(), 32);
|
||||
let mut value: H256 = [0u8; 32];
|
||||
sha3::sha3_256(value.as_mut_ptr(), value.len(), buf.as_ptr(), buf.len());
|
||||
ProofOfWork {
|
||||
mix_hash: mix_hash,
|
||||
value: value,
|
||||
{
|
||||
// This is an uninitialized buffer to begin with, but we iterate precisely `compress.len()`
|
||||
// times and set each index, leaving the array fully initialized. THIS ONLY WORKS ON LITTLE-
|
||||
// ENDIAN MACHINES. See a future PR to make this and the rest of the code work correctly on
|
||||
// big-endian arches like mips.
|
||||
let compress: &mut [u32; MIX_WORDS / 4] =
|
||||
unsafe { make_const_array!(MIX_WORDS / 4, &mut buf.compress_bytes) };
|
||||
|
||||
// Compress mix
|
||||
debug_assert_eq!(MIX_WORDS / 4, 8);
|
||||
unroll! {
|
||||
for i in 0..8 {
|
||||
let w = i * 4;
|
||||
|
||||
let mut reduction = mix_words[w + 0];
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ mix_words[w + 1];
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ mix_words[w + 2];
|
||||
reduction = reduction.wrapping_mul(FNV_PRIME) ^ mix_words[w + 3];
|
||||
compress[i] = reduction;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mix_hash = buf.compress_bytes;
|
||||
|
||||
let value: H256 = unsafe {
|
||||
// We can interpret the buffer as an array of `u8`s, since it's `repr(C)`.
|
||||
let read_ptr: *const u8 = mem::transmute(&buf);
|
||||
// We overwrite the second half since `keccak_256` has an internal buffer and so allows
|
||||
// overlapping arrays as input.
|
||||
let write_ptr: *mut u8 = mem::transmute(&mut buf.compress_bytes);
|
||||
keccak_256::unchecked(
|
||||
write_ptr,
|
||||
buf.compress_bytes.len(),
|
||||
read_ptr,
|
||||
buf.half_mix.bytes.len() + buf.compress_bytes.len(),
|
||||
);
|
||||
buf.compress_bytes
|
||||
};
|
||||
|
||||
ProofOfWork { mix_hash: mix_hash, value: value }
|
||||
}
|
||||
|
||||
// TODO: Use the `simd` crate
|
||||
fn calculate_dag_item(node_index: u32, cache: &[Node]) -> Node {
|
||||
unsafe {
|
||||
let num_parent_nodes = cache.len();
|
||||
let init = cache.get_unchecked(node_index as usize % num_parent_nodes);
|
||||
let mut ret = init.clone();
|
||||
*ret.as_words_mut().get_unchecked_mut(0) ^= node_index;
|
||||
sha3::sha3_512(ret.bytes.as_mut_ptr(), ret.bytes.len(), ret.bytes.as_ptr(), ret.bytes.len());
|
||||
let num_parent_nodes = cache.len();
|
||||
let mut ret = cache[node_index as usize % num_parent_nodes].clone();
|
||||
ret.as_words_mut()[0] ^= node_index;
|
||||
|
||||
for i in 0..ETHASH_DATASET_PARENTS {
|
||||
let parent_index = fnv_hash(node_index ^ i, *ret.as_words().get_unchecked(i as usize % NODE_WORDS)) % num_parent_nodes as u32;
|
||||
let parent = cache.get_unchecked(parent_index as usize);
|
||||
for w in 0..NODE_WORDS {
|
||||
*ret.as_words_mut().get_unchecked_mut(w) = fnv_hash(*ret.as_words().get_unchecked(w), *parent.as_words().get_unchecked(w));
|
||||
}
|
||||
}
|
||||
sha3::sha3_512(ret.bytes.as_mut_ptr(), ret.bytes.len(), ret.bytes.as_ptr(), ret.bytes.len());
|
||||
ret
|
||||
}
|
||||
}
|
||||
keccak_512::inplace(ret.as_bytes_mut());
|
||||
|
||||
fn light_new<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> Light {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
let seedhash = seed_compute.get_seedhash(block_number);
|
||||
let cache_size = get_cache_size(block_number);
|
||||
debug_assert_eq!(NODE_WORDS, 16);
|
||||
for i in 0..ETHASH_DATASET_PARENTS as u32 {
|
||||
let parent_index = fnv_hash(node_index ^ i, ret.as_words()[i as usize % NODE_WORDS]) %
|
||||
num_parent_nodes as u32;
|
||||
let parent = &cache[parent_index as usize];
|
||||
|
||||
assert!(cache_size % NODE_BYTES == 0, "Unaligned cache size");
|
||||
let num_nodes = cache_size / NODE_BYTES;
|
||||
|
||||
let mut nodes = Vec::with_capacity(num_nodes);
|
||||
nodes.resize(num_nodes, Node::default());
|
||||
unsafe {
|
||||
sha3_512(&seedhash[0..32], &mut nodes.get_unchecked_mut(0).bytes);
|
||||
for i in 1..num_nodes {
|
||||
sha3::sha3_512(nodes.get_unchecked_mut(i).bytes.as_mut_ptr(), NODE_BYTES, nodes.get_unchecked(i - 1).bytes.as_ptr(), NODE_BYTES);
|
||||
}
|
||||
|
||||
for _ in 0..ETHASH_CACHE_ROUNDS {
|
||||
for i in 0..num_nodes {
|
||||
let idx = *nodes.get_unchecked_mut(i).as_words().get_unchecked(0) as usize % num_nodes;
|
||||
let mut data = nodes.get_unchecked((num_nodes - 1 + i) % num_nodes).clone();
|
||||
for w in 0..NODE_WORDS {
|
||||
*data.as_words_mut().get_unchecked_mut(w) ^= *nodes.get_unchecked(idx).as_words().get_unchecked(w);
|
||||
}
|
||||
sha3_512(&data.bytes, &mut nodes.get_unchecked_mut(i).bytes);
|
||||
unroll! {
|
||||
for w in 0..16 {
|
||||
ret.as_words_mut()[w] = fnv_hash(ret.as_words()[w], parent.as_words()[w]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Light {
|
||||
block_number,
|
||||
cache_dir: cache_dir.as_ref().to_path_buf(),
|
||||
cache: nodes,
|
||||
seed_compute: Mutex::new(seed_compute),
|
||||
}
|
||||
keccak_512::inplace(ret.as_bytes_mut());
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
static CHARS: &'static [u8] = b"0123456789abcdef";
|
||||
fn to_hex(bytes: &[u8]) -> String {
|
||||
let mut v = Vec::with_capacity(bytes.len() * 2);
|
||||
for &byte in bytes.iter() {
|
||||
v.push(CHARS[(byte >> 4) as usize]);
|
||||
v.push(CHARS[(byte & 0xf) as usize]);
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use std::fs;
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_size() {
|
||||
// https://github.com/ethereum/wiki/wiki/Ethash/ef6b93f9596746a088ea95d01ca2778be43ae68f#data-sizes
|
||||
assert_eq!(16776896usize, get_cache_size(0));
|
||||
assert_eq!(16776896usize, get_cache_size(1));
|
||||
assert_eq!(16776896usize, get_cache_size(ETHASH_EPOCH_LENGTH - 1));
|
||||
assert_eq!(16907456usize, get_cache_size(ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(16907456usize, get_cache_size(ETHASH_EPOCH_LENGTH + 1));
|
||||
assert_eq!(284950208usize, get_cache_size(2046 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(285081536usize, get_cache_size(2047 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(285081536usize, get_cache_size(2048 * ETHASH_EPOCH_LENGTH - 1));
|
||||
}
|
||||
|
||||
unsafe { String::from_utf8_unchecked(v) }
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_size() {
|
||||
// https://github.com/ethereum/wiki/wiki/Ethash/ef6b93f9596746a088ea95d01ca2778be43ae68f#data-sizes
|
||||
assert_eq!(16776896usize, get_cache_size(0));
|
||||
assert_eq!(16776896usize, get_cache_size(1));
|
||||
assert_eq!(16776896usize, get_cache_size(ETHASH_EPOCH_LENGTH - 1));
|
||||
assert_eq!(16907456usize, get_cache_size(ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(16907456usize, get_cache_size(ETHASH_EPOCH_LENGTH + 1));
|
||||
assert_eq!(284950208usize, get_cache_size(2046 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(285081536usize, get_cache_size(2047 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(285081536usize, get_cache_size(2048 * ETHASH_EPOCH_LENGTH - 1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_data_size() {
|
||||
// https://github.com/ethereum/wiki/wiki/Ethash/ef6b93f9596746a088ea95d01ca2778be43ae68f#data-sizes
|
||||
assert_eq!(1073739904usize, get_data_size(0));
|
||||
assert_eq!(1073739904usize, get_data_size(1));
|
||||
assert_eq!(1073739904usize, get_data_size(ETHASH_EPOCH_LENGTH - 1));
|
||||
assert_eq!(1082130304usize, get_data_size(ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(1082130304usize, get_data_size(ETHASH_EPOCH_LENGTH + 1));
|
||||
assert_eq!(18236833408usize, get_data_size(2046 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(18245220736usize, get_data_size(2047 * ETHASH_EPOCH_LENGTH));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_difficulty_test() {
|
||||
let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
|
||||
let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d];
|
||||
let nonce = 0xd7b3ac70a301a249;
|
||||
let boundary_good = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
|
||||
assert_eq!(quick_get_difficulty(&hash, nonce, &mix_hash)[..], boundary_good[..]);
|
||||
let boundary_bad = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3a, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
|
||||
assert!(quick_get_difficulty(&hash, nonce, &mix_hash)[..] != boundary_bad[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_light_compute() {
|
||||
let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
|
||||
let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d];
|
||||
let boundary = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
|
||||
let nonce = 0xd7b3ac70a301a249;
|
||||
// difficulty = 0x085657254bd9u64;
|
||||
let light = Light::new(&::std::env::temp_dir(), 486382);
|
||||
let result = light_compute(&light, &hash, nonce);
|
||||
assert_eq!(result.mix_hash[..], mix_hash[..]);
|
||||
assert_eq!(result.value[..], boundary[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_once() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.get_seedhash(486382), hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_zero() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
assert_eq!(seed_compute.get_seedhash(0), [0u8; 32]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_after_older() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
// calculating an older value first shouldn't affect the result
|
||||
let _ = seed_compute.get_seedhash(50000);
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.get_seedhash(486382), hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_after_newer() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
// calculating an newer value first shouldn't affect the result
|
||||
let _ = seed_compute.get_seedhash(972764);
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.get_seedhash(486382), hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_drop_old_data() {
|
||||
let path = ::std::env::temp_dir();
|
||||
let first = Light::new(&path, 0).to_file().unwrap();
|
||||
|
||||
let second = Light::new(&path, ETHASH_EPOCH_LENGTH).to_file().unwrap();
|
||||
assert!(fs::metadata(&first).is_ok());
|
||||
|
||||
let _ = Light::new(&path, ETHASH_EPOCH_LENGTH * 2).to_file();
|
||||
assert!(fs::metadata(&first).is_err());
|
||||
assert!(fs::metadata(&second).is_ok());
|
||||
|
||||
let _ = Light::new(&path, ETHASH_EPOCH_LENGTH * 3).to_file();
|
||||
assert!(fs::metadata(&second).is_err());
|
||||
#[test]
|
||||
fn test_get_data_size() {
|
||||
// https://github.com/ethereum/wiki/wiki/Ethash/ef6b93f9596746a088ea95d01ca2778be43ae68f#data-sizes
|
||||
assert_eq!(1073739904usize, get_data_size(0));
|
||||
assert_eq!(1073739904usize, get_data_size(1));
|
||||
assert_eq!(1073739904usize, get_data_size(ETHASH_EPOCH_LENGTH - 1));
|
||||
assert_eq!(1082130304usize, get_data_size(ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(1082130304usize, get_data_size(ETHASH_EPOCH_LENGTH + 1));
|
||||
assert_eq!(18236833408usize, get_data_size(2046 * ETHASH_EPOCH_LENGTH));
|
||||
assert_eq!(18245220736usize, get_data_size(2047 * ETHASH_EPOCH_LENGTH));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_difficulty_test() {
|
||||
let hash = [
|
||||
0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3,
|
||||
0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94,
|
||||
0x05, 0x52, 0x7d, 0x72,
|
||||
];
|
||||
let mix_hash = [
|
||||
0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce,
|
||||
0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a,
|
||||
0x64, 0x31, 0xab, 0x6d,
|
||||
];
|
||||
let nonce = 0xd7b3ac70a301a249;
|
||||
let boundary_good = [
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2,
|
||||
0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a,
|
||||
0xe9, 0x7e, 0x53, 0x84,
|
||||
];
|
||||
assert_eq!(quick_get_difficulty(&hash, nonce, &mix_hash)[..], boundary_good[..]);
|
||||
let boundary_bad = [
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3a, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2,
|
||||
0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a,
|
||||
0xe9, 0x7e, 0x53, 0x84,
|
||||
];
|
||||
assert!(quick_get_difficulty(&hash, nonce, &mix_hash)[..] != boundary_bad[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_light_compute() {
|
||||
let hash = [
|
||||
0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3,
|
||||
0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94,
|
||||
0x05, 0x52, 0x7d, 0x72,
|
||||
];
|
||||
let mix_hash = [
|
||||
0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce,
|
||||
0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a,
|
||||
0x64, 0x31, 0xab, 0x6d,
|
||||
];
|
||||
let boundary = [
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2,
|
||||
0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a,
|
||||
0xe9, 0x7e, 0x53, 0x84,
|
||||
];
|
||||
let nonce = 0xd7b3ac70a301a249;
|
||||
// difficulty = 0x085657254bd9u64;
|
||||
let light = NodeCacheBuilder::new(None).light(&::std::env::temp_dir(), 486382);
|
||||
let result = light_compute(&light, &hash, nonce);
|
||||
assert_eq!(result.mix_hash[..], mix_hash[..]);
|
||||
assert_eq!(result.value[..], boundary[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_drop_old_data() {
|
||||
let path = ::std::env::temp_dir();
|
||||
let builder = NodeCacheBuilder::new(None);
|
||||
let first = builder.light(&path, 0).to_file().unwrap().to_owned();
|
||||
|
||||
let second = builder.light(&path, ETHASH_EPOCH_LENGTH).to_file().unwrap().to_owned();
|
||||
assert!(fs::metadata(&first).is_ok());
|
||||
|
||||
let _ = builder.light(&path, ETHASH_EPOCH_LENGTH * 2).to_file();
|
||||
assert!(fs::metadata(&first).is_err());
|
||||
assert!(fs::metadata(&second).is_ok());
|
||||
|
||||
let _ = builder.light(&path, ETHASH_EPOCH_LENGTH * 3).to_file();
|
||||
assert!(fs::metadata(&second).is_err());
|
||||
}
|
||||
}
|
||||
|
||||
52
ethash/src/keccak.rs
Normal file
52
ethash/src/keccak.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
extern crate hash;
|
||||
|
||||
pub type H256 = [u8; 32];
|
||||
|
||||
pub mod keccak_512 {
|
||||
use super::hash;
|
||||
|
||||
pub use self::hash::keccak_512 as unchecked;
|
||||
|
||||
pub fn write(input: &[u8], output: &mut [u8]) {
|
||||
unsafe { hash::keccak_512(output.as_mut_ptr(), output.len(), input.as_ptr(), input.len()) };
|
||||
}
|
||||
|
||||
pub fn inplace(input: &mut [u8]) {
|
||||
// This is safe since `sha3_*` uses an internal buffer and copies the result to the output. This
|
||||
// means that we can reuse the input buffer for both input and output.
|
||||
unsafe { hash::keccak_512(input.as_mut_ptr(), input.len(), input.as_ptr(), input.len()) };
|
||||
}
|
||||
}
|
||||
|
||||
pub mod keccak_256 {
|
||||
use super::hash;
|
||||
|
||||
pub use self::hash::keccak_256 as unchecked;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn write(input: &[u8], output: &mut [u8]) {
|
||||
unsafe { hash::keccak_256(output.as_mut_ptr(), output.len(), input.as_ptr(), input.len()) };
|
||||
}
|
||||
|
||||
pub fn inplace(input: &mut [u8]) {
|
||||
// This is safe since `sha3_*` uses an internal buffer and copies the result to the output. This
|
||||
// means that we can reuse the input buffer for both input and output.
|
||||
unsafe { hash::keccak_256(input.as_mut_ptr(), input.len(), input.as_ptr(), input.len()) };
|
||||
}
|
||||
}
|
||||
@@ -14,23 +14,35 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Ethash implementation
|
||||
//! See https://github.com/ethereum/wiki/wiki/Ethash
|
||||
#![cfg_attr(feature = "benches", feature(test))]
|
||||
|
||||
extern crate primal;
|
||||
extern crate sha3;
|
||||
extern crate parking_lot;
|
||||
extern crate either;
|
||||
extern crate memmap;
|
||||
|
||||
#[macro_use]
|
||||
extern crate crunchy;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
mod compute;
|
||||
|
||||
mod compute;
|
||||
mod seed_compute;
|
||||
mod cache;
|
||||
mod keccak;
|
||||
mod shared;
|
||||
|
||||
pub use cache::{NodeCacheBuilder, OptimizeFor};
|
||||
pub use compute::{ProofOfWork, quick_get_difficulty, slow_hash_block_number};
|
||||
use compute::Light;
|
||||
use keccak::H256;
|
||||
use parking_lot::Mutex;
|
||||
pub use seed_compute::SeedHashCompute;
|
||||
pub use shared::ETHASH_EPOCH_LENGTH;
|
||||
use std::mem;
|
||||
use std::path::{Path, PathBuf};
|
||||
use compute::Light;
|
||||
pub use compute::{ETHASH_EPOCH_LENGTH, H256, ProofOfWork, SeedHashCompute, quick_get_difficulty, slow_get_seedhash};
|
||||
|
||||
use std::sync::Arc;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
struct LightCache {
|
||||
recent_epoch: Option<u64>,
|
||||
@@ -41,15 +53,17 @@ struct LightCache {
|
||||
|
||||
/// Light/Full cache manager.
|
||||
pub struct EthashManager {
|
||||
nodecache_builder: NodeCacheBuilder,
|
||||
cache: Mutex<LightCache>,
|
||||
cache_dir: PathBuf,
|
||||
}
|
||||
|
||||
impl EthashManager {
|
||||
/// Create a new new instance of ethash manager
|
||||
pub fn new<T: AsRef<Path>>(cache_dir: T) -> EthashManager {
|
||||
pub fn new<T: Into<Option<OptimizeFor>>>(cache_dir: &Path, optimize_for: T) -> EthashManager {
|
||||
EthashManager {
|
||||
cache_dir: cache_dir.as_ref().to_path_buf(),
|
||||
cache_dir: cache_dir.to_path_buf(),
|
||||
nodecache_builder: NodeCacheBuilder::new(optimize_for.into().unwrap_or_default()),
|
||||
cache: Mutex::new(LightCache {
|
||||
recent_epoch: None,
|
||||
recent: None,
|
||||
@@ -91,11 +105,19 @@ impl EthashManager {
|
||||
};
|
||||
match light {
|
||||
None => {
|
||||
let light = match Light::from_file(&self.cache_dir, block_number) {
|
||||
let light = match Light::from_file_with_builder(
|
||||
&self.nodecache_builder,
|
||||
&self.cache_dir,
|
||||
block_number,
|
||||
) {
|
||||
Ok(light) => Arc::new(light),
|
||||
Err(e) => {
|
||||
debug!("Light cache file not found for {}:{}", block_number, e);
|
||||
let light = Light::new(&self.cache_dir, block_number);
|
||||
let mut light = Light::new_with_builder(
|
||||
&self.nodecache_builder,
|
||||
&self.cache_dir,
|
||||
block_number,
|
||||
);
|
||||
if let Err(e) = light.to_file() {
|
||||
warn!("Light cache file write error: {}", e);
|
||||
}
|
||||
@@ -115,7 +137,7 @@ impl EthashManager {
|
||||
|
||||
#[test]
|
||||
fn test_lru() {
|
||||
let ethash = EthashManager::new(&::std::env::temp_dir());
|
||||
let ethash = EthashManager::new(&::std::env::temp_dir(), None);
|
||||
let hash = [0u8; 32];
|
||||
ethash.compute_light(1, &hash, 1);
|
||||
ethash.compute_light(50000, &hash, 1);
|
||||
@@ -128,3 +150,94 @@ fn test_lru() {
|
||||
assert_eq!(ethash.cache.lock().recent_epoch.unwrap(), 2);
|
||||
assert_eq!(ethash.cache.lock().prev_epoch.unwrap(), 0);
|
||||
}
|
||||
|
||||
#[cfg(feature = "benches")]
|
||||
mod benchmarks {
|
||||
extern crate test;
|
||||
|
||||
use self::test::Bencher;
|
||||
use cache::{NodeCacheBuilder, OptimizeFor};
|
||||
use compute::{Light, light_compute};
|
||||
|
||||
const HASH: [u8; 32] = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe,
|
||||
0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f,
|
||||
0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
|
||||
const NONCE: u64 = 0xd7b3ac70a301a249;
|
||||
|
||||
#[bench]
|
||||
fn bench_light_compute_memmap(b: &mut Bencher) {
|
||||
use std::env;
|
||||
|
||||
let builder = NodeCacheBuilder::new(OptimizeFor::Memory);
|
||||
let light = Light::new_with_builder(&builder, &env::temp_dir(), 486382);
|
||||
|
||||
b.iter(|| light_compute(&light, &HASH, NONCE));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_light_compute_memory(b: &mut Bencher) {
|
||||
use std::env;
|
||||
|
||||
let light = Light::new(&env::temp_dir(), 486382);
|
||||
|
||||
b.iter(|| light_compute(&light, &HASH, NONCE));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
#[ignore]
|
||||
fn bench_light_new_round_trip_memmap(b: &mut Bencher) {
|
||||
use std::env;
|
||||
|
||||
b.iter(|| {
|
||||
let builder = NodeCacheBuilder::new(OptimizeFor::Memory);
|
||||
let light = Light::new_with_builder(&builder, &env::temp_dir(), 486382);
|
||||
light_compute(&light, &HASH, NONCE);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
#[ignore]
|
||||
fn bench_light_new_round_trip_memory(b: &mut Bencher) {
|
||||
use std::env;
|
||||
b.iter(|| {
|
||||
let light = Light::new(&env::temp_dir(), 486382);
|
||||
light_compute(&light, &HASH, NONCE);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_light_from_file_round_trip_memory(b: &mut Bencher) {
|
||||
use std::env;
|
||||
|
||||
let dir = env::temp_dir();
|
||||
let height = 486382;
|
||||
{
|
||||
let mut dummy = Light::new(&dir, height);
|
||||
dummy.to_file().unwrap();
|
||||
}
|
||||
|
||||
b.iter(|| {
|
||||
let light = Light::from_file(&dir, 486382).unwrap();
|
||||
light_compute(&light, &HASH, NONCE);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_light_from_file_round_trip_memmap(b: &mut Bencher) {
|
||||
use std::env;
|
||||
|
||||
let dir = env::temp_dir();
|
||||
let height = 486382;
|
||||
{
|
||||
let builder = NodeCacheBuilder::new(OptimizeFor::Memory);
|
||||
let mut dummy = Light::new_with_builder(&builder, &dir, height);
|
||||
dummy.to_file().unwrap();
|
||||
}
|
||||
|
||||
b.iter(|| {
|
||||
let builder = NodeCacheBuilder::new(OptimizeFor::Memory);
|
||||
let light = Light::from_file_with_builder(&builder, &dir, 486382).unwrap();
|
||||
light_compute(&light, &HASH, NONCE);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
109
ethash/src/seed_compute.rs
Normal file
109
ethash/src/seed_compute.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use shared;
|
||||
use keccak::{keccak_256, H256};
|
||||
|
||||
use std::cell::Cell;
|
||||
|
||||
pub struct SeedHashCompute {
|
||||
prev_epoch: Cell<u64>,
|
||||
prev_seedhash: Cell<H256>,
|
||||
}
|
||||
|
||||
impl SeedHashCompute {
|
||||
#[inline]
|
||||
pub fn new() -> SeedHashCompute {
|
||||
SeedHashCompute {
|
||||
prev_epoch: Cell::new(0),
|
||||
prev_seedhash: Cell::new([0u8; 32]),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn reset_cache(&self) {
|
||||
self.prev_epoch.set(0);
|
||||
self.prev_seedhash.set([0u8; 32]);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn hash_block_number(&self, block_number: u64) -> H256 {
|
||||
self.hash_epoch(shared::epoch(block_number))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn hash_epoch(&self, epoch: u64) -> H256 {
|
||||
if epoch < self.prev_epoch.get() {
|
||||
// can't build on previous hash if requesting an older block
|
||||
self.reset_cache();
|
||||
}
|
||||
if epoch > self.prev_epoch.get() {
|
||||
let seed_hash = SeedHashCompute::resume_compute_seedhash(
|
||||
self.prev_seedhash.get(),
|
||||
self.prev_epoch.get(),
|
||||
epoch,
|
||||
);
|
||||
self.prev_seedhash.set(seed_hash);
|
||||
self.prev_epoch.set(epoch);
|
||||
}
|
||||
self.prev_seedhash.get()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn resume_compute_seedhash(mut hash: H256, start_epoch: u64, end_epoch: u64) -> H256 {
|
||||
for _ in start_epoch..end_epoch {
|
||||
keccak_256::inplace(&mut hash);
|
||||
}
|
||||
hash
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::SeedHashCompute;
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_once() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.hash_block_number(486382), hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_zero() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
assert_eq!(seed_compute.hash_block_number(0), [0u8; 32]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_after_older() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
// calculating an older value first shouldn't affect the result
|
||||
let _ = seed_compute.hash_block_number(50000);
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.hash_block_number(486382), hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_seed_compute_after_newer() {
|
||||
let seed_compute = SeedHashCompute::new();
|
||||
// calculating an newer value first shouldn't affect the result
|
||||
let _ = seed_compute.hash_block_number(972764);
|
||||
let hash = [241, 175, 44, 134, 39, 121, 245, 239, 228, 236, 43, 160, 195, 152, 46, 7, 199, 5, 253, 147, 241, 206, 98, 43, 3, 104, 17, 40, 192, 79, 106, 162];
|
||||
assert_eq!(seed_compute.hash_block_number(486382), hash);
|
||||
}
|
||||
|
||||
}
|
||||
149
ethash/src/shared.rs
Normal file
149
ethash/src/shared.rs
Normal file
@@ -0,0 +1,149 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use primal::is_prime;
|
||||
|
||||
pub const DATASET_BYTES_INIT: u64 = 1 << 30;
|
||||
pub const DATASET_BYTES_GROWTH: u64 = 1 << 23;
|
||||
pub const CACHE_BYTES_INIT: u64 = 1 << 24;
|
||||
pub const CACHE_BYTES_GROWTH: u64 = 1 << 17;
|
||||
|
||||
pub const ETHASH_EPOCH_LENGTH: u64 = 30000;
|
||||
pub const ETHASH_CACHE_ROUNDS: usize = 3;
|
||||
pub const ETHASH_MIX_BYTES: usize = 128;
|
||||
pub const ETHASH_ACCESSES: usize = 64;
|
||||
pub const ETHASH_DATASET_PARENTS: u32 = 256;
|
||||
pub const NODE_DWORDS: usize = NODE_WORDS / 2;
|
||||
pub const NODE_WORDS: usize = NODE_BYTES / 4;
|
||||
pub const NODE_BYTES: usize = 64;
|
||||
|
||||
pub fn epoch(block_number: u64) -> u64 {
|
||||
block_number / ETHASH_EPOCH_LENGTH
|
||||
}
|
||||
|
||||
static CHARS: &'static [u8] = b"0123456789abcdef";
|
||||
pub fn to_hex(bytes: &[u8]) -> String {
|
||||
let mut v = Vec::with_capacity(bytes.len() * 2);
|
||||
for &byte in bytes.iter() {
|
||||
v.push(CHARS[(byte >> 4) as usize]);
|
||||
v.push(CHARS[(byte & 0xf) as usize]);
|
||||
}
|
||||
|
||||
unsafe { String::from_utf8_unchecked(v) }
|
||||
}
|
||||
|
||||
pub fn get_cache_size(block_number: u64) -> usize {
|
||||
// TODO: Memoise
|
||||
let mut sz: u64 = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
|
||||
sz = sz - NODE_BYTES as u64;
|
||||
while !is_prime(sz / NODE_BYTES as u64) {
|
||||
sz = sz - 2 * NODE_BYTES as u64;
|
||||
}
|
||||
sz as usize
|
||||
}
|
||||
|
||||
pub fn get_data_size(block_number: u64) -> usize {
|
||||
// TODO: Memoise
|
||||
let mut sz: u64 = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
|
||||
sz = sz - ETHASH_MIX_BYTES as u64;
|
||||
while !is_prime(sz / ETHASH_MIX_BYTES as u64) {
|
||||
sz = sz - 2 * ETHASH_MIX_BYTES as u64;
|
||||
}
|
||||
sz as usize
|
||||
}
|
||||
|
||||
pub type NodeBytes = [u8; NODE_BYTES];
|
||||
pub type NodeWords = [u32; NODE_WORDS];
|
||||
pub type NodeDwords = [u64; NODE_DWORDS];
|
||||
|
||||
macro_rules! static_assert_size_eq {
|
||||
(@inner $a:ty, $b:ty, $($rest:ty),*) => {
|
||||
fn first() {
|
||||
static_assert_size_eq!($a, $b);
|
||||
}
|
||||
|
||||
fn second() {
|
||||
static_assert_size_eq!($b, $($rest),*);
|
||||
}
|
||||
};
|
||||
(@inner $a:ty, $b:ty) => {
|
||||
unsafe {
|
||||
let val: $b = ::std::mem::uninitialized();
|
||||
let _: $a = ::std::mem::transmute(val);
|
||||
}
|
||||
};
|
||||
($($rest:ty),*) => {
|
||||
static_assert_size_eq!(size_eq: $($rest),*);
|
||||
};
|
||||
($name:ident : $($rest:ty),*) => {
|
||||
#[allow(dead_code)]
|
||||
fn $name() {
|
||||
static_assert_size_eq!(@inner $($rest),*);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static_assert_size_eq!(Node, NodeBytes, NodeWords, NodeDwords);
|
||||
|
||||
#[repr(C)]
|
||||
pub union Node {
|
||||
pub dwords: NodeDwords,
|
||||
pub words: NodeWords,
|
||||
pub bytes: NodeBytes,
|
||||
}
|
||||
|
||||
impl Clone for Node {
|
||||
fn clone(&self) -> Self {
|
||||
unsafe { Node { bytes: *&self.bytes } }
|
||||
}
|
||||
}
|
||||
|
||||
// We use `inline(always)` because I was experiencing an 100% slowdown and `perf` showed that these
|
||||
// calls were taking up ~30% of the runtime. Adding these annotations fixes the issue. Remove at
|
||||
// your peril, if and only if you have benchmarks to prove that this doesn't reintroduce the
|
||||
// performance regression. It's not caused by the `debug_assert_eq!` either, your guess is as good
|
||||
// as mine.
|
||||
impl Node {
|
||||
#[inline(always)]
|
||||
pub fn as_bytes(&self) -> &NodeBytes {
|
||||
unsafe { &self.bytes }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_bytes_mut(&mut self) -> &mut NodeBytes {
|
||||
unsafe { &mut self.bytes }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_words(&self) -> &NodeWords {
|
||||
unsafe { &self.words }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_words_mut(&mut self) -> &mut NodeWords {
|
||||
unsafe { &mut self.words }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_dwords(&self) -> &NodeDwords {
|
||||
unsafe { &self.dwords }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn as_dwords_mut(&mut self) -> &mut NodeDwords {
|
||||
unsafe { &mut self.dwords }
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ description = "Ethcore library"
|
||||
homepage = "http://parity.io"
|
||||
license = "GPL-3.0"
|
||||
name = "ethcore"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
build = "build.rs"
|
||||
|
||||
@@ -11,17 +11,19 @@ build = "build.rs"
|
||||
"ethcore-ipc-codegen" = { path = "../ipc/codegen" }
|
||||
|
||||
[dependencies]
|
||||
bit-set = "0.4"
|
||||
ansi_term = "0.9"
|
||||
bloomchain = "0.1"
|
||||
bn = { git = "https://github.com/paritytech/bn" }
|
||||
byteorder = "1.0"
|
||||
clippy = { version = "0.0.103", optional = true}
|
||||
common-types = { path = "types" }
|
||||
crossbeam = "0.2.9"
|
||||
env_logger = "0.4"
|
||||
ethabi = "2.0"
|
||||
ethash = { path = "../ethash" }
|
||||
ethcore-bloom-journal = { path = "../util/bloom" }
|
||||
ethcore-bytes = { path = "../util/bytes" }
|
||||
hashdb = { path = "../util/hashdb" }
|
||||
memorydb = { path = "../util/memorydb" }
|
||||
patricia_trie = { path = "../util/patricia_trie" }
|
||||
ethcore-devtools = { path = "../devtools" }
|
||||
ethcore-io = { path = "../util/io" }
|
||||
ethcore-ipc = { path = "../ipc/rpc" }
|
||||
@@ -29,30 +31,50 @@ ethcore-ipc-nano = { path = "../ipc/nano" }
|
||||
ethcore-logger = { path = "../logger" }
|
||||
ethcore-stratum = { path = "../stratum" }
|
||||
ethcore-util = { path = "../util" }
|
||||
ethcore-bigint = { path = "../util/bigint" }
|
||||
ethjson = { path = "../json" }
|
||||
ethkey = { path = "../ethkey" }
|
||||
ethstore = { path = "../ethstore" }
|
||||
evm = { path = "evm" }
|
||||
futures = "0.1"
|
||||
hardware-wallet = { path = "../hw" }
|
||||
heapsize = "0.4"
|
||||
hyper = { git = "https://github.com/paritytech/hyper", default-features = false }
|
||||
itertools = "0.5"
|
||||
lazy_static = "0.2"
|
||||
linked-hash-map = "0.3.0"
|
||||
linked-hash-map = "0.5"
|
||||
log = "0.3"
|
||||
lru-cache = "0.1.0"
|
||||
lru-cache = "0.1"
|
||||
native-contracts = { path = "native_contracts" }
|
||||
num = "0.1"
|
||||
num_cpus = "1.2"
|
||||
parity-machine = { path = "../machine" }
|
||||
parking_lot = "0.4"
|
||||
price-info = { path = "../price-info" }
|
||||
rayon = "0.8"
|
||||
rand = "0.3"
|
||||
rlp = { path = "../util/rlp" }
|
||||
rlp_derive = { path = "../util/rlp_derive" }
|
||||
kvdb = { path = "../util/kvdb" }
|
||||
kvdb-rocksdb = { path = "../util/kvdb-rocksdb" }
|
||||
kvdb-memorydb = { path = "../util/kvdb-memorydb" }
|
||||
util-error = { path = "../util/error" }
|
||||
snappy = { path = "../util/snappy" }
|
||||
migration = { path = "../util/migration" }
|
||||
macros = { path = "../util/macros" }
|
||||
rust-crypto = "0.2.34"
|
||||
rustc-hex = "1.0"
|
||||
rustc-serialize = "0.3"
|
||||
semver = "0.6"
|
||||
stats = { path = "../util/stats" }
|
||||
time = "0.1"
|
||||
transient-hashmap = "0.4"
|
||||
using_queue = { path = "../util/using_queue" }
|
||||
table = { path = "../util/table" }
|
||||
bloomable = { path = "../util/bloomable" }
|
||||
vm = { path = "vm" }
|
||||
wasm = { path = "wasm" }
|
||||
hash = { path = "../util/hash" }
|
||||
triehash = { path = "../util/triehash" }
|
||||
unexpected = { path = "../util/unexpected" }
|
||||
|
||||
[dev-dependencies]
|
||||
native-contracts = { path = "native_contracts", features = ["test_contracts"] }
|
||||
@@ -60,7 +82,7 @@ native-contracts = { path = "native_contracts", features = ["test_contracts"] }
|
||||
[features]
|
||||
jit = ["evm/jit"]
|
||||
evm-debug = ["slow-blocks"]
|
||||
evm-debug-tests = ["evm-debug"]
|
||||
evm-debug-tests = ["evm-debug", "evm/evm-debug-tests"]
|
||||
slow-blocks = [] # Use SLOW_TX_DURATION="50" (compile time!) to track transactions over 50ms
|
||||
json-tests = []
|
||||
test-heavy = []
|
||||
|
||||
98
ethcore/benches/evm.rs
Normal file
98
ethcore/benches/evm.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#![feature(test)]
|
||||
|
||||
extern crate test;
|
||||
extern crate ethcore_util as util;
|
||||
extern crate rand;
|
||||
extern crate bn;
|
||||
extern crate crypto;
|
||||
extern crate ethkey;
|
||||
extern crate rustc_hex;
|
||||
extern crate ethcore_bigint;
|
||||
|
||||
use self::test::{Bencher};
|
||||
use rand::{StdRng};
|
||||
|
||||
|
||||
#[bench]
|
||||
fn bn_128_pairing(b: &mut Bencher) {
|
||||
use bn::{pairing, G1, G2, Fr, Group};
|
||||
|
||||
let rng = &mut ::rand::thread_rng();
|
||||
|
||||
let sk0 = Fr::random(rng);
|
||||
let sk1 = Fr::random(rng);
|
||||
|
||||
let pk0 = G1::one() * sk0;
|
||||
let pk1 = G2::one() * sk1;
|
||||
|
||||
b.iter(|| {
|
||||
let _ = pairing(pk0, pk1);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bn_128_mul(b: &mut Bencher) {
|
||||
use bn::{AffineG1, G1, Fr, Group};
|
||||
|
||||
let mut rng = StdRng::new().unwrap();
|
||||
let p: G1 = G1::random(&mut rng);
|
||||
let fr = Fr::random(&mut rng);
|
||||
|
||||
b.iter(|| {
|
||||
let _ = AffineG1::from_jacobian(p * fr);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn sha256(b: &mut Bencher) {
|
||||
use crypto::sha2::Sha256;
|
||||
use crypto::digest::Digest;
|
||||
|
||||
let mut input: [u8; 256] = [0; 256];
|
||||
let mut out = [0; 32];
|
||||
|
||||
b.iter(|| {
|
||||
let mut sha = Sha256::new();
|
||||
sha.input(&input);
|
||||
sha.result(&mut input[0..32]);
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn ecrecover(b: &mut Bencher) {
|
||||
use rustc_hex::FromHex;
|
||||
use ethkey::{Signature, recover as ec_recover};
|
||||
use ethcore_bigint::hash::H256;
|
||||
let input = FromHex::from_hex("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad000000000000000000000000000000000000000000000000000000000000001b650acf9d3f5f0a2c799776a1254355d5f4061762a237396a99a0e0e3fc2bcd6729514a0dacb2e623ac4abd157cb18163ff942280db4d5caad66ddf941ba12e03").unwrap();
|
||||
let hash = H256::from_slice(&input[0..32]);
|
||||
let v = H256::from_slice(&input[32..64]);
|
||||
let r = H256::from_slice(&input[64..96]);
|
||||
let s = H256::from_slice(&input[96..128]);
|
||||
|
||||
let bit = match v[31] {
|
||||
27 | 28 if &v.0[..31] == &[0; 31] => v[31] - 27,
|
||||
_ => { return; },
|
||||
};
|
||||
|
||||
let s = Signature::from_rsv(&r, &s, bit);
|
||||
b.iter(|| {
|
||||
let _ = ec_recover(&s, &hash);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -8,16 +8,23 @@ bit-set = "0.4"
|
||||
byteorder = "1.0"
|
||||
common-types = { path = "../types" }
|
||||
ethcore-util = { path = "../../util" }
|
||||
ethcore-bigint = { path = "../../util/bigint" }
|
||||
evmjit = { path = "../../evmjit", optional = true }
|
||||
ethjson = { path = "../../json" }
|
||||
heapsize = "0.4"
|
||||
lazy_static = "0.2"
|
||||
log = "0.3"
|
||||
rlp = { path = "../../util/rlp" }
|
||||
parity-wasm = "0.12"
|
||||
vm = { path = "../vm" }
|
||||
ethcore-logger = { path = "../../logger" }
|
||||
wasm-utils = { git = "https://github.com/paritytech/wasm-utils" }
|
||||
hash = { path = "../../util/hash" }
|
||||
parking_lot = "0.4"
|
||||
|
||||
[dev-dependencies]
|
||||
rustc-hex = "1.0"
|
||||
|
||||
[features]
|
||||
jit = ["evmjit"]
|
||||
evm-debug = []
|
||||
evm-debug-tests = ["evm-debug"]
|
||||
|
||||
@@ -24,8 +24,10 @@ extern crate test;
|
||||
|
||||
use self::test::{Bencher, black_box};
|
||||
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use util::*;
|
||||
use evm::action_params::ActionParams;
|
||||
use vm::ActionParams;
|
||||
use evm::{self, Factory, VMType};
|
||||
use evm::tests::FakeExt;
|
||||
|
||||
|
||||
@@ -17,142 +17,8 @@
|
||||
//! Evm interface.
|
||||
|
||||
use std::{ops, cmp, fmt};
|
||||
use util::{U128, U256, U512, trie};
|
||||
use action_params::ActionParams;
|
||||
use {Ext};
|
||||
|
||||
use super::wasm;
|
||||
|
||||
/// Evm errors.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Error {
|
||||
/// `OutOfGas` is returned when transaction execution runs out of gas.
|
||||
/// The state should be reverted to the state from before the
|
||||
/// transaction execution. But it does not mean that transaction
|
||||
/// was invalid. Balance still should be transfered and nonce
|
||||
/// should be increased.
|
||||
OutOfGas,
|
||||
/// `BadJumpDestination` is returned when execution tried to move
|
||||
/// to position that wasn't marked with JUMPDEST instruction
|
||||
BadJumpDestination {
|
||||
/// Position the code tried to jump to.
|
||||
destination: usize
|
||||
},
|
||||
/// `BadInstructions` is returned when given instruction is not supported
|
||||
BadInstruction {
|
||||
/// Unrecognized opcode
|
||||
instruction: u8,
|
||||
},
|
||||
/// `StackUnderflow` when there is not enough stack elements to execute instruction
|
||||
StackUnderflow {
|
||||
/// Invoked instruction
|
||||
instruction: &'static str,
|
||||
/// How many stack elements was requested by instruction
|
||||
wanted: usize,
|
||||
/// How many elements were on stack
|
||||
on_stack: usize
|
||||
},
|
||||
/// When execution would exceed defined Stack Limit
|
||||
OutOfStack {
|
||||
/// Invoked instruction
|
||||
instruction: &'static str,
|
||||
/// How many stack elements instruction wanted to push
|
||||
wanted: usize,
|
||||
/// What was the stack limit
|
||||
limit: usize
|
||||
},
|
||||
/// Built-in contract failed on given input
|
||||
BuiltIn(&'static str),
|
||||
/// When execution tries to modify the state in static context
|
||||
MutableCallInStaticContext,
|
||||
/// Likely to cause consensus issues.
|
||||
Internal(String),
|
||||
/// Wasm runtime error
|
||||
Wasm(String),
|
||||
}
|
||||
|
||||
impl From<Box<trie::TrieError>> for Error {
|
||||
fn from(err: Box<trie::TrieError>) -> Self {
|
||||
Error::Internal(format!("Internal error: {}", err))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<wasm::RuntimeError> for Error {
|
||||
fn from(err: wasm::RuntimeError) -> Self {
|
||||
Error::Wasm(format!("Runtime error: {:?}", err))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
use self::Error::*;
|
||||
match *self {
|
||||
OutOfGas => write!(f, "Out of gas"),
|
||||
BadJumpDestination { destination } => write!(f, "Bad jump destination {:x}", destination),
|
||||
BadInstruction { instruction } => write!(f, "Bad instruction {:x}", instruction),
|
||||
StackUnderflow { instruction, wanted, on_stack } => write!(f, "Stack underflow {} {}/{}", instruction, wanted, on_stack),
|
||||
OutOfStack { instruction, wanted, limit } => write!(f, "Out of stack {} {}/{}", instruction, wanted, limit),
|
||||
BuiltIn(name) => write!(f, "Built-in failed: {}", name),
|
||||
Internal(ref msg) => write!(f, "Internal error: {}", msg),
|
||||
MutableCallInStaticContext => write!(f, "Mutable call in static context"),
|
||||
Wasm(ref msg) => write!(f, "Internal error: {}", msg),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A specialized version of Result over EVM errors.
|
||||
pub type Result<T> = ::std::result::Result<T, Error>;
|
||||
|
||||
/// Return data buffer. Holds memory from a previous call and a slice into that memory.
|
||||
#[derive(Debug)]
|
||||
pub struct ReturnData {
|
||||
mem: Vec<u8>,
|
||||
offset: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
impl ::std::ops::Deref for ReturnData {
|
||||
type Target = [u8];
|
||||
fn deref(&self) -> &[u8] {
|
||||
&self.mem[self.offset..self.offset + self.size]
|
||||
}
|
||||
}
|
||||
|
||||
impl ReturnData {
|
||||
/// Create empty `ReturnData`.
|
||||
pub fn empty() -> Self {
|
||||
ReturnData {
|
||||
mem: Vec::new(),
|
||||
offset: 0,
|
||||
size: 0,
|
||||
}
|
||||
}
|
||||
/// Create `ReturnData` from give buffer and slice.
|
||||
pub fn new(mem: Vec<u8>, offset: usize, size: usize) -> Self {
|
||||
ReturnData {
|
||||
mem: mem,
|
||||
offset: offset,
|
||||
size: size,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Gas Left: either it is a known value, or it needs to be computed by processing
|
||||
/// a return instruction.
|
||||
#[derive(Debug)]
|
||||
pub enum GasLeft {
|
||||
/// Known gas left
|
||||
Known(U256),
|
||||
/// Return or Revert instruction must be processed.
|
||||
NeedsReturn {
|
||||
/// Amount of gas left.
|
||||
gas_left: U256,
|
||||
/// Return data buffer.
|
||||
data: ReturnData,
|
||||
/// Apply or revert state changes on revert.
|
||||
apply_state: bool
|
||||
},
|
||||
}
|
||||
use bigint::prelude::{U128, U256, U512};
|
||||
use vm::{Ext, Result, ReturnData, GasLeft, Error};
|
||||
|
||||
/// Finalization result. Gas Left: either it is a known value, or it needs to be computed by processing
|
||||
/// a return instruction.
|
||||
@@ -179,7 +45,7 @@ impl Finalize for Result<GasLeft> {
|
||||
fn finalize<E: Ext>(self, ext: E) -> Result<FinalizationResult> {
|
||||
match self {
|
||||
Ok(GasLeft::Known(gas_left)) => Ok(FinalizationResult { gas_left: gas_left, apply_state: true, return_data: ReturnData::empty() }),
|
||||
Ok(GasLeft::NeedsReturn {gas_left, data, apply_state}) => ext.ret(&gas_left, &data).map(|gas_left| FinalizationResult {
|
||||
Ok(GasLeft::NeedsReturn {gas_left, data, apply_state}) => ext.ret(&gas_left, &data, apply_state).map(|gas_left| FinalizationResult {
|
||||
gas_left: gas_left,
|
||||
apply_state: apply_state,
|
||||
return_data: data,
|
||||
@@ -281,18 +147,9 @@ impl CostType for usize {
|
||||
}
|
||||
}
|
||||
|
||||
/// Evm interface
|
||||
pub trait Evm {
|
||||
/// This function should be used to execute transaction.
|
||||
///
|
||||
/// It returns either an error, a known amount of gas left, or parameters to be used
|
||||
/// to compute the final gas left.
|
||||
fn exec(&mut self, params: ActionParams, ext: &mut Ext) -> Result<GasLeft>;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use util::U256;
|
||||
use bigint::prelude::U256;
|
||||
use super::CostType;
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -17,8 +17,8 @@
|
||||
//! Evm factory.
|
||||
//!
|
||||
use std::sync::Arc;
|
||||
use evm::Evm;
|
||||
use util::U256;
|
||||
use vm::Vm;
|
||||
use bigint::prelude::U256;
|
||||
use super::interpreter::SharedCache;
|
||||
use super::vmtype::VMType;
|
||||
|
||||
@@ -33,7 +33,7 @@ impl Factory {
|
||||
/// Create fresh instance of VM
|
||||
/// Might choose implementation depending on supplied gas.
|
||||
#[cfg(feature = "jit")]
|
||||
pub fn create(&self, gas: U256) -> Box<Evm> {
|
||||
pub fn create(&self, gas: U256) -> Box<Vm> {
|
||||
match self.evm {
|
||||
VMType::Jit => {
|
||||
Box::new(super::jit::JitEvm::default())
|
||||
@@ -49,7 +49,7 @@ impl Factory {
|
||||
/// Create fresh instance of VM
|
||||
/// Might choose implementation depending on supplied gas.
|
||||
#[cfg(not(feature = "jit"))]
|
||||
pub fn create(&self, gas: U256) -> Box<Evm> {
|
||||
pub fn create(&self, gas: U256) -> Box<Vm> {
|
||||
match self.evm {
|
||||
VMType::Interpreter => if Self::can_fit_in_usize(gas) {
|
||||
Box::new(super::interpreter::Interpreter::<usize>::new(self.evm_cache.clone()))
|
||||
|
||||
@@ -14,18 +14,20 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use util::*;
|
||||
use std::cmp;
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use super::u256_to_address;
|
||||
|
||||
use {evm, ext};
|
||||
use {evm, vm};
|
||||
use instructions::{self, Instruction, InstructionInfo};
|
||||
use interpreter::stack::Stack;
|
||||
use schedule::Schedule;
|
||||
use vm::Schedule;
|
||||
|
||||
macro_rules! overflowing {
|
||||
($x: expr) => {{
|
||||
let (v, overflow) = $x;
|
||||
if overflow { return Err(evm::Error::OutOfGas); }
|
||||
if overflow { return Err(vm::Error::OutOfGas); }
|
||||
v
|
||||
}}
|
||||
}
|
||||
@@ -59,16 +61,16 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn verify_gas(&self, gas_cost: &Gas) -> evm::Result<()> {
|
||||
pub fn verify_gas(&self, gas_cost: &Gas) -> vm::Result<()> {
|
||||
match &self.current_gas < gas_cost {
|
||||
true => Err(evm::Error::OutOfGas),
|
||||
true => Err(vm::Error::OutOfGas),
|
||||
false => Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation
|
||||
/// and that we `requested` some.
|
||||
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<U256>) -> evm::Result<Gas> {
|
||||
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<U256>) -> vm::Result<Gas> {
|
||||
// Try converting requested gas to `Gas` (`U256/u64`)
|
||||
// but in EIP150 even if we request more we should never fail from OOG
|
||||
let requested = requested.map(Gas::from_u256);
|
||||
@@ -82,7 +84,7 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
||||
};
|
||||
|
||||
if let Some(Ok(r)) = requested {
|
||||
Ok(min(r, max_gas_provided))
|
||||
Ok(cmp::min(r, max_gas_provided))
|
||||
} else {
|
||||
Ok(max_gas_provided)
|
||||
}
|
||||
@@ -107,12 +109,12 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
||||
/// it will be the amount of gas that the current context provides to the child context.
|
||||
pub fn requirements(
|
||||
&mut self,
|
||||
ext: &ext::Ext,
|
||||
ext: &vm::Ext,
|
||||
instruction: Instruction,
|
||||
info: &InstructionInfo,
|
||||
stack: &Stack<U256>,
|
||||
current_mem_size: usize,
|
||||
) -> evm::Result<InstructionRequirements<Gas>> {
|
||||
) -> vm::Result<InstructionRequirements<Gas>> {
|
||||
let schedule = ext.schedule();
|
||||
let tier = instructions::get_tier_idx(info.tier);
|
||||
let default_gas = Gas::from(schedule.tier_step_gas[tier]);
|
||||
@@ -291,7 +293,7 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
||||
})
|
||||
}
|
||||
|
||||
fn mem_gas_cost(&self, schedule: &Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> {
|
||||
fn mem_gas_cost(&self, schedule: &Schedule, current_mem_size: usize, mem_size: &Gas) -> vm::Result<(Gas, Gas, usize)> {
|
||||
let gas_for_mem = |mem_size: Gas| {
|
||||
let s = mem_size >> 5;
|
||||
// s * memory_gas + s * s / quad_coeff_div
|
||||
@@ -319,12 +321,12 @@ impl<Gas: evm::CostType> Gasometer<Gas> {
|
||||
|
||||
|
||||
#[inline]
|
||||
fn mem_needed_const<Gas: evm::CostType>(mem: &U256, add: usize) -> evm::Result<Gas> {
|
||||
fn mem_needed_const<Gas: evm::CostType>(mem: &U256, add: usize) -> vm::Result<Gas> {
|
||||
Gas::from_u256(overflowing!(mem.overflowing_add(U256::from(add))))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn mem_needed<Gas: evm::CostType>(offset: &U256, size: &U256) -> evm::Result<Gas> {
|
||||
fn mem_needed<Gas: evm::CostType>(offset: &U256, size: &U256) -> vm::Result<Gas> {
|
||||
if size.is_zero() {
|
||||
return Ok(Gas::from(0));
|
||||
}
|
||||
|
||||
@@ -39,11 +39,11 @@ mod inner {
|
||||
use std::collections::HashMap;
|
||||
use std::time::{Instant, Duration};
|
||||
|
||||
use evm::interpreter::stack::Stack;
|
||||
use evm::instructions::{Instruction, InstructionInfo, INSTRUCTIONS};
|
||||
use evm::{CostType};
|
||||
use bigint::prelude::U256;
|
||||
|
||||
use util::U256;
|
||||
use interpreter::stack::Stack;
|
||||
use instructions::{Instruction, InstructionInfo, INSTRUCTIONS};
|
||||
use CostType;
|
||||
|
||||
macro_rules! evm_debug {
|
||||
($x: expr) => {
|
||||
@@ -110,7 +110,7 @@ mod inner {
|
||||
}
|
||||
|
||||
pub fn after_instruction(&mut self, instruction: Instruction) {
|
||||
let mut stats = self.stats.entry(instruction).or_insert_with(|| Stats::default());
|
||||
let stats = self.stats.entry(instruction).or_insert_with(|| Stats::default());
|
||||
let took = self.last_instruction.elapsed();
|
||||
stats.note(took);
|
||||
}
|
||||
|
||||
@@ -14,8 +14,8 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use util::U256;
|
||||
use {ReturnData};
|
||||
use bigint::prelude::U256;
|
||||
use vm::ReturnData;
|
||||
|
||||
const MAX_RETURN_WASTE_BYTES: usize = 16384;
|
||||
|
||||
@@ -44,7 +44,7 @@ pub trait Memory {
|
||||
}
|
||||
|
||||
/// Checks whether offset and size is valid memory range
|
||||
fn is_valid_range(off: usize, size: usize) -> bool {
|
||||
pub fn is_valid_range(off: usize, size: usize) -> bool {
|
||||
// When size is zero we haven't actually expanded the memory
|
||||
let overflow = off.overflowing_add(size).1;
|
||||
size > 0 && !overflow
|
||||
@@ -134,7 +134,7 @@ impl Memory for Vec<u8> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use util::U256;
|
||||
use bigint::prelude::U256;
|
||||
use super::Memory;
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -23,17 +23,26 @@ mod stack;
|
||||
mod memory;
|
||||
mod shared_cache;
|
||||
|
||||
use std::marker::PhantomData;
|
||||
use std::{cmp, mem};
|
||||
use std::sync::Arc;
|
||||
use hash::keccak;
|
||||
use bigint::prelude::{U256, U512};
|
||||
use bigint::hash::H256;
|
||||
|
||||
use vm::{
|
||||
self, ActionParams, ActionValue, CallType, MessageCallResult,
|
||||
ContractCreateResult, CreateContractAddress, ReturnData, GasLeft
|
||||
};
|
||||
|
||||
use evm::CostType;
|
||||
use instructions::{self, Instruction, InstructionInfo};
|
||||
|
||||
use self::gasometer::Gasometer;
|
||||
use self::stack::{Stack, VecStack};
|
||||
use self::memory::Memory;
|
||||
pub use self::shared_cache::SharedCache;
|
||||
|
||||
use std::marker::PhantomData;
|
||||
use action_params::{ActionParams, ActionValue};
|
||||
use call_type::CallType;
|
||||
use instructions::{self, Instruction, InstructionInfo};
|
||||
use evm::{self, GasLeft, CostType, ReturnData};
|
||||
use ext::{self, MessageCallResult, ContractCreateResult, CreateContractAddress};
|
||||
use bit_set::BitSet;
|
||||
|
||||
use util::*;
|
||||
@@ -107,8 +116,8 @@ pub struct Interpreter<Cost: CostType> {
|
||||
_type: PhantomData<Cost>,
|
||||
}
|
||||
|
||||
impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
|
||||
fn exec(&mut self, params: ActionParams, ext: &mut ext::Ext) -> evm::Result<GasLeft> {
|
||||
impl<Cost: CostType> vm::Vm for Interpreter<Cost> {
|
||||
fn exec(&mut self, params: ActionParams, ext: &mut vm::Ext) -> vm::Result<GasLeft> {
|
||||
self.mem.clear();
|
||||
|
||||
let mut informant = informant::EvmInformant::new(ext.depth());
|
||||
@@ -162,14 +171,19 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
|
||||
}
|
||||
|
||||
if do_trace {
|
||||
ext.trace_executed(gasometer.current_gas.as_u256(), stack.peek_top(info.ret), mem_written.map(|(o, s)| (o, &(self.mem[o..(o + s)]))), store_written);
|
||||
ext.trace_executed(
|
||||
gasometer.current_gas.as_u256(),
|
||||
stack.peek_top(info.ret),
|
||||
mem_written.map(|(o, s)| (o, &(self.mem[o..o+s]))),
|
||||
store_written,
|
||||
);
|
||||
}
|
||||
|
||||
// Advance
|
||||
match result {
|
||||
InstructionResult::JumpToPosition(position) => {
|
||||
if valid_jump_destinations.is_none() {
|
||||
let code_hash = params.code_hash.clone().unwrap_or_else(|| code.sha3());
|
||||
let code_hash = params.code_hash.clone().unwrap_or_else(|| keccak(code.as_ref()));
|
||||
valid_jump_destinations = Some(self.cache.jump_destinations(&code_hash, code));
|
||||
}
|
||||
let jump_destinations = valid_jump_destinations.as_ref().expect("jump_destinations are initialized on first jump; qed");
|
||||
@@ -205,33 +219,34 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
}
|
||||
}
|
||||
|
||||
fn verify_instruction(&self, ext: &ext::Ext, instruction: Instruction, info: &InstructionInfo, stack: &Stack<U256>) -> evm::Result<()> {
|
||||
fn verify_instruction(&self, ext: &vm::Ext, instruction: Instruction, info: &InstructionInfo, stack: &Stack<U256>) -> vm::Result<()> {
|
||||
let schedule = ext.schedule();
|
||||
|
||||
if (instruction == instructions::DELEGATECALL && !schedule.have_delegate_call) ||
|
||||
(instruction == instructions::CREATE2 && !schedule.have_create2) ||
|
||||
(instruction == instructions::STATICCALL && !schedule.have_static_call) ||
|
||||
((instruction == instructions::RETURNDATACOPY || instruction == instructions::RETURNDATASIZE) && !schedule.have_return_data) ||
|
||||
(instruction == instructions::REVERT && !schedule.have_revert) {
|
||||
|
||||
return Err(evm::Error::BadInstruction {
|
||||
return Err(vm::Error::BadInstruction {
|
||||
instruction: instruction
|
||||
});
|
||||
}
|
||||
|
||||
if info.tier == instructions::GasPriceTier::Invalid {
|
||||
return Err(evm::Error::BadInstruction {
|
||||
return Err(vm::Error::BadInstruction {
|
||||
instruction: instruction
|
||||
});
|
||||
}
|
||||
|
||||
if !stack.has(info.args) {
|
||||
Err(evm::Error::StackUnderflow {
|
||||
Err(vm::Error::StackUnderflow {
|
||||
instruction: info.name,
|
||||
wanted: info.args,
|
||||
on_stack: stack.size()
|
||||
})
|
||||
} else if stack.size() - info.args + info.ret > schedule.stack_limit {
|
||||
Err(evm::Error::OutOfStack {
|
||||
Err(vm::Error::OutOfStack {
|
||||
instruction: info.name,
|
||||
wanted: info.ret - info.args,
|
||||
limit: schedule.stack_limit
|
||||
@@ -245,14 +260,20 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
instruction: Instruction,
|
||||
stack: &Stack<U256>
|
||||
) -> Option<(usize, usize)> {
|
||||
match instruction {
|
||||
instructions::MSTORE | instructions::MLOAD => Some((stack.peek(0).low_u64() as usize, 32)),
|
||||
instructions::MSTORE8 => Some((stack.peek(0).low_u64() as usize, 1)),
|
||||
instructions::CALLDATACOPY | instructions::CODECOPY | instructions::RETURNDATACOPY => Some((stack.peek(0).low_u64() as usize, stack.peek(2).low_u64() as usize)),
|
||||
instructions::EXTCODECOPY => Some((stack.peek(1).low_u64() as usize, stack.peek(3).low_u64() as usize)),
|
||||
instructions::CALL | instructions::CALLCODE => Some((stack.peek(5).low_u64() as usize, stack.peek(6).low_u64() as usize)),
|
||||
instructions::DELEGATECALL => Some((stack.peek(4).low_u64() as usize, stack.peek(5).low_u64() as usize)),
|
||||
let read = |pos| stack.peek(pos).low_u64() as usize;
|
||||
let written = match instruction {
|
||||
instructions::MSTORE | instructions::MLOAD => Some((read(0), 32)),
|
||||
instructions::MSTORE8 => Some((read(0), 1)),
|
||||
instructions::CALLDATACOPY | instructions::CODECOPY | instructions::RETURNDATACOPY => Some((read(0), read(2))),
|
||||
instructions::EXTCODECOPY => Some((read(1), read(3))),
|
||||
instructions::CALL | instructions::CALLCODE => Some((read(5), read(6))),
|
||||
instructions::DELEGATECALL | instructions::STATICCALL => Some((read(4), read(5))),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
match written {
|
||||
Some((offset, size)) if !memory::is_valid_range(offset, size) => None,
|
||||
written => written,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -271,12 +292,12 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
&mut self,
|
||||
gas: Cost,
|
||||
params: &ActionParams,
|
||||
ext: &mut ext::Ext,
|
||||
ext: &mut vm::Ext,
|
||||
instruction: Instruction,
|
||||
code: &mut CodeReader,
|
||||
stack: &mut Stack<U256>,
|
||||
provided: Option<Cost>
|
||||
) -> evm::Result<InstructionResult<Cost>> {
|
||||
) -> vm::Result<InstructionResult<Cost>> {
|
||||
match instruction {
|
||||
instructions::JUMP => {
|
||||
let jump = stack.pop_back();
|
||||
@@ -301,27 +322,39 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
let init_off = stack.pop_back();
|
||||
let init_size = stack.pop_back();
|
||||
|
||||
let address_scheme = if instruction == instructions::CREATE { CreateContractAddress::FromSenderAndNonce } else { CreateContractAddress::FromSenderAndCodeHash };
|
||||
let create_gas = provided.expect("`provided` comes through Self::exec from `Gasometer::get_gas_cost_mem`; `gas_gas_mem_cost` guarantees `Some` when instruction is `CALL`/`CALLCODE`/`DELEGATECALL`/`CREATE`; this is `CREATE`; qed");
|
||||
|
||||
let contract_code = self.mem.read_slice(init_off, init_size);
|
||||
let can_create = ext.balance(¶ms.address)? >= endowment && ext.depth() < ext.schedule().max_depth;
|
||||
if ext.is_static() {
|
||||
return Err(vm::Error::MutableCallInStaticContext);
|
||||
}
|
||||
|
||||
// clear return data buffer before creating new call frame.
|
||||
self.return_data = ReturnData::empty();
|
||||
|
||||
let can_create = ext.balance(¶ms.address)? >= endowment && ext.depth() < ext.schedule().max_depth;
|
||||
if !can_create {
|
||||
stack.push(U256::zero());
|
||||
return Ok(InstructionResult::UnusedGas(create_gas));
|
||||
}
|
||||
|
||||
let contract_code = self.mem.read_slice(init_off, init_size);
|
||||
let address_scheme = if instruction == instructions::CREATE { CreateContractAddress::FromSenderAndNonce } else { CreateContractAddress::FromSenderAndCodeHash };
|
||||
|
||||
let create_result = ext.create(&create_gas.as_u256(), &endowment, contract_code, address_scheme);
|
||||
return match create_result {
|
||||
ContractCreateResult::Created(address, gas_left) => {
|
||||
stack.push(address_to_u256(address));
|
||||
Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater.")))
|
||||
},
|
||||
ContractCreateResult::Reverted(gas_left, return_data) => {
|
||||
stack.push(U256::zero());
|
||||
self.return_data = return_data;
|
||||
Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater.")))
|
||||
},
|
||||
ContractCreateResult::Failed => {
|
||||
stack.push(U256::zero());
|
||||
Ok(InstructionResult::Ok)
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
instructions::CALL | instructions::CALLCODE | instructions::DELEGATECALL | instructions::STATICCALL => {
|
||||
@@ -332,8 +365,10 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
let code_address = stack.pop_back();
|
||||
let code_address = u256_to_address(&code_address);
|
||||
|
||||
let value = if instruction == instructions::DELEGATECALL || instruction == instructions::STATICCALL {
|
||||
let value = if instruction == instructions::DELEGATECALL {
|
||||
None
|
||||
} else if instruction == instructions::STATICCALL {
|
||||
Some(U256::zero())
|
||||
} else {
|
||||
Some(stack.pop_back())
|
||||
};
|
||||
@@ -352,6 +387,9 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
// Get sender & receive addresses, check if we have balance
|
||||
let (sender_address, receive_address, has_balance, call_type) = match instruction {
|
||||
instructions::CALL => {
|
||||
if ext.is_static() && value.map_or(false, |v| !v.is_zero()) {
|
||||
return Err(vm::Error::MutableCallInStaticContext);
|
||||
}
|
||||
let has_balance = ext.balance(¶ms.address)? >= value.expect("value set for all but delegate call; qed");
|
||||
(¶ms.address, &code_address, has_balance, CallType::Call)
|
||||
},
|
||||
@@ -360,10 +398,13 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
(¶ms.address, ¶ms.address, has_balance, CallType::CallCode)
|
||||
},
|
||||
instructions::DELEGATECALL => (¶ms.sender, ¶ms.address, true, CallType::DelegateCall),
|
||||
instructions::STATICCALL => (¶ms.sender, ¶ms.address, true, CallType::StaticCall),
|
||||
instructions::STATICCALL => (¶ms.address, &code_address, true, CallType::StaticCall),
|
||||
_ => panic!(format!("Unexpected instruction {} in CALL branch.", instruction))
|
||||
};
|
||||
|
||||
// clear return data buffer before creating new call frame.
|
||||
self.return_data = ReturnData::empty();
|
||||
|
||||
let can_call = has_balance && ext.depth() < ext.schedule().max_depth;
|
||||
if !can_call {
|
||||
stack.push(U256::zero());
|
||||
@@ -382,12 +423,17 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
MessageCallResult::Success(gas_left, data) => {
|
||||
stack.push(U256::one());
|
||||
self.return_data = data;
|
||||
Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater then current one")))
|
||||
Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater than current one")))
|
||||
},
|
||||
MessageCallResult::Reverted(gas_left, data) => {
|
||||
stack.push(U256::zero());
|
||||
self.return_data = data;
|
||||
Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater than current one")))
|
||||
},
|
||||
MessageCallResult::Failed => {
|
||||
stack.push(U256::zero());
|
||||
Ok(InstructionResult::Ok)
|
||||
}
|
||||
},
|
||||
};
|
||||
},
|
||||
instructions::RETURN => {
|
||||
@@ -446,8 +492,8 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
instructions::SHA3 => {
|
||||
let offset = stack.pop_back();
|
||||
let size = stack.pop_back();
|
||||
let sha3 = self.mem.read_slice(offset, size).sha3();
|
||||
stack.push(U256::from(&*sha3));
|
||||
let k = keccak(self.mem.read_slice(offset, size));
|
||||
stack.push(U256::from(&*k));
|
||||
},
|
||||
instructions::SLOAD => {
|
||||
let key = H256::from(&stack.pop_back());
|
||||
@@ -525,6 +571,14 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
Self::copy_data_to_memory(&mut self.mem, stack, params.data.as_ref().map_or_else(|| &[] as &[u8], |d| &*d as &[u8]));
|
||||
},
|
||||
instructions::RETURNDATACOPY => {
|
||||
{
|
||||
let source_offset = stack.peek(1);
|
||||
let size = stack.peek(2);
|
||||
let return_data_len = U256::from(self.return_data.len());
|
||||
if source_offset.saturating_add(*size) > return_data_len {
|
||||
return Err(vm::Error::OutOfBounds);
|
||||
}
|
||||
}
|
||||
Self::copy_data_to_memory(&mut self.mem, stack, &*self.return_data);
|
||||
},
|
||||
instructions::CODECOPY => {
|
||||
@@ -592,13 +646,13 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
}
|
||||
}
|
||||
|
||||
fn verify_jump(&self, jump_u: U256, valid_jump_destinations: &BitSet) -> evm::Result<usize> {
|
||||
fn verify_jump(&self, jump_u: U256, valid_jump_destinations: &BitSet) -> vm::Result<usize> {
|
||||
let jump = jump_u.low_u64() as usize;
|
||||
|
||||
if valid_jump_destinations.contains(jump) && U256::from(jump) == jump_u {
|
||||
Ok(jump)
|
||||
} else {
|
||||
Err(evm::Error::BadJumpDestination {
|
||||
Err(vm::Error::BadJumpDestination {
|
||||
destination: jump
|
||||
})
|
||||
}
|
||||
@@ -616,7 +670,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
}
|
||||
}
|
||||
|
||||
fn exec_stack_instruction(&self, instruction: Instruction, stack: &mut Stack<U256>) -> evm::Result<()> {
|
||||
fn exec_stack_instruction(&self, instruction: Instruction, stack: &mut Stack<U256>) -> vm::Result<()> {
|
||||
match instruction {
|
||||
instructions::DUP1...instructions::DUP16 => {
|
||||
let position = instructions::get_dup_position(instruction);
|
||||
@@ -821,7 +875,7 @@ impl<Cost: CostType> Interpreter<Cost> {
|
||||
}
|
||||
},
|
||||
_ => {
|
||||
return Err(evm::Error::BadInstruction {
|
||||
return Err(vm::Error::BadInstruction {
|
||||
instruction: instruction
|
||||
});
|
||||
}
|
||||
@@ -855,3 +909,57 @@ fn address_to_u256(value: Address) -> U256 {
|
||||
U256::from(&*H256::from(value))
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
use rustc_hex::FromHex;
|
||||
use vmtype::VMType;
|
||||
use factory::Factory;
|
||||
use vm::{ActionParams, ActionValue};
|
||||
use vm::tests::{FakeExt, test_finalize};
|
||||
|
||||
#[test]
|
||||
fn should_not_fail_on_tracing_mem() {
|
||||
let code = "7feeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff006000527faaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffaa6020526000620f120660406000601773945304eb96065b2a98b57a48a06ae28d285a71b56101f4f1600055".from_hex().unwrap();
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.address = 5.into();
|
||||
params.gas = 300_000.into();
|
||||
params.gas_price = 1.into();
|
||||
params.value = ActionValue::Transfer(100_000.into());
|
||||
params.code = Some(Arc::new(code));
|
||||
let mut ext = FakeExt::new();
|
||||
ext.balances.insert(5.into(), 1_000_000_000.into());
|
||||
ext.tracing = true;
|
||||
|
||||
let gas_left = {
|
||||
let mut vm = Factory::new(VMType::Interpreter, 1).create(params.gas);
|
||||
test_finalize(vm.exec(params, &mut ext)).unwrap()
|
||||
};
|
||||
|
||||
assert_eq!(ext.calls.len(), 1);
|
||||
assert_eq!(gas_left, 248_212.into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_not_overflow_returndata() {
|
||||
let code = "6001600160000360003e00".from_hex().unwrap();
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.address = 5.into();
|
||||
params.gas = 300_000.into();
|
||||
params.gas_price = 1.into();
|
||||
params.code = Some(Arc::new(code));
|
||||
let mut ext = FakeExt::new_byzantium();
|
||||
ext.balances.insert(5.into(), 1_000_000_000.into());
|
||||
ext.tracing = true;
|
||||
|
||||
let err = {
|
||||
let mut vm = Factory::new(VMType::Interpreter, 1).create(params.gas);
|
||||
test_finalize(vm.exec(params, &mut ext)).err().unwrap()
|
||||
};
|
||||
|
||||
assert_eq!(err, ::vm::Error::OutOfBounds);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,8 +15,10 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::sync::Arc;
|
||||
use util::{H256, HeapSizeOf, Mutex};
|
||||
use util::sha3::*;
|
||||
use hash::KECCAK_EMPTY;
|
||||
use heapsize::HeapSizeOf;
|
||||
use bigint::hash::H256;
|
||||
use parking_lot::Mutex;
|
||||
use util::cache::MemoryLruCache;
|
||||
use bit_set::BitSet;
|
||||
use super::super::instructions;
|
||||
@@ -49,7 +51,7 @@ impl SharedCache {
|
||||
|
||||
/// Get jump destinations bitmap for a contract.
|
||||
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
|
||||
if code_hash == &SHA3_EMPTY {
|
||||
if code_hash == &KECCAK_EMPTY {
|
||||
return Self::find_jump_destinations(code);
|
||||
}
|
||||
|
||||
|
||||
@@ -15,10 +15,13 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Just in time compiler execution environment.
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use util::*;
|
||||
use evmjit;
|
||||
use evm::{self, GasLeft};
|
||||
use evm::CallType;
|
||||
use vm::{self, Vm};
|
||||
|
||||
/// Should be used to convert jit types to ethcore
|
||||
trait FromJit<T>: Sized {
|
||||
@@ -318,7 +321,7 @@ pub struct JitEvm {
|
||||
context: Option<evmjit::ContextHandle>,
|
||||
}
|
||||
|
||||
impl evm::Evm for JitEvm {
|
||||
impl vm::Vm for JitEvm {
|
||||
fn exec(&mut self, params: ActionParams, ext: &mut evm::Ext) -> evm::Result<GasLeft> {
|
||||
// Dirty hack. This is unsafe, but we interact with ffi, so it's justified.
|
||||
let ext_adapter: ExtAdapter<'static> = unsafe { ::std::mem::transmute(ExtAdapter::new(ext, params.address.clone())) };
|
||||
@@ -370,8 +373,8 @@ impl evm::Evm for JitEvm {
|
||||
ext.suicide(&Address::from_jit(&context.suicide_refund_address()));
|
||||
Ok(GasLeft::Known(U256::from(context.gas_left())))
|
||||
},
|
||||
evmjit::ReturnCode::OutOfGas => Err(evm::Error::OutOfGas),
|
||||
_err => Err(evm::Error::Internal)
|
||||
evmjit::ReturnCode::OutOfGas => Err(vm::Error::OutOfGas),
|
||||
_err => Err(vm::Error::Internal)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,15 +20,20 @@ extern crate byteorder;
|
||||
extern crate bit_set;
|
||||
extern crate common_types as types;
|
||||
extern crate ethcore_util as util;
|
||||
extern crate ethcore_bigint as bigint;
|
||||
extern crate ethjson;
|
||||
extern crate rlp;
|
||||
extern crate parity_wasm;
|
||||
extern crate parking_lot;
|
||||
extern crate wasm_utils;
|
||||
extern crate ethcore_logger;
|
||||
extern crate heapsize;
|
||||
extern crate vm;
|
||||
extern crate hash;
|
||||
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
|
||||
#[macro_use]
|
||||
#[cfg_attr(feature = "evm-debug", macro_use)]
|
||||
extern crate log;
|
||||
|
||||
#[cfg(feature = "jit")]
|
||||
@@ -37,14 +42,8 @@ extern crate evmjit;
|
||||
#[cfg(test)]
|
||||
extern crate rustc_hex;
|
||||
|
||||
pub mod action_params;
|
||||
pub mod call_type;
|
||||
pub mod env_info;
|
||||
pub mod ext;
|
||||
pub mod evm;
|
||||
pub mod interpreter;
|
||||
pub mod schedule;
|
||||
pub mod wasm;
|
||||
|
||||
#[macro_use]
|
||||
pub mod factory;
|
||||
@@ -59,12 +58,12 @@ mod tests;
|
||||
#[cfg(all(feature="benches", test))]
|
||||
mod benches;
|
||||
|
||||
pub use self::action_params::ActionParams;
|
||||
pub use self::call_type::CallType;
|
||||
pub use self::env_info::EnvInfo;
|
||||
pub use self::evm::{Evm, Error, Finalize, FinalizationResult, GasLeft, Result, CostType, ReturnData};
|
||||
pub use self::ext::{Ext, ContractCreateResult, MessageCallResult, CreateContractAddress};
|
||||
pub use vm::{
|
||||
Schedule, CleanDustMode, EnvInfo, CallType, ActionParams, Ext,
|
||||
ContractCreateResult, MessageCallResult, CreateContractAddress,
|
||||
GasLeft, ReturnData
|
||||
};
|
||||
pub use self::evm::{Finalize, FinalizationResult, CostType};
|
||||
pub use self::instructions::{InstructionInfo, INSTRUCTIONS, push_bytes};
|
||||
pub use self::vmtype::VMType;
|
||||
pub use self::factory::Factory;
|
||||
pub use self::schedule::{Schedule, CleanDustMode};
|
||||
|
||||
@@ -15,212 +15,19 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::fmt::Debug;
|
||||
use std::str::FromStr;
|
||||
use std::hash::Hash;
|
||||
use std::sync::Arc;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use rustc_hex::FromHex;
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use util::*;
|
||||
use action_params::{ActionParams, ActionValue};
|
||||
use env_info::EnvInfo;
|
||||
use call_type::CallType;
|
||||
use schedule::Schedule;
|
||||
use evm::{self, GasLeft, ReturnData};
|
||||
use ext::{Ext, ContractCreateResult, MessageCallResult, CreateContractAddress};
|
||||
use vm::{self, ActionParams, ActionValue};
|
||||
use vm::tests::{FakeExt, FakeCall, FakeCallType, test_finalize};
|
||||
use factory::Factory;
|
||||
use vmtype::VMType;
|
||||
|
||||
pub struct FakeLogEntry {
|
||||
topics: Vec<H256>,
|
||||
data: Bytes
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
pub enum FakeCallType {
|
||||
Call, Create
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
pub struct FakeCall {
|
||||
pub call_type: FakeCallType,
|
||||
pub gas: U256,
|
||||
pub sender_address: Option<Address>,
|
||||
pub receive_address: Option<Address>,
|
||||
pub value: Option<U256>,
|
||||
pub data: Bytes,
|
||||
pub code_address: Option<Address>,
|
||||
}
|
||||
|
||||
/// Fake externalities test structure.
|
||||
///
|
||||
/// Can't do recursive calls.
|
||||
#[derive(Default)]
|
||||
pub struct FakeExt {
|
||||
pub store: HashMap<H256, H256>,
|
||||
pub suicides: HashSet<Address>,
|
||||
pub calls: HashSet<FakeCall>,
|
||||
sstore_clears: usize,
|
||||
depth: usize,
|
||||
blockhashes: HashMap<U256, H256>,
|
||||
codes: HashMap<Address, Arc<Bytes>>,
|
||||
logs: Vec<FakeLogEntry>,
|
||||
info: EnvInfo,
|
||||
schedule: Schedule,
|
||||
balances: HashMap<Address, U256>,
|
||||
}
|
||||
|
||||
// similar to the normal `finalize` function, but ignoring NeedsReturn.
|
||||
fn test_finalize(res: Result<GasLeft, evm::Error>) -> Result<U256, evm::Error> {
|
||||
match res {
|
||||
Ok(GasLeft::Known(gas)) => Ok(gas),
|
||||
Ok(GasLeft::NeedsReturn{..}) => unimplemented!(), // since ret is unimplemented.
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
impl FakeExt {
|
||||
pub fn new() -> Self {
|
||||
FakeExt::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Schedule {
|
||||
fn default() -> Self {
|
||||
Schedule::new_frontier()
|
||||
}
|
||||
}
|
||||
|
||||
impl Ext for FakeExt {
	// Missing keys read as the zero word.
	fn storage_at(&self, key: &H256) -> evm::Result<H256> {
		Ok(self.store.get(key).unwrap_or(&H256::new()).clone())
	}

	fn set_storage(&mut self, key: H256, value: H256) -> evm::Result<()> {
		self.store.insert(key, value);
		Ok(())
	}

	// An account "exists" iff a balance entry was seeded for it.
	fn exists(&self, address: &Address) -> evm::Result<bool> {
		Ok(self.balances.contains_key(address))
	}

	fn exists_and_not_null(&self, address: &Address) -> evm::Result<bool> {
		Ok(self.balances.get(address).map_or(false, |b| !b.is_zero()))
	}

	fn origin_balance(&self) -> evm::Result<U256> {
		unimplemented!()
	}

	// NOTE(review): indexing panics for unseeded addresses — tests must seed
	// `balances` before exercising BALANCE.
	fn balance(&self, address: &Address) -> evm::Result<U256> {
		Ok(self.balances[address])
	}

	// Unseeded block numbers hash to zero.
	fn blockhash(&mut self, number: &U256) -> H256 {
		self.blockhashes.get(number).unwrap_or(&H256::new()).clone()
	}

	// Records the creation attempt and always reports failure (no recursion support).
	fn create(&mut self, gas: &U256, value: &U256, code: &[u8], _address: CreateContractAddress) -> ContractCreateResult {
		self.calls.insert(FakeCall {
			call_type: FakeCallType::Create,
			gas: *gas,
			sender_address: None,
			receive_address: None,
			value: Some(*value),
			data: code.to_vec(),
			code_address: None
		});
		ContractCreateResult::Failed
	}

	// Records the call and reports success with all gas returned and empty output.
	fn call(&mut self,
		gas: &U256,
		sender_address: &Address,
		receive_address: &Address,
		value: Option<U256>,
		data: &[u8],
		code_address: &Address,
		_output: &mut [u8],
		_call_type: CallType
	) -> MessageCallResult {

		self.calls.insert(FakeCall {
			call_type: FakeCallType::Call,
			gas: *gas,
			sender_address: Some(sender_address.clone()),
			receive_address: Some(receive_address.clone()),
			value: value,
			data: data.to_vec(),
			code_address: Some(code_address.clone())
		});
		MessageCallResult::Success(*gas, ReturnData::empty())
	}

	// Unknown addresses have empty code.
	fn extcode(&self, address: &Address) -> evm::Result<Arc<Bytes>> {
		Ok(self.codes.get(address).unwrap_or(&Arc::new(Bytes::new())).clone())
	}

	fn extcodesize(&self, address: &Address) -> evm::Result<usize> {
		Ok(self.codes.get(address).map_or(0, |c| c.len()))
	}

	fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> evm::Result<()> {
		self.logs.push(FakeLogEntry {
			topics: topics,
			data: data.to_vec()
		});
		Ok(())
	}

	// Tests drive results through `test_finalize`, which never reaches `ret`.
	fn ret(self, _gas: &U256, _data: &ReturnData) -> evm::Result<U256> {
		unimplemented!();
	}

	fn suicide(&mut self, refund_address: &Address) -> evm::Result<()> {
		self.suicides.insert(refund_address.clone());
		Ok(())
	}

	fn schedule(&self) -> &Schedule {
		&self.schedule
	}

	fn env_info(&self) -> &EnvInfo {
		&self.info
	}

	fn depth(&self) -> usize {
		self.depth
	}

	fn inc_sstore_clears(&mut self) {
		self.sstore_clears += 1;
	}
}
|
||||
|
||||
#[test]
fn test_stack_underflow() {
	// ADD (0x01) with an empty stack: wants 2 operands, has 0.
	let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
	let code = "01600055".from_hex().unwrap();

	let mut params = ActionParams::default();
	params.address = address.clone();
	params.gas = U256::from(100_000);
	params.code = Some(Arc::new(code));
	let mut ext = FakeExt::new();

	let err = {
		let mut vm : Box<evm::Evm> = Box::new(super::interpreter::Interpreter::<usize>::new(Arc::new(super::interpreter::SharedCache::default())));
		test_finalize(vm.exec(params, &mut ext)).unwrap_err()
	};

	match err {
		evm::Error::StackUnderflow {wanted, on_stack, ..} => {
			assert_eq!(wanted, 2);
			assert_eq!(on_stack, 0);
		}
		_ => {
			// fixed typo in the failure message ("StackUndeflow")
			assert!(false, "Expected StackUnderflow")
		}
	};
}
|
||||
|
||||
evm_test!{test_add: test_add_jit, test_add_int}
|
||||
fn test_add(factory: super::Factory) {
|
||||
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||
@@ -849,7 +656,7 @@ fn test_badinstruction_int() {
|
||||
};
|
||||
|
||||
match err {
|
||||
evm::Error::BadInstruction { instruction: 0xaf } => (),
|
||||
vm::Error::BadInstruction { instruction: 0xaf } => (),
|
||||
_ => assert!(false, "Expected bad instruction")
|
||||
}
|
||||
}
|
||||
@@ -917,7 +724,6 @@ fn test_jumps(factory: super::Factory) {
|
||||
assert_eq!(gas_left, U256::from(54_117));
|
||||
}
|
||||
|
||||
|
||||
evm_test!{test_calls: test_calls_jit, test_calls_int}
|
||||
fn test_calls(factory: super::Factory) {
|
||||
let code = "600054602d57600160005560006000600060006050610998610100f160006000600060006050610998610100f25b".from_hex().unwrap();
|
||||
@@ -962,6 +768,27 @@ fn test_calls(factory: super::Factory) {
|
||||
assert_eq!(ext.calls.len(), 2);
|
||||
}
|
||||
|
||||
evm_test!{test_create_in_staticcall: test_create_in_staticcall_jit, test_create_in_staticcall_int}
// CREATE executed inside a static context must fail with
// `MutableCallInStaticContext` and record no calls.
fn test_create_in_staticcall(factory: super::Factory) {
	let code = "600060006064f000".from_hex().unwrap();

	let address = Address::from(0x155);
	let mut params = ActionParams::default();
	params.gas = U256::from(100_000);
	params.code = Some(Arc::new(code));
	params.address = address.clone();
	let mut ext = FakeExt::new_byzantium();
	ext.is_static = true;

	let err = {
		let mut vm = factory.create(params.gas);
		test_finalize(vm.exec(params, &mut ext)).unwrap_err()
	};

	assert_eq!(err, vm::Error::MutableCallInStaticContext);
	assert_eq!(ext.calls.len(), 0);
}
|
||||
|
||||
fn assert_set_contains<T : Debug + Eq + PartialEq + Hash>(set: &HashSet<T>, val: &T) {
|
||||
let contains = set.contains(val);
|
||||
if !contains {
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Wasm evm call arguments helper
|
||||
|
||||
use util::{U256, H160};
|
||||
|
||||
/// Input part of the wasm call descriptor
pub struct CallArgs {
	/// Receiver of the transaction (20-byte address)
	pub address: [u8; 20],

	/// Sender of the transaction (20-byte address)
	pub sender: [u8; 20],

	/// Original transaction initiator (20-byte address)
	pub origin: [u8; 20],

	/// Transfer value, big-endian 32 bytes
	pub value: [u8; 32],

	/// call/create params
	pub data: Vec<u8>,
}
|
||||
|
||||
impl CallArgs {
|
||||
/// New contract call payload with known parameters
|
||||
pub fn new(address: H160, sender: H160, origin: H160, value: U256, data: Vec<u8>) -> Self {
|
||||
let mut descriptor = CallArgs {
|
||||
address: [0u8; 20],
|
||||
sender: [0u8; 20],
|
||||
origin: [0u8; 20],
|
||||
value: [0u8; 32],
|
||||
data: data,
|
||||
};
|
||||
|
||||
descriptor.address.copy_from_slice(&*address);
|
||||
descriptor.sender.copy_from_slice(&*sender);
|
||||
descriptor.origin.copy_from_slice(&*origin);
|
||||
value.to_big_endian(&mut descriptor.value);
|
||||
|
||||
descriptor
|
||||
}
|
||||
|
||||
/// Total call payload length in linear memory
|
||||
pub fn len(&self) -> u32 {
|
||||
self.data.len() as u32 + 92
|
||||
}
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Wasm Interpreter
|
||||
|
||||
mod runtime;
|
||||
mod ptr;
|
||||
mod call_args;
|
||||
mod result;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod env;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
const DEFAULT_STACK_SPACE: u32 = 5 * 1024 * 1024;
|
||||
|
||||
use parity_wasm::{interpreter, elements};
|
||||
use parity_wasm::interpreter::ModuleInstanceInterface;
|
||||
use wasm_utils;
|
||||
|
||||
use evm::{self, GasLeft, ReturnData};
|
||||
use action_params::ActionParams;
|
||||
use self::runtime::Runtime;
|
||||
|
||||
pub use self::runtime::Error as RuntimeError;
|
||||
|
||||
const DEFAULT_RESULT_BUFFER: usize = 1024;
|
||||
|
||||
/// Wasm interpreter instance
pub struct WasmInterpreter {
	// Wasm program instance hosting the "env" and "contract" modules.
	program: interpreter::ProgramInstance,
	// Reusable buffer for copying contract return data out of linear memory.
	result: Vec<u8>,
}
|
||||
|
||||
impl WasmInterpreter {
|
||||
/// New wasm interpreter instance
|
||||
pub fn new() -> Result<WasmInterpreter, RuntimeError> {
|
||||
Ok(WasmInterpreter {
|
||||
program: interpreter::ProgramInstance::new()?,
|
||||
result: Vec::with_capacity(DEFAULT_RESULT_BUFFER),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl evm::Evm for WasmInterpreter {

	// Executes a wasm contract: deserializes the module, injects gas metering,
	// writes the call descriptor into linear memory, runs `_call`, then reads
	// back the optional result payload.
	fn exec(&mut self, params: ActionParams, ext: &mut ::ext::Ext) -> evm::Result<GasLeft> {
		use parity_wasm::elements::Deserialize;

		let code = params.code.expect("exec is only called on contract with code; qed");

		trace!(target: "wasm", "Started wasm interpreter with code.len={:?}", code.len());

		let env_instance = self.program.module("env")
			// prefer explicit panic here
			.expect("Wasm program to contain env module");

		let env_memory = env_instance.memory(interpreter::ItemIndex::Internal(0))
			// prefer explicit panic here
			.expect("Linear memory to exist in wasm runtime");

		// gas is metered in u64 inside the runtime, so larger limits cannot be represented
		if params.gas > ::std::u64::MAX.into() {
			return Err(evm::Error::Wasm("Wasm interpreter cannot run contracts with gas >= 2^64".to_owned()));
		}

		let mut runtime = Runtime::with_params(
			ext,
			env_memory,
			DEFAULT_STACK_SPACE,
			params.gas.low_u64(),
		);

		let mut cursor = ::std::io::Cursor::new(&*code);

		let contract_module = wasm_utils::inject_gas_counter(
			elements::Module::deserialize(
				&mut cursor
			).map_err(|err| {
				evm::Error::Wasm(format!("Error deserializing contract code ({:?})", err))
			})?
		);

		// descriptor pointer is passed to `_call` as its single argument
		let d_ptr = runtime.write_descriptor(
			call_args::CallArgs::new(
				params.address,
				params.sender,
				params.origin,
				params.value.value(),
				params.data.unwrap_or(Vec::with_capacity(0)),
			)
		)?;

		{
			let execution_params = interpreter::ExecutionParams::with_external(
				"env".into(),
				Arc::new(
					interpreter::env_native_module(env_instance, native_bindings(&mut runtime))
						.map_err(|err| {
							// todo: prefer explicit panic here also?
							evm::Error::Wasm(format!("Error instantiating native bindings: {:?}", err))
						})?
				)
			).add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));

			let module_instance = self.program.add_module("contract", contract_module, Some(&execution_params.externals))
				.map_err(|err| {
					trace!(target: "wasm", "Error adding contract module: {:?}", err);
					evm::Error::from(RuntimeError::Interpreter(err))
				})?;

			module_instance.execute_export("_call", execution_params)
				.map_err(|err| {
					trace!(target: "wasm", "Error executing contract: {:?}", err);
					evm::Error::from(RuntimeError::Interpreter(err))
				})?;
		}

		// the contract reports its result (if any) through the shared descriptor
		let result = result::WasmResult::new(d_ptr);
		if result.peek_empty(&*runtime.memory())? {
			trace!(target: "wasm", "Contract execution result is empty.");
			Ok(GasLeft::Known(runtime.gas_left()?.into()))
		} else {
			self.result.clear();
			// todo: use memory views to avoid copy
			self.result.extend(result.pop(&*runtime.memory())?);
			let len = self.result.len();
			Ok(GasLeft::NeedsReturn {
				gas_left: runtime.gas_left()?.into(),
				data: ReturnData::new(
					// swap the buffer out so the returned data owns it
					::std::mem::replace(&mut self.result, Vec::with_capacity(DEFAULT_RESULT_BUFFER)),
					0,
					len,
				),
				apply_state: true,
			})
		}
	}
}
|
||||
|
||||
// Builds the "env" host-function table, dispatched into `runtime`
// (see `UserFunctionExecutor::execute` for the symbol -> method mapping).
fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserFunctions<'a> {
	interpreter::UserFunctions {
		executor: runtime,
		functions: ::std::borrow::Cow::from(env::SIGNATURES),
	}
}
|
||||
@@ -1,356 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Wasm evm program runtime intstance
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use byteorder::{LittleEndian, ByteOrder};
|
||||
|
||||
use ext;
|
||||
|
||||
use parity_wasm::interpreter;
|
||||
use util::{Address, H256, U256};
|
||||
|
||||
use super::ptr::{WasmPtr, Error as PtrError};
|
||||
use super::call_args::CallArgs;
|
||||
|
||||
/// Wasm runtime error
#[derive(Debug)]
pub enum Error {
	/// Storage error
	Storage,
	/// Allocator error
	Allocator,
	/// Invalid gas state during the call
	InvalidGasState,
	/// Memory access violation
	AccessViolation,
	/// Interpreter runtime error (wraps the underlying `parity_wasm` error)
	Interpreter(interpreter::Error),
}
|
||||
|
||||
impl From<interpreter::Error> for Error {
	// Lets `?` be used directly on interpreter calls.
	fn from(err: interpreter::Error) -> Self {
		Error::Interpreter(err)
	}
}
|
||||
|
||||
impl From<PtrError> for Error {
	fn from(err: PtrError) -> Self {
		// exhaustive match: adding a `PtrError` variant forces an update here
		match err {
			PtrError::AccessViolation => Error::AccessViolation,
		}
	}
}
|
||||
|
||||
/// Runtime environment data for wasm contract execution
pub struct Runtime<'a> {
	// gas spent so far; `gas_left` treats `gas_counter > gas_limit` as invalid state
	gas_counter: u64,
	// total gas available for this execution
	gas_limit: u64,
	// top of the arena allocator in linear memory (grows upward, never freed)
	dynamic_top: u32,
	// externalities of the enclosing EVM state
	ext: &'a mut ext::Ext,
	// contract linear memory instance
	memory: Arc<interpreter::MemoryInstance>,
}
|
||||
|
||||
impl<'a> Runtime<'a> {
|
||||
/// New runtime for wasm contract with specified params
|
||||
pub fn with_params<'b>(
|
||||
ext: &'b mut ext::Ext,
|
||||
memory: Arc<interpreter::MemoryInstance>,
|
||||
stack_space: u32,
|
||||
gas_limit: u64,
|
||||
) -> Runtime<'b> {
|
||||
Runtime {
|
||||
gas_counter: 0,
|
||||
gas_limit: gas_limit,
|
||||
dynamic_top: stack_space,
|
||||
memory: memory,
|
||||
ext: ext,
|
||||
}
|
||||
}
|
||||
|
||||
/// Write to the storage from wasm memory
|
||||
pub fn storage_write(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let mut context = context;
|
||||
let val = self.pop_h256(&mut context)?;
|
||||
let key = self.pop_h256(&mut context)?;
|
||||
trace!(target: "wasm", "storage_write: value {} at @{}", &val, &key);
|
||||
|
||||
self.ext.set_storage(key, val)
|
||||
.map_err(|_| interpreter::Error::Trap("Storage update error".to_owned()))?;
|
||||
|
||||
Ok(Some(0i32.into()))
|
||||
}
|
||||
|
||||
/// Read from the storage to wasm memory
|
||||
pub fn storage_read(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let mut context = context;
|
||||
let val_ptr = context.value_stack.pop_as::<i32>()?;
|
||||
let key = self.pop_h256(&mut context)?;
|
||||
|
||||
let val = self.ext.storage_at(&key)
|
||||
.map_err(|_| interpreter::Error::Trap("Storage read error".to_owned()))?;
|
||||
|
||||
self.memory.set(val_ptr as u32, &*val)?;
|
||||
|
||||
Ok(Some(0.into()))
|
||||
}
|
||||
|
||||
/// Pass suicide to state runtime
|
||||
pub fn suicide(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let mut context = context;
|
||||
let refund_address = self.pop_address(&mut context)?;
|
||||
|
||||
self.ext.suicide(&refund_address)
|
||||
.map_err(|_| interpreter::Error::Trap("Suicide error".to_owned()))?;
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Invoke create in the state runtime
|
||||
pub fn create(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
//
|
||||
// method signature:
|
||||
// fn create(endowment: *const u8, code_ptr: *const u8, code_len: u32, result_ptr: *mut u8) -> i32;
|
||||
//
|
||||
|
||||
trace!(target: "wasm", "runtime: create contract");
|
||||
let mut context = context;
|
||||
let result_ptr = context.value_stack.pop_as::<i32>()? as u32;
|
||||
trace!(target: "wasm", " result_ptr: {:?}", result_ptr);
|
||||
let code_len = context.value_stack.pop_as::<i32>()? as u32;
|
||||
trace!(target: "wasm", " code_len: {:?}", code_len);
|
||||
let code_ptr = context.value_stack.pop_as::<i32>()? as u32;
|
||||
trace!(target: "wasm", " code_ptr: {:?}", code_ptr);
|
||||
let endowment = self.pop_u256(&mut context)?;
|
||||
trace!(target: "wasm", " val: {:?}", endowment);
|
||||
|
||||
let code = self.memory.get(code_ptr, code_len as usize)?;
|
||||
|
||||
let gas_left = self.gas_left()
|
||||
.map_err(|_| interpreter::Error::Trap("Gas state error".to_owned()))?
|
||||
.into();
|
||||
|
||||
match self.ext.create(&gas_left, &endowment, &code, ext::CreateContractAddress::FromSenderAndCodeHash) {
|
||||
ext::ContractCreateResult::Created(address, gas_left) => {
|
||||
self.memory.set(result_ptr, &*address)?;
|
||||
self.gas_counter = self.gas_limit - gas_left.low_u64();
|
||||
trace!(target: "wasm", "runtime: create contract success (@{:?})", address);
|
||||
Ok(Some(0i32.into()))
|
||||
},
|
||||
ext::ContractCreateResult::Failed => {
|
||||
trace!(target: "wasm", "runtime: create contract fail");
|
||||
Ok(Some((-1i32).into()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Allocate memory using the wasm stack params
|
||||
pub fn malloc(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let amount = context.value_stack.pop_as::<i32>()? as u32;
|
||||
let previous_top = self.dynamic_top;
|
||||
self.dynamic_top = previous_top + amount;
|
||||
Ok(Some((previous_top as i32).into()))
|
||||
}
|
||||
|
||||
/// Allocate memory in wasm memory instance
|
||||
pub fn alloc(&mut self, amount: u32) -> Result<u32, Error> {
|
||||
let previous_top = self.dynamic_top;
|
||||
self.dynamic_top = previous_top + amount;
|
||||
Ok(previous_top.into())
|
||||
}
|
||||
|
||||
/// Report gas cost with the params passed in wasm stack
|
||||
fn gas(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let amount = context.value_stack.pop_as::<i32>()? as u64;
|
||||
if self.charge_gas(amount) {
|
||||
Ok(None)
|
||||
} else {
|
||||
Err(interpreter::Error::Trap(format!("Gas exceeds limits of {}", self.gas_limit)))
|
||||
}
|
||||
}
|
||||
|
||||
fn charge_gas(&mut self, amount: u64) -> bool {
|
||||
let prev = self.gas_counter;
|
||||
if prev + amount > self.gas_limit {
|
||||
// exceeds gas
|
||||
false
|
||||
} else {
|
||||
self.gas_counter = prev + amount;
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn h256_at(&self, ptr: WasmPtr) -> Result<H256, interpreter::Error> {
|
||||
Ok(H256::from_slice(&ptr.slice(32, &*self.memory)
|
||||
.map_err(|_| interpreter::Error::Trap("Memory access violation".to_owned()))?
|
||||
))
|
||||
}
|
||||
|
||||
fn pop_h256(&self, context: &mut interpreter::CallerContext) -> Result<H256, interpreter::Error> {
|
||||
let ptr = WasmPtr::from_i32(context.value_stack.pop_as::<i32>()?)
|
||||
.map_err(|_| interpreter::Error::Trap("Memory access violation".to_owned()))?;
|
||||
self.h256_at(ptr)
|
||||
}
|
||||
|
||||
fn pop_u256(&self, context: &mut interpreter::CallerContext) -> Result<U256, interpreter::Error> {
|
||||
let ptr = WasmPtr::from_i32(context.value_stack.pop_as::<i32>()?)
|
||||
.map_err(|_| interpreter::Error::Trap("Memory access violation".to_owned()))?;
|
||||
self.h256_at(ptr).map(Into::into)
|
||||
}
|
||||
|
||||
fn address_at(&self, ptr: WasmPtr) -> Result<Address, interpreter::Error> {
|
||||
Ok(Address::from_slice(&ptr.slice(20, &*self.memory)
|
||||
.map_err(|_| interpreter::Error::Trap("Memory access violation".to_owned()))?
|
||||
))
|
||||
}
|
||||
|
||||
fn pop_address(&self, context: &mut interpreter::CallerContext) -> Result<Address, interpreter::Error> {
|
||||
let ptr = WasmPtr::from_i32(context.value_stack.pop_as::<i32>()?)
|
||||
.map_err(|_| interpreter::Error::Trap("Memory access violation".to_owned()))?;
|
||||
self.address_at(ptr)
|
||||
}
|
||||
|
||||
fn user_trap(&mut self, _context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
Err(interpreter::Error::Trap("unknown trap".to_owned()))
|
||||
}
|
||||
|
||||
fn user_noop(&mut self,
|
||||
_context: interpreter::CallerContext
|
||||
) -> Result<Option<interpreter::RuntimeValue>, interpreter::Error> {
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Write call descriptor to wasm memory
|
||||
pub fn write_descriptor(&mut self, call_args: CallArgs) -> Result<WasmPtr, Error> {
|
||||
let d_ptr = self.alloc(16)?;
|
||||
|
||||
let args_len = call_args.len();
|
||||
let args_ptr = self.alloc(args_len)?;
|
||||
|
||||
// write call descriptor
|
||||
// call descriptor is [args_ptr, args_len, return_ptr, return_len]
|
||||
// all are 4 byte length, last 2 are zeroed
|
||||
let mut d_buf = [0u8; 16];
|
||||
LittleEndian::write_u32(&mut d_buf[0..4], args_ptr);
|
||||
LittleEndian::write_u32(&mut d_buf[4..8], args_len);
|
||||
self.memory.set(d_ptr, &d_buf)?;
|
||||
|
||||
// write call args to memory
|
||||
self.memory.set(args_ptr, &call_args.address)?;
|
||||
self.memory.set(args_ptr+20, &call_args.sender)?;
|
||||
self.memory.set(args_ptr+40, &call_args.origin)?;
|
||||
self.memory.set(args_ptr+60, &call_args.value)?;
|
||||
self.memory.set(args_ptr+92, &call_args.data)?;
|
||||
|
||||
Ok(d_ptr.into())
|
||||
}
|
||||
|
||||
fn debug_log(&mut self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let msg_len = context.value_stack.pop_as::<i32>()? as u32;
|
||||
let msg_ptr = context.value_stack.pop_as::<i32>()? as u32;
|
||||
|
||||
let msg = String::from_utf8(self.memory.get(msg_ptr, msg_len as usize)?)
|
||||
.map_err(|_| interpreter::Error::Trap("Debug log utf-8 decoding error".to_owned()))?;
|
||||
|
||||
trace!(target: "wasm", "Contract debug message: {}", msg);
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Query current gas left for execution
|
||||
pub fn gas_left(&self) -> Result<u64, Error> {
|
||||
if self.gas_counter > self.gas_limit { return Err(Error::InvalidGasState); }
|
||||
Ok(self.gas_limit - self.gas_counter)
|
||||
}
|
||||
|
||||
/// Shared memory reference
|
||||
pub fn memory(&self) -> &interpreter::MemoryInstance {
|
||||
&*self.memory
|
||||
}
|
||||
|
||||
fn mem_copy(&self, context: interpreter::CallerContext)
|
||||
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
|
||||
{
|
||||
let len = context.value_stack.pop_as::<i32>()? as u32;
|
||||
let dst = context.value_stack.pop_as::<i32>()? as u32;
|
||||
let src = context.value_stack.pop_as::<i32>()? as u32;
|
||||
|
||||
let mem = self.memory().get(src, len as usize)?;
|
||||
self.memory().set(dst, &mem)?;
|
||||
|
||||
Ok(Some(0i32.into()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> interpreter::UserFunctionExecutor for Runtime<'a> {
	// Dispatches "env"-module host calls by exported symbol name.
	fn execute(&mut self, name: &str, context: interpreter::CallerContext)
		-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
	{
		match name {
			"_malloc" => {
				self.malloc(context)
			},
			"_free" => {
				// Since it is arena allocator, free does nothing
				// todo: update if changed
				self.user_noop(context)
			},
			"_storage_read" => {
				self.storage_read(context)
			},
			"_storage_write" => {
				self.storage_write(context)
			},
			"_suicide" => {
				self.suicide(context)
			},
			"_create" => {
				self.create(context)
			},
			"_debug" => {
				self.debug_log(context)
			},
			"gas" => {
				self.gas(context)
			},
			"_emscripten_memcpy_big" => {
				self.mem_copy(context)
			},
			_ => {
				// unknown host symbol: trap rather than silently no-op
				trace!("Unknown env func: '{}'", name);
				self.user_trap(context)
			}
		}
	}
}
|
||||
@@ -1,274 +0,0 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::super::tests::{FakeExt, FakeCall, FakeCallType};
|
||||
use super::WasmInterpreter;
|
||||
use evm::{self, Evm, GasLeft};
|
||||
use action_params::{ActionParams, ActionValue};
|
||||
use util::{U256, H256, Address};
|
||||
|
||||
// Loads a compiled wasm test sample from the repository resources as `Vec<u8>`.
macro_rules! load_sample {
	($name: expr) => {
		include_bytes!(concat!("../../../res/wasm-tests/compiled/", $name)).to_vec()
	}
}
|
||||
|
||||
fn test_finalize(res: Result<GasLeft, evm::Error>) -> Result<U256, evm::Error> {
|
||||
match res {
|
||||
Ok(GasLeft::Known(gas)) => Ok(gas),
|
||||
Ok(GasLeft::NeedsReturn{..}) => unimplemented!(), // since ret is unimplemented.
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
// Test helper: builds a fresh interpreter, panicking on construction failure.
fn wasm_interpreter() -> WasmInterpreter {
	WasmInterpreter::new().expect("wasm interpreter to create without errors")
}
|
||||
|
||||
/// Empty contract does almost nothing except producing 1 (one) local node debug log message
#[test]
fn empty() {
	let code = load_sample!("empty.wasm");
	let address: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap();

	let mut params = ActionParams::default();
	params.address = address.clone();
	params.gas = U256::from(100_000);
	params.code = Some(Arc::new(code));
	let mut ext = FakeExt::new();

	let gas_left = {
		let mut interpreter = wasm_interpreter();
		test_finalize(interpreter.exec(params, &mut ext)).unwrap()
	};

	// pins the exact gas cost of running the empty contract
	assert_eq!(gas_left, U256::from(99_996));
}
|
||||
|
||||
// This test checks if the contract deserializes payload header properly.
// Contract is provided with receiver(address), sender, origin and transaction value
// logger.wasm writes all these provided fixed header fields to some arbitrary storage keys.
#[test]
fn logger() {
	let code = load_sample!("logger.wasm");
	let address: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap();
	let sender: Address = "0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d0d".parse().unwrap();
	let origin: Address = "0102030405060708090a0b0c0d0e0f1011121314".parse().unwrap();

	let mut params = ActionParams::default();
	params.address = address.clone();
	params.sender = sender.clone();
	params.origin = origin.clone();
	params.gas = U256::from(100_000);
	params.value = ActionValue::transfer(1_000_000_000);
	params.code = Some(Arc::new(code));
	let mut ext = FakeExt::new();

	let gas_left = {
		let mut interpreter = wasm_interpreter();
		test_finalize(interpreter.exec(params, &mut ext)).unwrap()
	};

	println!("ext.store: {:?}", ext.store);
	assert_eq!(gas_left, U256::from(99581));
	// keys 0x01..0x04 are chosen by logger.wasm itself
	let address_val: H256 = address.into();
	assert_eq!(
		ext.store.get(&"0100000000000000000000000000000000000000000000000000000000000000".parse().unwrap()).expect("storage key to exist"),
		&address_val,
		"Logger sets 0x01 key to the provided address"
	);
	let sender_val: H256 = sender.into();
	assert_eq!(
		ext.store.get(&"0200000000000000000000000000000000000000000000000000000000000000".parse().unwrap()).expect("storage key to exist"),
		&sender_val,
		"Logger sets 0x02 key to the provided sender"
	);
	let origin_val: H256 = origin.into();
	assert_eq!(
		ext.store.get(&"0300000000000000000000000000000000000000000000000000000000000000".parse().unwrap()).expect("storage key to exist"),
		&origin_val,
		"Logger sets 0x03 key to the provided origin"
	);
	assert_eq!(
		U256::from(ext.store.get(&"0400000000000000000000000000000000000000000000000000000000000000".parse().unwrap()).expect("storage key to exist")),
		U256::from(1_000_000_000),
		// fixed typo in the assertion message ("trasferred")
		"Logger sets 0x04 key to the transferred value"
	);
}
|
||||
|
||||
// This test checks if the contract can allocate memory and pass pointer to the result stream properly.
// 1. Contract is being provided with the call descriptor ptr
// 2. Descriptor ptr is 16 byte length
// 3. The last 8 bytes of call descriptor is the space for the contract to fill [result_ptr[4], result_len[4]]
//    if it has any result.
#[test]
fn identity() {
	let code = load_sample!("identity.wasm");
	let sender: Address = "01030507090b0d0f11131517191b1d1f21232527".parse().unwrap();

	let mut params = ActionParams::default();
	params.sender = sender.clone();
	params.gas = U256::from(100_000);
	params.code = Some(Arc::new(code));
	let mut ext = FakeExt::new();

	let (gas_left, result) = {
		let mut interpreter = wasm_interpreter();
		let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
		match result {
			GasLeft::Known(_) => { panic!("Identity contract should return payload"); },
			GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
		}
	};

	assert_eq!(gas_left, U256::from(99_689));

	assert_eq!(
		Address::from_slice(&result),
		sender,
		// fixed typo in the assertion message ("Idenity")
		"Identity test contract does not return the sender passed"
	);
}
|
||||
|
||||
// Dispersion test sends byte array and expect the contract to 'disperse' the original elements with
|
||||
// their modulo 19 dopant.
|
||||
// The result is always twice as long as the input.
|
||||
// This also tests byte-perfect memory allocation and in/out ptr lifecycle.
|
||||
#[test]
|
||||
fn dispersion() {
|
||||
let code = load_sample!("dispersion.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(vec![
|
||||
0u8, 125, 197, 255, 19
|
||||
]);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("Dispersion routine should return payload"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(gas_left, U256::from(99_402));
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![0u8, 0, 125, 11, 197, 7, 255, 8, 19, 0]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn suicide_not() {
|
||||
let code = load_sample!("suicidal.wasm");
|
||||
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
params.data = Some(vec![
|
||||
0u8
|
||||
]);
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let (gas_left, result) = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(_) => { panic!("Suicidal contract should return payload when had not actualy killed himself"); },
|
||||
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(gas_left, U256::from(99_703));
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![0u8]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn suicide() {
|
||||
let code = load_sample!("suicidal.wasm");
|
||||
|
||||
let refund: Address = "01030507090b0d0f11131517191b1d1f21232527".parse().unwrap();
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(code));
|
||||
|
||||
let mut args = vec![127u8];
|
||||
args.extend(refund.to_vec());
|
||||
params.data = Some(args);
|
||||
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let gas_left = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(gas) => gas,
|
||||
GasLeft::NeedsReturn { .. } => {
|
||||
panic!("Suicidal contract should not return anything when had killed itself");
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(gas_left, U256::from(99_747));
|
||||
assert!(ext.suicides.contains(&refund));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create() {
|
||||
let mut params = ActionParams::default();
|
||||
params.gas = U256::from(100_000);
|
||||
params.code = Some(Arc::new(load_sample!("creator.wasm")));
|
||||
params.data = Some(vec![0u8, 2, 4, 8, 16, 32, 64, 128]);
|
||||
params.value = ActionValue::transfer(1_000_000_000);
|
||||
|
||||
let mut ext = FakeExt::new();
|
||||
|
||||
let gas_left = {
|
||||
let mut interpreter = wasm_interpreter();
|
||||
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
|
||||
match result {
|
||||
GasLeft::Known(gas) => gas,
|
||||
GasLeft::NeedsReturn { .. } => {
|
||||
panic!("Create contract should not return anthing because ext always fails on creation");
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
trace!(target: "wasm", "fake_calls: {:?}", &ext.calls);
|
||||
assert!(ext.calls.contains(
|
||||
&FakeCall {
|
||||
call_type: FakeCallType::Create,
|
||||
gas: U256::from(99_778),
|
||||
sender_address: None,
|
||||
receive_address: None,
|
||||
value: Some(1_000_000_000.into()),
|
||||
data: vec![0u8, 2, 4, 8, 16, 32, 64, 128],
|
||||
code_address: None,
|
||||
}
|
||||
));
|
||||
assert_eq!(gas_left, U256::from(99_768));
|
||||
}
|
||||
@@ -3,7 +3,7 @@ description = "Parity Light Client Implementation"
|
||||
homepage = "http://parity.io"
|
||||
license = "GPL-3.0"
|
||||
name = "ethcore-light"
|
||||
version = "1.7.0"
|
||||
version = "1.8.0"
|
||||
authors = ["Parity Technologies <admin@parity.io>"]
|
||||
build = "build.rs"
|
||||
|
||||
@@ -14,12 +14,19 @@ build = "build.rs"
|
||||
log = "0.3"
|
||||
ethcore = { path = ".."}
|
||||
ethcore-util = { path = "../../util" }
|
||||
ethcore-bigint = { path = "../../util/bigint" }
|
||||
ethcore-bytes = { path = "../../util/bytes" }
|
||||
memorydb = { path = "../../util/memorydb" }
|
||||
patricia_trie = { path = "../../util/patricia_trie" }
|
||||
ethcore-network = { path = "../../util/network" }
|
||||
ethcore-io = { path = "../../util/io" }
|
||||
ethcore-ipc = { path = "../../ipc/rpc", optional = true }
|
||||
ethcore-devtools = { path = "../../devtools" }
|
||||
evm = { path = "../evm" }
|
||||
heapsize = "0.4"
|
||||
vm = { path = "../vm" }
|
||||
rlp = { path = "../../util/rlp" }
|
||||
rlp_derive = { path = "../../util/rlp_derive" }
|
||||
time = "0.1"
|
||||
smallvec = "0.4"
|
||||
futures = "0.1"
|
||||
@@ -28,7 +35,13 @@ itertools = "0.5"
|
||||
bincode = "0.8.0"
|
||||
serde = "1.0"
|
||||
serde_derive = "1.0"
|
||||
parking_lot = "0.4"
|
||||
stats = { path = "../../util/stats" }
|
||||
hash = { path = "../../util/hash" }
|
||||
triehash = { path = "../../util/triehash" }
|
||||
kvdb = { path = "../../util/kvdb" }
|
||||
kvdb-rocksdb = { path = "../../util/kvdb-rocksdb" }
|
||||
kvdb-memorydb = { path = "../../util/kvdb-memorydb" }
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
@@ -26,7 +26,9 @@ use ethcore::receipt::Receipt;
|
||||
|
||||
use stats::Corpus;
|
||||
use time::{SteadyTime, Duration};
|
||||
use util::{U256, H256, HeapSizeOf};
|
||||
use heapsize::HeapSizeOf;
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use util::cache::MemoryLruCache;
|
||||
|
||||
/// Configuration for how much data to cache.
|
||||
|
||||
@@ -21,8 +21,11 @@
|
||||
//! we discarded.
|
||||
|
||||
use ethcore::ids::BlockId;
|
||||
use util::{Bytes, H256, U256, HashDB, MemoryDB};
|
||||
use util::trie::{self, TrieMut, TrieDBMut, Trie, TrieDB, Recorder};
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use util::{HashDB, MemoryDB};
|
||||
use bytes::Bytes;
|
||||
use trie::{self, TrieMut, TrieDBMut, Trie, TrieDB, Recorder};
|
||||
use rlp::{RlpStream, UntrustedRlp};
|
||||
|
||||
// encode a key.
|
||||
@@ -130,7 +133,7 @@ pub fn compute_root<I>(cht_num: u64, iterable: I) -> Option<H256>
|
||||
}
|
||||
|
||||
if v.len() == SIZE as usize {
|
||||
Some(::util::triehash::trie_root(v))
|
||||
Some(::triehash::trie_root(v))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
||||
85
ethcore/light/src/client/fetch.rs
Normal file
85
ethcore/light/src/client/fetch.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Trait for fetching chain data.
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ethcore::encoded;
|
||||
use ethcore::engines::{EthEngine, StateDependentProof};
|
||||
use ethcore::machine::EthereumMachine;
|
||||
use ethcore::header::Header;
|
||||
use ethcore::receipt::Receipt;
|
||||
use futures::future::IntoFuture;
|
||||
use bigint::hash::H256;
|
||||
|
||||
/// Provides full chain data.
|
||||
pub trait ChainDataFetcher: Send + Sync + 'static {
|
||||
/// Error type when data unavailable.
|
||||
type Error: ::std::fmt::Debug;
|
||||
|
||||
/// Future for fetching block body.
|
||||
type Body: IntoFuture<Item=encoded::Block, Error=Self::Error>;
|
||||
/// Future for fetching block receipts.
|
||||
type Receipts: IntoFuture<Item=Vec<Receipt>, Error=Self::Error>;
|
||||
/// Future for fetching epoch transition
|
||||
type Transition: IntoFuture<Item=Vec<u8>, Error=Self::Error>;
|
||||
|
||||
/// Fetch a block body.
|
||||
fn block_body(&self, header: &Header) -> Self::Body;
|
||||
|
||||
/// Fetch block receipts.
|
||||
fn block_receipts(&self, header: &Header) -> Self::Receipts;
|
||||
|
||||
/// Fetch epoch transition proof at given header.
|
||||
fn epoch_transition(
|
||||
&self,
|
||||
_hash: H256,
|
||||
_engine: Arc<EthEngine>,
|
||||
_checker: Arc<StateDependentProof<EthereumMachine>>
|
||||
) -> Self::Transition;
|
||||
}
|
||||
|
||||
/// Fetcher implementation which cannot fetch anything.
|
||||
pub struct Unavailable;
|
||||
|
||||
/// Create a fetcher which has all data unavailable.
|
||||
pub fn unavailable() -> Unavailable { Unavailable }
|
||||
|
||||
impl ChainDataFetcher for Unavailable {
|
||||
type Error = &'static str;
|
||||
|
||||
type Body = Result<encoded::Block, &'static str>;
|
||||
type Receipts = Result<Vec<Receipt>, &'static str>;
|
||||
type Transition = Result<Vec<u8>, &'static str>;
|
||||
|
||||
fn block_body(&self, _header: &Header) -> Self::Body {
|
||||
Err("fetching block bodies unavailable")
|
||||
}
|
||||
|
||||
fn block_receipts(&self, _header: &Header) -> Self::Receipts {
|
||||
Err("fetching block receipts unavailable")
|
||||
}
|
||||
|
||||
fn epoch_transition(
|
||||
&self,
|
||||
_hash: H256,
|
||||
_engine: Arc<EthEngine>,
|
||||
_checker: Arc<StateDependentProof<EthereumMachine>>
|
||||
) -> Self::Transition {
|
||||
Err("fetching epoch transition proofs unavailable")
|
||||
}
|
||||
}
|
||||
@@ -18,11 +18,12 @@
|
||||
//!
|
||||
//! Unlike a full node's `BlockChain` this doesn't store much in the database.
|
||||
//! It stores candidates for the last 2048-4096 blocks as well as CHT roots for
|
||||
//! historical blocks all the way to the genesis.
|
||||
//! historical blocks all the way to the genesis. If the engine makes use
|
||||
//! of epoch transitions, those are stored as well.
|
||||
//!
|
||||
//! This is separate from the `BlockChain` for two reasons:
|
||||
//! - It stores only headers (and a pruned subset of them)
|
||||
//! - To allow for flexibility in the database layout once that's incorporated.
|
||||
//! - To allow for flexibility in the database layout..
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::Arc;
|
||||
@@ -30,17 +31,24 @@ use std::sync::Arc;
|
||||
use cht;
|
||||
|
||||
use ethcore::block_status::BlockStatus;
|
||||
use ethcore::error::BlockError;
|
||||
use ethcore::error::{BlockImportError, BlockError};
|
||||
use ethcore::encoded;
|
||||
use ethcore::header::Header;
|
||||
use ethcore::ids::BlockId;
|
||||
use ethcore::spec::Spec;
|
||||
use ethcore::engines::epoch::{
|
||||
Transition as EpochTransition,
|
||||
PendingTransition as PendingEpochTransition
|
||||
};
|
||||
|
||||
use rlp::{Encodable, Decodable, DecoderError, RlpStream, Rlp, UntrustedRlp};
|
||||
use util::{H256, U256, HeapSizeOf, RwLock};
|
||||
use util::kvdb::{DBTransaction, KeyValueDB};
|
||||
use heapsize::HeapSizeOf;
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::{H256, H256FastMap, H264};
|
||||
use kvdb::{DBTransaction, KeyValueDB};
|
||||
|
||||
use cache::Cache;
|
||||
use util::Mutex;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
@@ -52,6 +60,9 @@ const HISTORY: u64 = 2048;
|
||||
/// The best block key. Maps to an RLP list: [best_era, last_era]
|
||||
const CURRENT_KEY: &'static [u8] = &*b"best_and_latest";
|
||||
|
||||
/// Key storing the last canonical epoch transition.
|
||||
const LAST_CANONICAL_TRANSITION: &'static [u8] = &*b"canonical_transition";
|
||||
|
||||
/// Information about a block.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct BlockDescriptor {
|
||||
@@ -99,7 +110,6 @@ impl Encodable for Entry {
|
||||
|
||||
impl Decodable for Entry {
|
||||
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
|
||||
|
||||
let mut candidates = SmallVec::<[Candidate; 3]>::new();
|
||||
|
||||
for item in rlp.iter() {
|
||||
@@ -129,6 +139,42 @@ fn era_key(number: u64) -> String {
|
||||
format!("candidates_{}", number)
|
||||
}
|
||||
|
||||
fn pending_transition_key(block_hash: H256) -> H264 {
|
||||
const LEADING: u8 = 1;
|
||||
|
||||
let mut key = H264::default();
|
||||
|
||||
key[0] = LEADING;
|
||||
key.0[1..].copy_from_slice(&block_hash.0[..]);
|
||||
|
||||
key
|
||||
}
|
||||
|
||||
fn transition_key(block_hash: H256) -> H264 {
|
||||
const LEADING: u8 = 2;
|
||||
|
||||
let mut key = H264::default();
|
||||
|
||||
key[0] = LEADING;
|
||||
key.0[1..].copy_from_slice(&block_hash.0[..]);
|
||||
|
||||
key
|
||||
}
|
||||
|
||||
// encode last canonical transition entry: header and proof.
|
||||
fn encode_canonical_transition(header: &Header, proof: &[u8]) -> Vec<u8> {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append(header).append(&proof);
|
||||
stream.out()
|
||||
}
|
||||
|
||||
// decode last canonical transition entry.
|
||||
fn decode_canonical_transition(t: &[u8]) -> Result<(Header, &[u8]), DecoderError> {
|
||||
let rlp = UntrustedRlp::new(t);
|
||||
|
||||
Ok((rlp.val_at(0)?, rlp.at(1)?.data()?))
|
||||
}
|
||||
|
||||
/// Pending changes from `insert` to be applied after the database write has finished.
|
||||
pub struct PendingChanges {
|
||||
best_block: Option<BlockDescriptor>, // new best block.
|
||||
@@ -139,6 +185,7 @@ pub struct HeaderChain {
|
||||
genesis_header: encoded::Header, // special-case the genesis.
|
||||
candidates: RwLock<BTreeMap<u64, Entry>>,
|
||||
best_block: RwLock<BlockDescriptor>,
|
||||
live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>,
|
||||
db: Arc<KeyValueDB>,
|
||||
col: Option<u32>,
|
||||
cache: Arc<Mutex<Cache>>,
|
||||
@@ -146,8 +193,16 @@ pub struct HeaderChain {
|
||||
|
||||
impl HeaderChain {
|
||||
/// Create a new header chain given this genesis block and database to read from.
|
||||
pub fn new(db: Arc<KeyValueDB>, col: Option<u32>, genesis: &[u8], cache: Arc<Mutex<Cache>>) -> Result<Self, String> {
|
||||
use ethcore::views::HeaderView;
|
||||
pub fn new(
|
||||
db: Arc<KeyValueDB>,
|
||||
col: Option<u32>,
|
||||
spec: &Spec,
|
||||
cache: Arc<Mutex<Cache>>,
|
||||
) -> Result<Self, String> {
|
||||
let mut live_epoch_proofs = ::std::collections::HashMap::default();
|
||||
|
||||
let genesis = ::rlp::encode(&spec.genesis_header()).into_vec();
|
||||
let decoded_header = spec.genesis_header();
|
||||
|
||||
let chain = if let Some(current) = db.get(col, CURRENT_KEY)? {
|
||||
let (best_number, highest_number) = {
|
||||
@@ -158,12 +213,24 @@ impl HeaderChain {
|
||||
let mut cur_number = highest_number;
|
||||
let mut candidates = BTreeMap::new();
|
||||
|
||||
// load all era entries and referenced headers within them.
|
||||
// load all era entries, referenced headers within them,
|
||||
// and live epoch proofs.
|
||||
while let Some(entry) = db.get(col, era_key(cur_number).as_bytes())? {
|
||||
let entry: Entry = ::rlp::decode(&entry);
|
||||
trace!(target: "chain", "loaded header chain entry for era {} with {} candidates",
|
||||
cur_number, entry.candidates.len());
|
||||
|
||||
for c in &entry.candidates {
|
||||
let key = transition_key(c.hash);
|
||||
|
||||
if let Some(proof) = db.get(col, &*key)? {
|
||||
live_epoch_proofs.insert(c.hash, EpochTransition {
|
||||
block_hash: c.hash,
|
||||
block_number: cur_number,
|
||||
proof: proof.into_vec(),
|
||||
});
|
||||
}
|
||||
}
|
||||
candidates.insert(cur_number, entry);
|
||||
|
||||
cur_number -= 1;
|
||||
@@ -185,29 +252,42 @@ impl HeaderChain {
|
||||
};
|
||||
|
||||
HeaderChain {
|
||||
genesis_header: encoded::Header::new(genesis.to_owned()),
|
||||
genesis_header: encoded::Header::new(genesis),
|
||||
best_block: RwLock::new(best_block),
|
||||
candidates: RwLock::new(candidates),
|
||||
live_epoch_proofs: RwLock::new(live_epoch_proofs),
|
||||
db: db,
|
||||
col: col,
|
||||
cache: cache,
|
||||
}
|
||||
} else {
|
||||
let g_view = HeaderView::new(genesis);
|
||||
HeaderChain {
|
||||
genesis_header: encoded::Header::new(genesis.to_owned()),
|
||||
genesis_header: encoded::Header::new(genesis),
|
||||
best_block: RwLock::new(BlockDescriptor {
|
||||
hash: g_view.hash(),
|
||||
hash: decoded_header.hash(),
|
||||
number: 0,
|
||||
total_difficulty: g_view.difficulty(),
|
||||
total_difficulty: *decoded_header.difficulty(),
|
||||
}),
|
||||
candidates: RwLock::new(BTreeMap::new()),
|
||||
live_epoch_proofs: RwLock::new(live_epoch_proofs),
|
||||
db: db,
|
||||
col: col,
|
||||
cache: cache,
|
||||
}
|
||||
};
|
||||
|
||||
// instantiate genesis epoch data if it doesn't exist.
|
||||
if let None = chain.db.get(col, LAST_CANONICAL_TRANSITION)? {
|
||||
let genesis_data = spec.genesis_epoch_data()?;
|
||||
|
||||
{
|
||||
let mut batch = chain.db.transaction();
|
||||
let data = encode_canonical_transition(&decoded_header, &genesis_data);
|
||||
batch.put_vec(col, LAST_CANONICAL_TRANSITION, data);
|
||||
chain.db.write(batch)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(chain)
|
||||
}
|
||||
|
||||
@@ -216,10 +296,24 @@ impl HeaderChain {
|
||||
/// This blindly trusts that the data given to it is sensible.
|
||||
/// Returns a set of pending changes to be applied with `apply_pending`
|
||||
/// before the next call to insert and after the transaction has been written.
|
||||
pub fn insert(&self, transaction: &mut DBTransaction, header: Header) -> Result<PendingChanges, BlockError> {
|
||||
///
|
||||
/// If the block is an epoch transition, provide the transition along with
|
||||
/// the header.
|
||||
pub fn insert(
|
||||
&self,
|
||||
transaction: &mut DBTransaction,
|
||||
header: Header,
|
||||
transition_proof: Option<Vec<u8>>,
|
||||
) -> Result<PendingChanges, BlockImportError> {
|
||||
let hash = header.hash();
|
||||
let number = header.number();
|
||||
let parent_hash = *header.parent_hash();
|
||||
let transition = transition_proof.map(|proof| EpochTransition {
|
||||
block_hash: hash,
|
||||
block_number: number,
|
||||
proof: proof,
|
||||
});
|
||||
|
||||
let mut pending = PendingChanges {
|
||||
best_block: None,
|
||||
};
|
||||
@@ -235,7 +329,8 @@ impl HeaderChain {
|
||||
candidates.get(&(number - 1))
|
||||
.and_then(|entry| entry.candidates.iter().find(|c| c.hash == parent_hash))
|
||||
.map(|c| c.total_difficulty)
|
||||
.ok_or_else(|| BlockError::UnknownParent(parent_hash))?
|
||||
.ok_or_else(|| BlockError::UnknownParent(parent_hash))
|
||||
.map_err(BlockImportError::Block)?
|
||||
};
|
||||
|
||||
let total_difficulty = parent_td + *header.difficulty();
|
||||
@@ -260,8 +355,13 @@ impl HeaderChain {
|
||||
transaction.put(self.col, era_key(number).as_bytes(), &::rlp::encode(&*cur_era))
|
||||
}
|
||||
|
||||
let raw = ::rlp::encode(&header);
|
||||
transaction.put(self.col, &hash[..], &*raw);
|
||||
if let Some(transition) = transition {
|
||||
transaction.put(self.col, &*transition_key(hash), &transition.proof);
|
||||
self.live_epoch_proofs.write().insert(hash, transition);
|
||||
}
|
||||
|
||||
let raw = header.encoded().into_inner();
|
||||
transaction.put_vec(self.col, &hash[..], raw);
|
||||
|
||||
let (best_num, is_new_best) = {
|
||||
let cur_best = self.best_block.read();
|
||||
@@ -314,8 +414,10 @@ impl HeaderChain {
|
||||
let cht_num = cht::block_to_cht_number(earliest_era)
|
||||
.expect("fails only for number == 0; genesis never imported; qed");
|
||||
|
||||
let mut last_canonical_transition = None;
|
||||
let cht_root = {
|
||||
let mut i = earliest_era;
|
||||
let mut live_epoch_proofs = self.live_epoch_proofs.write();
|
||||
|
||||
// iterable function which removes the candidates as it goes
|
||||
// along. this will only be called until the CHT is complete.
|
||||
@@ -326,7 +428,25 @@ impl HeaderChain {
|
||||
|
||||
i += 1;
|
||||
|
||||
// prune old blocks and epoch proofs.
|
||||
for ancient in &era_entry.candidates {
|
||||
let maybe_transition = live_epoch_proofs.remove(&ancient.hash);
|
||||
if let Some(epoch_transition) = maybe_transition {
|
||||
transaction.delete(self.col, &*transition_key(ancient.hash));
|
||||
|
||||
if ancient.hash == era_entry.canonical_hash {
|
||||
last_canonical_transition = match self.db.get(self.col, &ancient.hash) {
|
||||
Err(e) => {
|
||||
warn!(target: "chain", "Error reading from DB: {}\n
|
||||
", e);
|
||||
None
|
||||
}
|
||||
Ok(None) => panic!("stored candidates always have corresponding headers; qed"),
|
||||
Ok(Some(header)) => Some((epoch_transition, ::rlp::decode(&header))),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
transaction.delete(self.col, &ancient.hash);
|
||||
}
|
||||
|
||||
@@ -340,6 +460,12 @@ impl HeaderChain {
|
||||
// write the CHT root to the database.
|
||||
debug!(target: "chain", "Produced CHT {} root: {:?}", cht_num, cht_root);
|
||||
transaction.put(self.col, cht_key(cht_num).as_bytes(), &::rlp::encode(&cht_root));
|
||||
|
||||
// update the last canonical transition proof
|
||||
if let Some((epoch_transition, header)) = last_canonical_transition {
|
||||
let x = encode_canonical_transition(&header, &epoch_transition.proof);
|
||||
transaction.put_vec(self.col, LAST_CANONICAL_TRANSITION, x);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -365,7 +491,7 @@ impl HeaderChain {
|
||||
/// will be returned.
|
||||
pub fn block_hash(&self, id: BlockId) -> Option<H256> {
|
||||
match id {
|
||||
BlockId::Earliest => Some(self.genesis_hash()),
|
||||
BlockId::Earliest | BlockId::Number(0) => Some(self.genesis_hash()),
|
||||
BlockId::Hash(hash) => Some(hash),
|
||||
BlockId::Number(num) => {
|
||||
if self.best_block.read().number < num { return None }
|
||||
@@ -405,6 +531,7 @@ impl HeaderChain {
|
||||
|
||||
match id {
|
||||
BlockId::Earliest | BlockId::Number(0) => Some(self.genesis_header.clone()),
|
||||
BlockId::Hash(hash) if hash == self.genesis_hash() => { Some(self.genesis_header.clone()) }
|
||||
BlockId::Hash(hash) => load_from_db(hash),
|
||||
BlockId::Number(num) => {
|
||||
if self.best_block.read().number < num { return None }
|
||||
@@ -485,7 +612,7 @@ impl HeaderChain {
|
||||
|
||||
/// Get the genesis hash.
|
||||
pub fn genesis_hash(&self) -> H256 {
|
||||
::util::Hashable::sha3(&self.genesis_header)
|
||||
self.genesis_header.hash()
|
||||
}
|
||||
|
||||
/// Get the best block's data.
|
||||
@@ -515,6 +642,56 @@ impl HeaderChain {
|
||||
false => BlockStatus::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
/// Insert a pending transition.
|
||||
pub fn insert_pending_transition(&self, batch: &mut DBTransaction, hash: H256, t: PendingEpochTransition) {
|
||||
let key = pending_transition_key(hash);
|
||||
batch.put(self.col, &*key, &*::rlp::encode(&t));
|
||||
}
|
||||
|
||||
/// Get pending transition for a specific block hash.
|
||||
pub fn pending_transition(&self, hash: H256) -> Option<PendingEpochTransition> {
|
||||
let key = pending_transition_key(hash);
|
||||
match self.db.get(self.col, &*key) {
|
||||
Ok(val) => val.map(|x| ::rlp::decode(&x)),
|
||||
Err(e) => {
|
||||
warn!(target: "chain", "Error reading from database: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the transition to the epoch the given parent hash is part of
|
||||
/// or transitions to.
|
||||
/// This will give the epoch that any children of this parent belong to.
|
||||
///
|
||||
/// The header corresponding the the parent hash must be stored already.
|
||||
pub fn epoch_transition_for(&self, parent_hash: H256) -> Option<(Header, Vec<u8>)> {
|
||||
// slow path: loop back block by block
|
||||
let live_proofs = self.live_epoch_proofs.read();
|
||||
|
||||
for hdr in self.ancestry_iter(BlockId::Hash(parent_hash)) {
|
||||
if let Some(transition) = live_proofs.get(&hdr.hash()).cloned() {
|
||||
return Some((hdr.decode(), transition.proof))
|
||||
}
|
||||
}
|
||||
|
||||
// any blocks left must be descendants of the last canonical transition block.
|
||||
match self.db.get(self.col, LAST_CANONICAL_TRANSITION) {
|
||||
Ok(x) => {
|
||||
let x = x.expect("last canonical transition always instantiated; qed");
|
||||
|
||||
let (hdr, proof) = decode_canonical_transition(&x)
|
||||
.expect("last canonical transition always encoded correctly; qed");
|
||||
|
||||
Some((hdr, proof.to_vec()))
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Error reading from DB: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HeapSizeOf for HeaderChain {
|
||||
@@ -551,12 +728,14 @@ mod tests {
|
||||
use ethcore::header::Header;
|
||||
use ethcore::spec::Spec;
|
||||
use cache::Cache;
|
||||
use kvdb::KeyValueDB;
|
||||
use kvdb_memorydb;
|
||||
|
||||
use time::Duration;
|
||||
use util::Mutex;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
fn make_db() -> Arc<::util::KeyValueDB> {
|
||||
Arc::new(::util::kvdb::in_memory(0))
|
||||
fn make_db() -> Arc<KeyValueDB> {
|
||||
Arc::new(kvdb_memorydb::create(0))
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -567,7 +746,7 @@ mod tests {
|
||||
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
|
||||
|
||||
let mut parent_hash = genesis_header.hash();
|
||||
let mut rolling_timestamp = genesis_header.timestamp();
|
||||
@@ -580,7 +759,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -600,7 +779,7 @@ mod tests {
|
||||
let db = make_db();
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
|
||||
|
||||
let mut parent_hash = genesis_header.hash();
|
||||
let mut rolling_timestamp = genesis_header.timestamp();
|
||||
@@ -613,7 +792,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -632,7 +811,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -656,7 +835,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -679,12 +858,10 @@ mod tests {
|
||||
#[test]
|
||||
fn earliest_is_latest() {
|
||||
let spec = Spec::new_test();
|
||||
let genesis_header = spec.genesis_header();
|
||||
let db = make_db();
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache).unwrap();
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
|
||||
|
||||
assert!(chain.block_header(BlockId::Earliest).is_some());
|
||||
assert!(chain.block_header(BlockId::Latest).is_some());
|
||||
@@ -699,7 +876,7 @@ mod tests {
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
{
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
|
||||
let mut parent_hash = genesis_header.hash();
|
||||
let mut rolling_timestamp = genesis_header.timestamp();
|
||||
for i in 1..10000 {
|
||||
@@ -711,7 +888,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -719,7 +896,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
|
||||
assert!(chain.block_header(BlockId::Number(10)).is_none());
|
||||
assert!(chain.block_header(BlockId::Number(9000)).is_some());
|
||||
assert!(chain.cht_root(2).is_some());
|
||||
@@ -735,7 +912,7 @@ mod tests {
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
{
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
|
||||
let mut parent_hash = genesis_header.hash();
|
||||
let mut rolling_timestamp = genesis_header.timestamp();
|
||||
|
||||
@@ -749,7 +926,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -766,7 +943,7 @@ mod tests {
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header).unwrap();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
@@ -777,8 +954,88 @@ mod tests {
|
||||
}
|
||||
|
||||
// after restoration, non-canonical eras should still be loaded.
|
||||
let chain = HeaderChain::new(db.clone(), None, &::rlp::encode(&genesis_header), cache.clone()).unwrap();
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
|
||||
assert_eq!(chain.block_header(BlockId::Latest).unwrap().number(), 10);
|
||||
assert!(chain.candidates.read().get(&100).is_some())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn genesis_header_available() {
|
||||
let spec = Spec::new_test();
|
||||
let genesis_header = spec.genesis_header();
|
||||
let db = make_db();
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
|
||||
|
||||
assert!(chain.block_header(BlockId::Earliest).is_some());
|
||||
assert!(chain.block_header(BlockId::Number(0)).is_some());
|
||||
assert!(chain.block_header(BlockId::Hash(genesis_header.hash())).is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn epoch_transitions_available_after_cht() {
|
||||
let spec = Spec::new_test();
|
||||
let genesis_header = spec.genesis_header();
|
||||
let db = make_db();
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
|
||||
|
||||
let mut parent_hash = genesis_header.hash();
|
||||
let mut rolling_timestamp = genesis_header.timestamp();
|
||||
for i in 1..6 {
|
||||
let mut header = Header::new();
|
||||
header.set_parent_hash(parent_hash);
|
||||
header.set_number(i);
|
||||
header.set_timestamp(rolling_timestamp);
|
||||
header.set_difficulty(*genesis_header.difficulty() * i.into());
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let epoch_proof = if i == 3 {
|
||||
Some(vec![1, 2, 3, 4])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let pending = chain.insert(&mut tx, header, epoch_proof).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
rolling_timestamp += 10;
|
||||
}
|
||||
|
||||
// these 3 should end up falling back to the genesis epoch proof in DB
|
||||
for i in 0..3 {
|
||||
let hash = chain.block_hash(BlockId::Number(i)).unwrap();
|
||||
assert_eq!(chain.epoch_transition_for(hash).unwrap().1, Vec::<u8>::new());
|
||||
}
|
||||
|
||||
// these are live.
|
||||
for i in 3..6 {
|
||||
let hash = chain.block_hash(BlockId::Number(i)).unwrap();
|
||||
assert_eq!(chain.epoch_transition_for(hash).unwrap().1, vec![1, 2, 3, 4]);
|
||||
}
|
||||
|
||||
for i in 6..10000 {
|
||||
let mut header = Header::new();
|
||||
header.set_parent_hash(parent_hash);
|
||||
header.set_number(i);
|
||||
header.set_timestamp(rolling_timestamp);
|
||||
header.set_difficulty(*genesis_header.difficulty() * i.into());
|
||||
parent_hash = header.hash();
|
||||
|
||||
let mut tx = db.transaction();
|
||||
let pending = chain.insert(&mut tx, header, None).unwrap();
|
||||
db.write(tx).unwrap();
|
||||
chain.apply_pending(pending);
|
||||
|
||||
rolling_timestamp += 10;
|
||||
}
|
||||
|
||||
// no live blocks have associated epoch proofs -- make sure we aren't leaking memory.
|
||||
assert!(chain.live_epoch_proofs.read().is_empty());
|
||||
assert_eq!(chain.epoch_transition_for(parent_hash).unwrap().1, vec![1, 2, 3, 4]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,20 +20,26 @@ use std::sync::{Weak, Arc};
|
||||
|
||||
use ethcore::block_status::BlockStatus;
|
||||
use ethcore::client::{ClientReport, EnvInfo};
|
||||
use ethcore::engines::Engine;
|
||||
use ethcore::engines::{epoch, EthEngine, EpochChange, EpochTransition, Proof};
|
||||
use ethcore::machine::EthereumMachine;
|
||||
use ethcore::error::BlockImportError;
|
||||
use ethcore::ids::BlockId;
|
||||
use ethcore::header::Header;
|
||||
use ethcore::header::{BlockNumber, Header};
|
||||
use ethcore::verification::queue::{self, HeaderQueue};
|
||||
use ethcore::blockchain_info::BlockChainInfo;
|
||||
use ethcore::spec::Spec;
|
||||
use ethcore::service::ClientIoMessage;
|
||||
use ethcore::encoded;
|
||||
use io::IoChannel;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use bigint::prelude::U256;
|
||||
use bigint::hash::H256;
|
||||
use futures::{IntoFuture, Future};
|
||||
|
||||
use util::{H256, U256, Mutex, RwLock};
|
||||
use util::kvdb::{KeyValueDB, CompactionProfile};
|
||||
use kvdb::KeyValueDB;
|
||||
use kvdb_rocksdb::CompactionProfile;
|
||||
|
||||
use self::fetch::ChainDataFetcher;
|
||||
use self::header_chain::{AncestryIter, HeaderChain};
|
||||
|
||||
use cache::Cache;
|
||||
@@ -43,6 +49,8 @@ pub use self::service::Service;
|
||||
mod header_chain;
|
||||
mod service;
|
||||
|
||||
pub mod fetch;
|
||||
|
||||
/// Configuration for the light client.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Config {
|
||||
@@ -58,6 +66,8 @@ pub struct Config {
|
||||
pub db_wal: bool,
|
||||
/// Should it do full verification of blocks?
|
||||
pub verify_full: bool,
|
||||
/// Should it check the seal of blocks?
|
||||
pub check_seal: bool,
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
@@ -69,12 +79,16 @@ impl Default for Config {
|
||||
db_compaction: CompactionProfile::default(),
|
||||
db_wal: true,
|
||||
verify_full: true,
|
||||
check_seal: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait for interacting with the header chain abstractly.
|
||||
pub trait LightChainClient: Send + Sync {
|
||||
/// Adds a new `LightChainNotify` listener.
|
||||
fn add_listener(&self, listener: Weak<LightChainNotify>);
|
||||
|
||||
/// Get chain info.
|
||||
fn chain_info(&self) -> BlockChainInfo;
|
||||
|
||||
@@ -97,15 +111,15 @@ pub trait LightChainClient: Send + Sync {
|
||||
/// Get an iterator over a block and its ancestry.
|
||||
fn ancestry_iter<'a>(&'a self, start: BlockId) -> Box<Iterator<Item=encoded::Header> + 'a>;
|
||||
|
||||
/// Get the signing network ID.
|
||||
fn signing_network_id(&self) -> Option<u64>;
|
||||
/// Get the signing chain ID.
|
||||
fn signing_chain_id(&self) -> Option<u64>;
|
||||
|
||||
/// Get environment info for execution at a given block.
|
||||
/// Fails if that block's header is not stored.
|
||||
fn env_info(&self, id: BlockId) -> Option<EnvInfo>;
|
||||
|
||||
/// Get a handle to the consensus engine.
|
||||
fn engine(&self) -> &Arc<Engine>;
|
||||
fn engine(&self) -> &Arc<EthEngine>;
|
||||
|
||||
/// Query whether a block is known.
|
||||
fn is_known(&self, hash: &H256) -> bool;
|
||||
@@ -123,7 +137,7 @@ pub trait LightChainClient: Send + Sync {
|
||||
fn cht_root(&self, i: usize) -> Option<H256>;
|
||||
|
||||
/// Get the EIP-86 transition block number.
|
||||
fn eip86_transition(&self) -> u64;
|
||||
fn eip86_transition(&self) -> BlockNumber;
|
||||
|
||||
/// Get a report of import activity since the last call.
|
||||
fn report(&self) -> ClientReport;
|
||||
@@ -151,30 +165,38 @@ impl<T: LightChainClient> AsLightClient for T {
|
||||
}
|
||||
|
||||
/// Light client implementation.
|
||||
pub struct Client {
|
||||
pub struct Client<T> {
|
||||
queue: HeaderQueue,
|
||||
engine: Arc<Engine>,
|
||||
engine: Arc<EthEngine>,
|
||||
chain: HeaderChain,
|
||||
report: RwLock<ClientReport>,
|
||||
import_lock: Mutex<()>,
|
||||
db: Arc<KeyValueDB>,
|
||||
listeners: RwLock<Vec<Weak<LightChainNotify>>>,
|
||||
fetcher: T,
|
||||
verify_full: bool,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
impl<T: ChainDataFetcher> Client<T> {
|
||||
/// Create a new `Client`.
|
||||
pub fn new(config: Config, db: Arc<KeyValueDB>, chain_col: Option<u32>, spec: &Spec, io_channel: IoChannel<ClientIoMessage>, cache: Arc<Mutex<Cache>>) -> Result<Self, String> {
|
||||
let gh = ::rlp::encode(&spec.genesis_header());
|
||||
|
||||
pub fn new(
|
||||
config: Config,
|
||||
db: Arc<KeyValueDB>,
|
||||
chain_col: Option<u32>,
|
||||
spec: &Spec,
|
||||
fetcher: T,
|
||||
io_channel: IoChannel<ClientIoMessage>,
|
||||
cache: Arc<Mutex<Cache>>
|
||||
) -> Result<Self, String> {
|
||||
Ok(Client {
|
||||
queue: HeaderQueue::new(config.queue, spec.engine.clone(), io_channel, true),
|
||||
queue: HeaderQueue::new(config.queue, spec.engine.clone(), io_channel, config.check_seal),
|
||||
engine: spec.engine.clone(),
|
||||
chain: HeaderChain::new(db.clone(), chain_col, &gh, cache)?,
|
||||
chain: HeaderChain::new(db.clone(), chain_col, &spec, cache)?,
|
||||
report: RwLock::new(ClientReport::default()),
|
||||
import_lock: Mutex::new(()),
|
||||
db: db,
|
||||
listeners: RwLock::new(vec![]),
|
||||
fetcher: fetcher,
|
||||
verify_full: config.verify_full,
|
||||
})
|
||||
}
|
||||
@@ -186,10 +208,24 @@ impl Client {
|
||||
|
||||
/// Create a new `Client` backed purely in-memory.
|
||||
/// This will ignore all database options in the configuration.
|
||||
pub fn in_memory(config: Config, spec: &Spec, io_channel: IoChannel<ClientIoMessage>, cache: Arc<Mutex<Cache>>) -> Self {
|
||||
let db = ::util::kvdb::in_memory(0);
|
||||
pub fn in_memory(
|
||||
config: Config,
|
||||
spec: &Spec,
|
||||
fetcher: T,
|
||||
io_channel: IoChannel<ClientIoMessage>,
|
||||
cache: Arc<Mutex<Cache>>
|
||||
) -> Self {
|
||||
let db = ::kvdb_memorydb::create(0);
|
||||
|
||||
Client::new(config, Arc::new(db), None, spec, io_channel, cache).expect("New DB creation infallible; qed")
|
||||
Client::new(
|
||||
config,
|
||||
Arc::new(db),
|
||||
None,
|
||||
spec,
|
||||
fetcher,
|
||||
io_channel,
|
||||
cache
|
||||
).expect("New DB creation infallible; qed")
|
||||
}
|
||||
|
||||
/// Import a header to the queue for additional verification.
|
||||
@@ -257,9 +293,9 @@ impl Client {
|
||||
self.chain.ancestry_iter(start)
|
||||
}
|
||||
|
||||
/// Get the signing network id.
|
||||
pub fn signing_network_id(&self) -> Option<u64> {
|
||||
self.engine.signing_network_id(&self.latest_env_info())
|
||||
/// Get the signing chain id.
|
||||
pub fn signing_chain_id(&self) -> Option<u64> {
|
||||
self.engine.signing_chain_id(&self.latest_env_info())
|
||||
}
|
||||
|
||||
/// Flush the header queue.
|
||||
@@ -282,24 +318,39 @@ impl Client {
|
||||
let mut good = Vec::new();
|
||||
for verified_header in self.queue.drain(MAX) {
|
||||
let (num, hash) = (verified_header.number(), verified_header.hash());
|
||||
trace!(target: "client", "importing block {}", num);
|
||||
|
||||
if self.verify_full && !self.check_header(&mut bad, &verified_header) {
|
||||
continue
|
||||
}
|
||||
|
||||
// TODO: `epoch_end_signal`, `is_epoch_end`.
|
||||
// proofs we get from the network would be _complete_, whereas we need
|
||||
// _incomplete_ signals
|
||||
let write_proof_result = match self.check_epoch_signal(&verified_header) {
|
||||
Ok(Some(proof)) => self.write_pending_proof(&verified_header, proof),
|
||||
Ok(None) => Ok(()),
|
||||
Err(e) =>
|
||||
panic!("Unable to fetch epoch transition proof: {:?}", e),
|
||||
};
|
||||
|
||||
if let Err(e) = write_proof_result {
|
||||
warn!(target: "client", "Error writing pending transition proof to DB: {:?} \
|
||||
The node may not be able to synchronize further.", e);
|
||||
}
|
||||
|
||||
let epoch_proof = self.engine.is_epoch_end(
|
||||
&verified_header,
|
||||
&|h| self.chain.block_header(BlockId::Hash(h)).map(|hdr| hdr.decode()),
|
||||
&|h| self.chain.pending_transition(h),
|
||||
);
|
||||
|
||||
let mut tx = self.db.transaction();
|
||||
let pending = match self.chain.insert(&mut tx, verified_header) {
|
||||
let pending = match self.chain.insert(&mut tx, verified_header, epoch_proof) {
|
||||
Ok(pending) => {
|
||||
good.push(hash);
|
||||
self.report.write().blocks_imported += 1;
|
||||
pending
|
||||
}
|
||||
Err(e) => {
|
||||
debug!(target: "client", "Error importing header {:?}: {}", (num, hash), e);
|
||||
debug!(target: "client", "Error importing header {:?}: {:?}", (num, hash), e);
|
||||
bad.push(hash);
|
||||
continue;
|
||||
}
|
||||
@@ -326,13 +377,13 @@ impl Client {
|
||||
|
||||
/// Get blockchain mem usage in bytes.
|
||||
pub fn chain_mem_used(&self) -> usize {
|
||||
use util::HeapSizeOf;
|
||||
use heapsize::HeapSizeOf;
|
||||
|
||||
self.chain.heap_size_of_children()
|
||||
}
|
||||
|
||||
/// Get a handle to the verification engine.
|
||||
pub fn engine(&self) -> &Arc<Engine> {
|
||||
pub fn engine(&self) -> &Arc<EthEngine> {
|
||||
&self.engine
|
||||
}
|
||||
|
||||
@@ -381,17 +432,21 @@ impl Client {
|
||||
}
|
||||
}
|
||||
|
||||
// return true if should skip, false otherwise. may push onto bad if
|
||||
// return false if should skip, true otherwise. may push onto bad if
|
||||
// should skip.
|
||||
fn check_header(&self, bad: &mut Vec<H256>, verified_header: &Header) -> bool {
|
||||
let hash = verified_header.hash();
|
||||
let parent_header = match self.chain.block_header(BlockId::Hash(*verified_header.parent_hash())) {
|
||||
Some(header) => header,
|
||||
None => return false, // skip import of block with missing parent.
|
||||
None => {
|
||||
trace!(target: "client", "No parent for block ({}, {})",
|
||||
verified_header.number(), hash);
|
||||
return false // skip import of block with missing parent.
|
||||
}
|
||||
};
|
||||
|
||||
// Verify Block Family
|
||||
let verify_family_result = self.engine.verify_block_family(&verified_header, &parent_header.decode(), None);
|
||||
let verify_family_result = self.engine.verify_block_family(&verified_header, &parent_header.decode());
|
||||
if let Err(e) = verify_family_result {
|
||||
warn!(target: "client", "Stage 3 block verification failed for #{} ({})\nError: {:?}",
|
||||
verified_header.number(), verified_header.hash(), e);
|
||||
@@ -400,7 +455,7 @@ impl Client {
|
||||
};
|
||||
|
||||
// "external" verification.
|
||||
let verify_external_result = self.engine.verify_block_external(&verified_header, None);
|
||||
let verify_external_result = self.engine.verify_block_external(&verified_header);
|
||||
if let Err(e) = verify_external_result {
|
||||
warn!(target: "client", "Stage 4 block verification failed for #{} ({})\nError: {:?}",
|
||||
verified_header.number(), verified_header.hash(), e);
|
||||
@@ -411,9 +466,80 @@ impl Client {
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn check_epoch_signal(&self, verified_header: &Header) -> Result<Option<Proof<EthereumMachine>>, T::Error> {
|
||||
use ethcore::machine::{AuxiliaryRequest, AuxiliaryData};
|
||||
|
||||
let mut block: Option<Vec<u8>> = None;
|
||||
let mut receipts: Option<Vec<_>> = None;
|
||||
|
||||
loop {
|
||||
|
||||
|
||||
let is_signal = {
|
||||
let auxiliary = AuxiliaryData {
|
||||
bytes: block.as_ref().map(|x| &x[..]),
|
||||
receipts: receipts.as_ref().map(|x| &x[..]),
|
||||
};
|
||||
|
||||
self.engine.signals_epoch_end(verified_header, auxiliary)
|
||||
};
|
||||
|
||||
// check with any auxiliary data fetched so far
|
||||
match is_signal {
|
||||
EpochChange::No => return Ok(None),
|
||||
EpochChange::Yes(proof) => return Ok(Some(proof)),
|
||||
EpochChange::Unsure(unsure) => {
|
||||
let (b, r) = match unsure {
|
||||
AuxiliaryRequest::Body =>
|
||||
(Some(self.fetcher.block_body(verified_header)), None),
|
||||
AuxiliaryRequest::Receipts =>
|
||||
(None, Some(self.fetcher.block_receipts(verified_header))),
|
||||
AuxiliaryRequest::Both => (
|
||||
Some(self.fetcher.block_body(verified_header)),
|
||||
Some(self.fetcher.block_receipts(verified_header)),
|
||||
),
|
||||
};
|
||||
|
||||
if let Some(b) = b {
|
||||
block = Some(b.into_future().wait()?.into_inner());
|
||||
}
|
||||
|
||||
if let Some(r) = r {
|
||||
receipts = Some(r.into_future().wait()?);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// attempts to fetch the epoch proof from the network until successful.
|
||||
fn write_pending_proof(&self, header: &Header, proof: Proof<EthereumMachine>) -> Result<(), T::Error> {
|
||||
let proof = match proof {
|
||||
Proof::Known(known) => known,
|
||||
Proof::WithState(state_dependent) => {
|
||||
self.fetcher.epoch_transition(
|
||||
header.hash(),
|
||||
self.engine.clone(),
|
||||
state_dependent
|
||||
).into_future().wait()?
|
||||
}
|
||||
};
|
||||
|
||||
let mut batch = self.db.transaction();
|
||||
self.chain.insert_pending_transition(&mut batch, header.hash(), epoch::PendingTransition {
|
||||
proof: proof,
|
||||
});
|
||||
self.db.write_buffered(batch);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl LightChainClient for Client {
|
||||
impl<T: ChainDataFetcher> LightChainClient for Client<T> {
|
||||
fn add_listener(&self, listener: Weak<LightChainNotify>) {
|
||||
Client::add_listener(self, listener)
|
||||
}
|
||||
|
||||
fn chain_info(&self) -> BlockChainInfo { Client::chain_info(self) }
|
||||
|
||||
fn queue_header(&self, header: Header) -> Result<H256, BlockImportError> {
|
||||
@@ -440,15 +566,15 @@ impl LightChainClient for Client {
|
||||
Box::new(Client::ancestry_iter(self, start))
|
||||
}
|
||||
|
||||
fn signing_network_id(&self) -> Option<u64> {
|
||||
Client::signing_network_id(self)
|
||||
fn signing_chain_id(&self) -> Option<u64> {
|
||||
Client::signing_chain_id(self)
|
||||
}
|
||||
|
||||
fn env_info(&self, id: BlockId) -> Option<EnvInfo> {
|
||||
Client::env_info(self, id)
|
||||
}
|
||||
|
||||
fn engine(&self) -> &Arc<Engine> {
|
||||
fn engine(&self) -> &Arc<EthEngine> {
|
||||
Client::engine(self)
|
||||
}
|
||||
|
||||
@@ -472,7 +598,7 @@ impl LightChainClient for Client {
|
||||
Client::cht_root(self, i)
|
||||
}
|
||||
|
||||
fn eip86_transition(&self) -> u64 {
|
||||
fn eip86_transition(&self) -> BlockNumber {
|
||||
self.engine().params().eip86_transition
|
||||
}
|
||||
|
||||
@@ -480,3 +606,29 @@ impl LightChainClient for Client {
|
||||
Client::report(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ChainDataFetcher> ::ethcore::client::EngineClient for Client<T> {
|
||||
fn update_sealing(&self) { }
|
||||
fn submit_seal(&self, _block_hash: H256, _seal: Vec<Vec<u8>>) { }
|
||||
fn broadcast_consensus_message(&self, _message: Vec<u8>) { }
|
||||
|
||||
fn epoch_transition_for(&self, parent_hash: H256) -> Option<EpochTransition> {
|
||||
self.chain.epoch_transition_for(parent_hash).map(|(hdr, proof)| EpochTransition {
|
||||
block_hash: hdr.hash(),
|
||||
block_number: hdr.number(),
|
||||
proof: proof,
|
||||
})
|
||||
}
|
||||
|
||||
fn chain_info(&self) -> BlockChainInfo {
|
||||
Client::chain_info(self)
|
||||
}
|
||||
|
||||
fn as_full_client(&self) -> Option<&::ethcore::client::BlockChainClient> {
|
||||
None
|
||||
}
|
||||
|
||||
fn block_number(&self, id: BlockId) -> Option<BlockNumber> {
|
||||
self.block_header(id).map(|hdr| hdr.number())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,12 +25,12 @@ use ethcore::db;
|
||||
use ethcore::service::ClientIoMessage;
|
||||
use ethcore::spec::Spec;
|
||||
use io::{IoContext, IoError, IoHandler, IoService};
|
||||
use util::kvdb::{Database, DatabaseConfig};
|
||||
use kvdb_rocksdb::{Database, DatabaseConfig};
|
||||
|
||||
use cache::Cache;
|
||||
use util::Mutex;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use super::{Client, Config as ClientConfig};
|
||||
use super::{ChainDataFetcher, Client, Config as ClientConfig};
|
||||
|
||||
/// Errors on service initialization.
|
||||
#[derive(Debug)]
|
||||
@@ -51,23 +51,19 @@ impl fmt::Display for Error {
|
||||
}
|
||||
|
||||
/// Light client service.
|
||||
pub struct Service {
|
||||
client: Arc<Client>,
|
||||
pub struct Service<T> {
|
||||
client: Arc<Client<T>>,
|
||||
io_service: IoService<ClientIoMessage>,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
impl<T: ChainDataFetcher> Service<T> {
|
||||
/// Start the service: initialize I/O workers and client itself.
|
||||
pub fn start(config: ClientConfig, spec: &Spec, path: &Path, cache: Arc<Mutex<Cache>>) -> Result<Self, Error> {
|
||||
pub fn start(config: ClientConfig, spec: &Spec, fetcher: T, path: &Path, cache: Arc<Mutex<Cache>>) -> Result<Self, Error> {
|
||||
|
||||
// initialize database.
|
||||
let mut db_config = DatabaseConfig::with_columns(db::NUM_COLUMNS);
|
||||
|
||||
// give all rocksdb cache to the header chain column.
|
||||
if let Some(size) = config.db_cache_size {
|
||||
db_config.set_cache(db::COL_LIGHT_CHAIN, size);
|
||||
}
|
||||
|
||||
db_config.memory_budget = config.db_cache_size;
|
||||
db_config.compaction = config.db_compaction;
|
||||
db_config.wal = config.db_wal;
|
||||
|
||||
@@ -81,10 +77,14 @@ impl Service {
|
||||
db,
|
||||
db::COL_LIGHT_CHAIN,
|
||||
spec,
|
||||
fetcher,
|
||||
io_service.channel(),
|
||||
cache,
|
||||
).map_err(Error::Database)?);
|
||||
|
||||
io_service.register_handler(Arc::new(ImportBlocks(client.clone()))).map_err(Error::Io)?;
|
||||
spec.engine.register_client(Arc::downgrade(&client) as _);
|
||||
|
||||
Ok(Service {
|
||||
client: client,
|
||||
io_service: io_service,
|
||||
@@ -97,14 +97,14 @@ impl Service {
|
||||
}
|
||||
|
||||
/// Get a handle to the client.
|
||||
pub fn client(&self) -> &Arc<Client> {
|
||||
pub fn client(&self) -> &Arc<Client<T>> {
|
||||
&self.client
|
||||
}
|
||||
}
|
||||
|
||||
struct ImportBlocks(Arc<Client>);
|
||||
struct ImportBlocks<T>(Arc<Client<T>>);
|
||||
|
||||
impl IoHandler<ClientIoMessage> for ImportBlocks {
|
||||
impl<T: ChainDataFetcher> IoHandler<ClientIoMessage> for ImportBlocks<T> {
|
||||
fn message(&self, _io: &IoContext<ClientIoMessage>, message: &ClientIoMessage) {
|
||||
if let ClientIoMessage::BlockVerified = *message {
|
||||
self.0.import_verified();
|
||||
@@ -117,11 +117,12 @@ mod tests {
|
||||
use super::Service;
|
||||
use devtools::RandomTempPath;
|
||||
use ethcore::spec::Spec;
|
||||
|
||||
|
||||
use std::sync::Arc;
|
||||
use cache::Cache;
|
||||
use client::fetch;
|
||||
use time::Duration;
|
||||
use util::Mutex;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
@@ -129,6 +130,6 @@ mod tests {
|
||||
let temp_path = RandomTempPath::new();
|
||||
let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::hours(6))));
|
||||
|
||||
Service::start(Default::default(), &spec, temp_path.as_path(), cache).unwrap();
|
||||
Service::start(Default::default(), &spec, fetch::unavailable(), temp_path.as_path(), cache).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,16 +70,30 @@ extern crate bincode;
|
||||
extern crate ethcore_io as io;
|
||||
extern crate ethcore_network as network;
|
||||
extern crate ethcore_util as util;
|
||||
extern crate ethcore_bigint as bigint;
|
||||
extern crate ethcore_bytes as bytes;
|
||||
extern crate ethcore;
|
||||
extern crate evm;
|
||||
extern crate heapsize;
|
||||
extern crate futures;
|
||||
extern crate itertools;
|
||||
extern crate memorydb;
|
||||
extern crate patricia_trie as trie;
|
||||
extern crate rand;
|
||||
extern crate rlp;
|
||||
extern crate parking_lot;
|
||||
#[macro_use]
|
||||
extern crate rlp_derive;
|
||||
extern crate serde;
|
||||
extern crate smallvec;
|
||||
extern crate stats;
|
||||
extern crate time;
|
||||
extern crate vm;
|
||||
extern crate hash;
|
||||
extern crate triehash;
|
||||
extern crate kvdb;
|
||||
extern crate kvdb_memorydb;
|
||||
extern crate kvdb_rocksdb;
|
||||
|
||||
#[cfg(feature = "ipc")]
|
||||
extern crate ethcore_ipc as ipc;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user