Merge branch 'master' of github.com:gavofyork/ethcore-util into network

Conflicts:
	src/triehash.rs
This commit is contained in:
arkpar 2015-12-03 15:15:17 +01:00
commit 34acc420f2
38 changed files with 2027 additions and 481 deletions

1
.gitignore vendored
View File

@ -11,6 +11,7 @@ Cargo.lock
# Generated by Cargo
/target/
/json-tests/target/
# Vim
*.swp

3
.gitmodules vendored
View File

@ -1,3 +0,0 @@
[submodule "tests"]
path = tests
url = https://github.com/ethereum/tests

View File

@ -20,3 +20,6 @@ num = "0.1"
lazy_static = "0.1.*"
eth-secp256k1 = { git = "https://github.com/arkpar/rust-secp256k1.git" }
rust-crypto = "0.2.34"
[dev-dependencies]
json-tests = { path = "json-tests" }

8
json-tests/Cargo.toml Normal file
View File

@ -0,0 +1,8 @@
[package]
name = "json-tests"
version = "0.1.0"
authors = ["debris <marek.kotewicz@gmail.com>"]
[dependencies]
rustc-serialize = "0.3"
glob = "*"

15
json-tests/README.md Normal file
View File

@ -0,0 +1,15 @@
# How to write json test file?
Because it's very hard to write generic json test files, each subdirectory should follow its own
convention. However, all json files within the same directory should be consistent.
### Test files should always contain a single test with input and output.
```json
{
"input": ...,
"output": ...
}
```
As a reference, please use trietests.

BIN
json-tests/json/.DS_Store vendored Normal file

Binary file not shown.

View File

@ -0,0 +1,39 @@
# Rlp tests guideline
Rlp can be tested in various ways. It can encode/decode a value or an array of values. Let's start with encoding.
Each operation must have field:
- `operation` - `append`, `append_list`, `append_empty` or `append_raw`
In addition, `append` and `append_raw` must define a `value` field:
- `value` - data
Also, `append_raw` and `append_list` require a `len` field
- `len` - integer
### Encoding Test Example
```json
{
"input":
[
{
"operation": "append_list",
"len": 2
},
{
"operation": "append",
"value": "cat"
},
{
"operation": "append",
"value": "dog"
}
],
"output": "0xc88363617183646f67"
}
```

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "\u0000"
}
],
"output": "0x00"
}

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "\u0001"
}
],
"output": "0x01"
}

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "\u007f"
}
],
"output": "0x7f"
}

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "\u0000"
}
],
"output": "0x00"
}

View File

@ -0,0 +1,9 @@
{
"input":
[
{
"operation": "append_empty"
}
],
"output": "0x80"
}

View File

@ -0,0 +1,38 @@
{
"input":
[
{
"operation": "append_list",
"len": 3
},
{
"operation": "append_list",
"len": 0
},
{
"operation": "append_list",
"len": 1
},
{
"operation": "append_list",
"len": 0
},
{
"operation": "append_list",
"len": 2
},
{
"operation": "append_list",
"len": 0
},
{
"operation": "append_list",
"len": 1
},
{
"operation": "append_list",
"len": 0
}
],
"output": "0xc7c0c1c0c3c0c1c0"
}

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "0x0400"
}
],
"output": "0x820400"
}

View File

@ -0,0 +1,22 @@
{
"input":
[
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": ""
},
{
"operation": "append",
"value": ""
},
{
"operation": "append",
"value": ""
}
],
"output": "0xc3808080"
}

View File

@ -0,0 +1,19 @@
{
"input":
[
{
"operation": "append_list",
"len": 3
},
{
"operation": "append_empty"
},
{
"operation": "append_empty"
},
{
"operation": "append_empty"
}
],
"output": "0xc3808080"
}

View File

@ -0,0 +1,521 @@
{
"input": [
{
"operation": "append_list",
"len": 32
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
},
{
"operation": "append_list",
"len": 3
},
{
"operation": "append",
"value": "asdf"
},
{
"operation": "append",
"value": "qwer"
},
{
"operation": "append",
"value": "zxcv"
}],
"output": "0xf90200cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376cf84617364668471776572847a786376"
}

View File

@ -0,0 +1,10 @@
{
"input":
[
{
"operation": "append",
"value": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Curabitur mauris magna, suscipit sed vehicula non, iaculis faucibus tortor. Proin suscipit ultricies malesuada. Duis tortor elit, dictum quis tristique eu, ultrices at risus. Morbi a est imperdiet mi ullamcorper aliquet suscipit nec lorem. Aenean quis leo mollis, vulputate elit varius, consequat enim. Nulla ultrices turpis justo, et posuere urna consectetur nec. Proin non convallis metus. Donec tempor ipsum in mauris congue sollicitudin. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Suspendisse convallis sem vel massa faucibus, eget lacinia lacus tempor. Nulla quis ultricies purus. Proin auctor rhoncus nibh condimentum mollis. Aliquam consequat enim at metus luctus, a eleifend purus egestas. Curabitur at nibh metus. Nam bibendum, neque at auctor tristique, lorem libero aliquet arcu, non interdum tellus lectus sit amet eros. Cras rhoncus, metus ac ornare cursus, dolor justo ultrices metus, at ullamcorper volutpat"
}
],
"output": "0xb904004c6f72656d20697073756d20646f6c6f722073697420616d65742c20636f6e73656374657475722061646970697363696e6720656c69742e20437572616269747572206d6175726973206d61676e612c20737573636970697420736564207665686963756c61206e6f6e2c20696163756c697320666175636962757320746f72746f722e2050726f696e20737573636970697420756c74726963696573206d616c6573756164612e204475697320746f72746f7220656c69742c2064696374756d2071756973207472697374697175652065752c20756c7472696365732061742072697375732e204d6f72626920612065737420696d70657264696574206d6920756c6c616d636f7270657220616c6971756574207375736369706974206e6563206c6f72656d2e2041656e65616e2071756973206c656f206d6f6c6c69732c2076756c70757461746520656c6974207661726975732c20636f6e73657175617420656e696d2e204e756c6c6120756c74726963657320747572706973206a7573746f2c20657420706f73756572652075726e6120636f6e7365637465747572206e65632e2050726f696e206e6f6e20636f6e76616c6c6973206d657475732e20446f6e65632074656d706f7220697073756d20696e206d617572697320636f6e67756520736f6c6c696369747564696e2e20566573746962756c756d20616e746520697073756d207072696d697320696e206661756369627573206f726369206c756374757320657420756c74726963657320706f737565726520637562696c69612043757261653b2053757370656e646973736520636f6e76616c6c69732073656d2076656c206d617373612066617563696275732c2065676574206c6163696e6961206c616375732074656d706f722e204e756c6c61207175697320756c747269636965732070757275732e2050726f696e20617563746f722072686f6e637573206e69626820636f6e64696d656e74756d206d6f6c6c69732e20416c697175616d20636f6e73657175617420656e696d206174206d65747573206c75637475732c206120656c656966656e6420707572757320656765737461732e20437572616269747572206174206e696268206d657475732e204e616d20626962656e64756d2c206e6571756520617420617563746f72207472697374697175652c206c6f72656d206c696265726f20616c697175657420617263752c206e6f6e20696e74657264756d2074656c6c7573206c65637475732073697420616d65742065726f732e20437261732072686f6e6375732c206d65747573206163206f726e617265206375727375732c20646f6c6f72206a7573746f20756c7472696
36573206d657475732c20617420756c6c616d636f7270657220766f6c7574706174"
}

View File

@ -0,0 +1,35 @@
# Trie tests guideline
Trie test input is an array of operations. Each operation must have 2 fields:
- `operation` - string, either `insert` or `remove`
- `key` - string, or hex value prefixed with `0x`
And an optional field:
- `value` - which is used by the `insert` operation
### Example
```json
{
"input":
[
{
"operation": "insert",
"key": "world",
"value": "hello"
},
{
"operation": "insert",
"key": "0x1234",
"value": "ooooops"
},
{
"operation": "remove",
"key": "0x1234"
}
],
"output": "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84"
}
```

View File

@ -0,0 +1,11 @@
{
"input":
[
{
"operation": "insert",
"key": "A",
"value": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
],
"output": "0xd23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab"
}

View File

@ -0,0 +1,229 @@
{
"input": [
{
"operation": "insert",
"key": "0x04110d816c380812a427968ece99b1c963dfbce6",
"value": "something"
},
{
"operation": "insert",
"key": "0x095e7baea6a6c7c4c2dfeb977efac326af552d87",
"value": "something"
},
{
"operation": "insert",
"key": "0x0a517d755cebbf66312b30fff713666a9cb917e0",
"value": "something"
},
{
"operation": "insert",
"key": "0x24dd378f51adc67a50e339e8031fe9bd4aafab36",
"value": "something"
},
{
"operation": "insert",
"key": "0x293f982d000532a7861ab122bdc4bbfd26bf9030",
"value": "something"
},
{
"operation": "insert",
"key": "0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5",
"value": "something"
},
{
"operation": "insert",
"key": "0x31c640b92c21a1f1465c91070b4b3b4d6854195f",
"value": "something"
},
{
"operation": "insert",
"key": "0x37f998764813b136ddf5a754f34063fd03065e36",
"value": "something"
},
{
"operation": "insert",
"key": "0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a",
"value": "something"
},
{
"operation": "insert",
"key": "0x4f36659fa632310b6ec438dea4085b522a2dd077",
"value": "something"
},
{
"operation": "insert",
"key": "0x62c01474f089b07dae603491675dc5b5748f7049",
"value": "something"
},
{
"operation": "insert",
"key": "0x729af7294be595a0efd7d891c9e51f89c07950c7",
"value": "something"
},
{
"operation": "insert",
"key": "0x83e3e5a16d3b696a0314b30b2534804dd5e11197",
"value": "something"
},
{
"operation": "insert",
"key": "0x8703df2417e0d7c59d063caa9583cb10a4d20532",
"value": "something"
},
{
"operation": "insert",
"key": "0x8dffcd74e5b5923512916c6a64b502689cfa65e1",
"value": "something"
},
{
"operation": "insert",
"key": "0x95a4d7cccb5204733874fa87285a176fe1e9e240",
"value": "something"
},
{
"operation": "insert",
"key": "0x99b2fcba8120bedd048fe79f5262a6690ed38c39",
"value": "something"
},
{
"operation": "insert",
"key": "0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf",
"value": "something"
},
{
"operation": "insert",
"key": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"value": "something"
},
{
"operation": "insert",
"key": "0xa9647f4a0a14042d91dc33c0328030a7157c93ae",
"value": "something"
},
{
"operation": "insert",
"key": "0xaa6cffe5185732689c18f37a7f86170cb7304c2a",
"value": "something"
},
{
"operation": "insert",
"key": "0xaae4a2e3c51c04606dcb3723456e58f3ed214f45",
"value": "something"
},
{
"operation": "insert",
"key": "0xc37a43e940dfb5baf581a0b82b351d48305fc885",
"value": "something"
},
{
"operation": "insert",
"key": "0xd2571607e241ecf590ed94b12d87c94babe36db6",
"value": "something"
},
{
"operation": "insert",
"key": "0xf735071cbee190d76b704ce68384fc21e389fbe7",
"value": "something"
},
{
"operation": "remove",
"key": "0x04110d816c380812a427968ece99b1c963dfbce6"
},
{
"operation": "remove",
"key": "0x095e7baea6a6c7c4c2dfeb977efac326af552d87"
},
{
"operation": "remove",
"key": "0x0a517d755cebbf66312b30fff713666a9cb917e0"
},
{
"operation": "remove",
"key": "0x24dd378f51adc67a50e339e8031fe9bd4aafab36"
},
{
"operation": "remove",
"key": "0x293f982d000532a7861ab122bdc4bbfd26bf9030"
},
{
"operation": "remove",
"key": "0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5"
},
{
"operation": "remove",
"key": "0x31c640b92c21a1f1465c91070b4b3b4d6854195f"
},
{
"operation": "remove",
"key": "0x37f998764813b136ddf5a754f34063fd03065e36"
},
{
"operation": "remove",
"key": "0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a"
},
{
"operation": "remove",
"key": "0x4f36659fa632310b6ec438dea4085b522a2dd077"
},
{
"operation": "remove",
"key": "0x62c01474f089b07dae603491675dc5b5748f7049"
},
{
"operation": "remove",
"key": "0x729af7294be595a0efd7d891c9e51f89c07950c7"
},
{
"operation": "remove",
"key": "0x83e3e5a16d3b696a0314b30b2534804dd5e11197"
},
{
"operation": "remove",
"key": "0x8703df2417e0d7c59d063caa9583cb10a4d20532"
},
{
"operation": "remove",
"key": "0x8dffcd74e5b5923512916c6a64b502689cfa65e1"
},
{
"operation": "remove",
"key": "0x95a4d7cccb5204733874fa87285a176fe1e9e240"
},
{
"operation": "remove",
"key": "0x99b2fcba8120bedd048fe79f5262a6690ed38c39"
},
{
"operation": "remove",
"key": "0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf"
},
{
"operation": "remove",
"key": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b"
},
{
"operation": "remove",
"key": "0xa9647f4a0a14042d91dc33c0328030a7157c93ae"
},
{
"operation": "remove",
"key": "0xaa6cffe5185732689c18f37a7f86170cb7304c2a"
},
{
"operation": "remove",
"key": "0xaae4a2e3c51c04606dcb3723456e58f3ed214f45"
},
{
"operation": "remove",
"key": "0xc37a43e940dfb5baf581a0b82b351d48305fc885"
},
{
"operation": "remove",
"key": "0xd2571607e241ecf590ed94b12d87c94babe36db6"
},
{
"operation": "remove",
"key": "0xf735071cbee190d76b704ce68384fc21e389fbe7"
}],
"output": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
}

View File

@ -0,0 +1,21 @@
{
"input":
[
{
"operation": "insert",
"key": "doe",
"value": "reindeer"
},
{
"operation": "insert",
"key": "dogglesworth",
"value": "cat"
},
{
"operation": "insert",
"key": "dog",
"value": "puppy"
}
],
"output": "0x8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3"
}

View File

@ -0,0 +1,4 @@
{
"input": [],
"output": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
}

View File

@ -0,0 +1,44 @@
{
"input":
[
{
"operation": "insert",
"key": "do",
"value": "verb"
},
{
"operation": "insert",
"key": "ether",
"value": "wookiedoo"
},
{
"operation": "insert",
"key": "horse",
"value": "stallion"
},
{
"operation": "insert",
"key": "shaman",
"value": "horse"
},
{
"operation": "insert",
"key": "doge",
"value": "coin"
},
{
"operation": "remove",
"key": "ether"
},
{
"operation": "insert",
"key": "dog",
"value": "puppy"
},
{
"operation": "remove",
"key": "shaman"
}
],
"output": "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84"
}

View File

@ -0,0 +1,16 @@
{
"input":
[
{
"operation": "insert",
"key": "foo",
"value": "bar"
},
{
"operation": "insert",
"key": "food",
"value": "bass"
}
],
"output": "0x17beaa1648bafa633cda809c90c04af50fc8aed3cb40d16efbddee6fdf63c4c3"
}

View File

@ -0,0 +1,58 @@
{
"input": [
{
"operation": "insert",
"key": "0x0000000000000000000000000000000000000000000000000000000000000045",
"value": "0x22b224a1420a802ab51d326e29fa98e34c4f24ea"
},
{
"operation": "insert",
"key": "0x0000000000000000000000000000000000000000000000000000000000000046",
"value": "0x67706c2076330000000000000000000000000000000000000000000000000000"
},
{
"operation": "insert",
"key": "0x0000000000000000000000000000000000000000000000000000001234567890",
"value": "0x697c7b8c961b56f675d570498424ac8de1a918f6"
},
{
"operation": "insert",
"key": "0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6",
"value": "0x1234567890"
},
{
"operation": "insert",
"key": "0x0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2",
"value": "0x4655474156000000000000000000000000000000000000000000000000000000"
},
{
"operation": "insert",
"key": "0x000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1",
"value": "0x4e616d6552656700000000000000000000000000000000000000000000000000"
},
{
"operation": "insert",
"key": "0x4655474156000000000000000000000000000000000000000000000000000000",
"value": "0x7ef9e639e2733cb34e4dfc576d4b23f72db776b2"
},
{
"operation": "insert",
"key": "0x4e616d6552656700000000000000000000000000000000000000000000000000",
"value": "0xec4f34c97e43fbb2816cfd95e388353c7181dab1"
},
{
"operation": "remove",
"key": "0x0000000000000000000000000000000000000000000000000000001234567890"
},
{
"operation": "insert",
"key": "0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6",
"value": "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000"
},
{
"operation": "insert",
"key": "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000",
"value": "0x697c7b8c961b56f675d570498424ac8de1a918f6"
}],
"output": "0x9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100"
}

66
json-tests/src/lib.rs Normal file
View File

@ -0,0 +1,66 @@
extern crate rustc_serialize;
extern crate glob;
use std::str::from_utf8;
use std::path::*;
use std::io::prelude::*;
use std::fs::File;
use glob::glob;
use rustc_serialize::*;
mod util;
pub mod trie;
pub mod rlp;
/// A single json-driven test case.
///
/// Implementations parse raw json test data into a typed `Input`
/// (the operations to perform) and an expected `Output` to compare
/// against.
pub trait JsonTest: Sized {
/// Parsed form of the test's "input" field.
type Input;
/// Parsed form of the test's "output" field.
type Output;
/// Parse a test case from raw json bytes.
fn new(data: &[u8]) -> Self;
/// The test's input, converted to `Self::Input`.
fn input(&self) -> Self::Input;
/// The expected output, converted to `Self::Output`.
fn output(&self) -> Self::Output;
}
/// Loads a test case as raw `json::Json`, exposing its top-level
/// "input" and "output" fields without further interpretation.
pub struct JsonLoader {
// The whole parsed test-case document.
json: json::Json
}
impl JsonTest for JsonLoader {
    type Input = json::Json;
    type Output = json::Json;

    /// Parse the raw bytes as a UTF-8 json document.
    /// Panics on invalid UTF-8 or malformed json.
    fn new(data: &[u8]) -> Self {
        let text = from_utf8(data).unwrap();
        JsonLoader { json: json::Json::from_str(text).unwrap() }
    }

    /// Clone of the document's top-level "input" field.
    fn input(&self) -> Self::Input {
        let obj = self.json.as_object().unwrap();
        obj["input"].clone()
    }

    /// Clone of the document's top-level "output" field.
    fn output(&self) -> Self::Output {
        let obj = self.json.as_object().unwrap();
        obj["output"].clone()
    }
}
/// Parse a test case of type `T` from `data` and invoke `f` with its
/// input and expected output.
pub fn execute_test<T, F>(data: &[u8], f: &mut F) where T: JsonTest, F: FnMut(T::Input, T::Output) {
    let case = T::new(data);
    let input = case.input();
    let output = case.output();
    f(input, output)
}
/// Read the file at `path` and invoke `f` with the parsed test case's
/// input and expected output.
///
/// Panics if the file cannot be opened or read, or if its contents
/// are not a valid test case of type `T`.
pub fn execute_test_from_file<T, F>(path: &Path, f: &mut F) where T: JsonTest, F: FnMut(T::Input, T::Output) {
    let mut file = File::open(path).unwrap();
    let mut buffer = vec![];
    // Fail loudly on a read error rather than silently testing partial
    // data (the Result was previously discarded with `let _ =`).
    file.read_to_end(&mut buffer).unwrap();
    let test = T::new(&buffer);
    f(test.input(), test.output())
}
/// Run `f` once for every test file matching the glob `pattern`,
/// passing the matching file's path (as a `String`) together with the
/// parsed input and expected output.
pub fn execute_tests_from_directory<T, F>(pattern: &str, f: &mut F) where T: JsonTest, F: FnMut(String, T::Input, T::Output) {
    for entry in glob(pattern).unwrap() {
        // Skip unreadable paths, same as filter_map(Result::ok).
        if let Ok(path) = entry {
            execute_test_from_file::<T, _>(&path, &mut |input, output| {
                f(path.to_str().unwrap().to_string(), input, output);
            });
        }
    }
}

52
json-tests/src/rlp.rs Normal file
View File

@ -0,0 +1,52 @@
//! json rlp tests
use rustc_serialize::*;
use super::{JsonTest, JsonLoader};
use util::*;
/// One rlp stream operation decoded from a json test file.
pub enum Operation {
/// Append a data value (`"operation": "append"`).
Append(Vec<u8>),
/// Begin a list of the given item count (`"append_list"`).
AppendList(usize),
/// Append pre-encoded raw bytes covering `usize` items (`"append_raw"`).
AppendRaw(Vec<u8>, usize),
/// Append an empty data item (`"append_empty"`).
AppendEmpty
}
impl Into<Operation> for json::Json {
    /// Decode a json test object into an `Operation`.
    ///
    /// Panics if a required field is missing or the `"operation"`
    /// value is not one of the supported kinds.
    fn into(self) -> Operation {
        let obj = self.as_object().unwrap();
        match obj["operation"].as_string().unwrap().as_ref() {
            "append" => Operation::Append(hex_or_string(obj["value"].as_string().unwrap())),
            "append_list" => Operation::AppendList(obj["len"].as_u64().unwrap() as usize),
            "append_raw" => Operation::AppendRaw(hex_or_string(obj["value"].as_string().unwrap()), obj["len"].as_u64().unwrap() as usize),
            "append_empty" => Operation::AppendEmpty,
            // Typo fixed: message previously read "Unsupported opertation".
            other => { panic!("Unsupported operation: {}", other); }
        }
    }
}
/// Json test driver for rlp stream encoding: input is a sequence of
/// stream operations, output is the expected rlp bytes.
pub struct RlpStreamTest {
// Underlying raw-json accessor for the test file.
loader: JsonLoader
}
impl JsonTest for RlpStreamTest {
    type Input = Vec<Operation>;
    type Output = Vec<u8>;

    fn new(data: &[u8]) -> Self {
        RlpStreamTest { loader: JsonLoader::new(data) }
    }

    /// Decode every element of the json "input" array into an `Operation`.
    fn input(&self) -> Self::Input {
        let json = self.loader.input();
        let ops = json.as_array().unwrap();
        let mut decoded = Vec::with_capacity(ops.len());
        for op in ops.iter() {
            decoded.push(op.clone().into());
        }
        decoded
    }

    /// The expected rlp encoding; hex-decoded when prefixed with "0x".
    fn output(&self) -> Self::Output {
        hex_or_string(self.loader.output().as_string().unwrap())
    }
}

89
json-tests/src/trie.rs Normal file
View File

@ -0,0 +1,89 @@
//! json trie tests
use std::collections::HashMap;
use rustc_serialize::*;
use super::{JsonTest, JsonLoader};
use util::*;
/// Directly-decodable mirror of one json trie operation record.
#[derive(RustcDecodable)]
struct RawOperation {
/// Either "insert" or "remove".
operation: String,
/// Key string; hex-decoded downstream when prefixed with "0x".
key: String,
/// Value for "insert"; absent for "remove".
value: Option<String>
}
/// One trie operation decoded from a json test file.
pub enum Operation {
/// Insert the (key, value) pair into the trie.
Insert(Vec<u8>, Vec<u8>),
/// Remove the key from the trie.
Remove(Vec<u8>)
}
impl Into<Operation> for RawOperation {
    /// Turn the raw decoded record into a typed trie `Operation`.
    ///
    /// Panics when `operation` is neither "insert" nor "remove", or
    /// when an "insert" record is missing its value.
    fn into(self) -> Operation {
        if self.operation == "insert" {
            Operation::Insert(hex_or_string(&self.key), hex_or_string(&self.value.unwrap()))
        } else if self.operation == "remove" {
            Operation::Remove(hex_or_string(&self.key))
        } else {
            panic!("invalid operation type: {}", self.operation)
        }
    }
}
/// Json test driver for trie construction: input is a sequence of
/// insert/remove operations, output is the expected root hash bytes.
pub struct TrieTest {
// Underlying raw-json accessor for the test file.
loader: JsonLoader
}
impl JsonTest for TrieTest {
    type Input = Vec<Operation>;
    type Output = Vec<u8>;

    fn new(data: &[u8]) -> Self {
        TrieTest { loader: JsonLoader::new(data) }
    }

    /// Decode the json "input" array into a sequence of trie operations.
    fn input(&self) -> Self::Input {
        let mut decoder = json::Decoder::new(self.loader.input());
        let raw: Vec<RawOperation> = Decodable::decode(&mut decoder).unwrap();
        let mut ops = Vec::with_capacity(raw.len());
        for r in raw.into_iter() {
            ops.push(r.into());
        }
        ops
    }

    /// The expected trie root; hex-decoded when prefixed with "0x".
    fn output(&self) -> Self::Output {
        hex_or_string(self.loader.output().as_string().unwrap())
    }
}
/// Json test driver for the triehash function: replays the trie
/// operations into a flat key/value list instead of a trie.
pub struct TriehashTest {
// Reuses TrieTest's parsing of operations and expected root.
trietest: TrieTest
}
impl JsonTest for TriehashTest {
    type Input = Vec<(Vec<u8>, Vec<u8>)>;
    type Output = Vec<u8>;

    fn new(data: &[u8]) -> Self {
        TriehashTest {
            trietest: TrieTest::new(data)
        }
    }

    /// Replay the trie operations into a map and return the surviving
    /// key/value pairs.
    ///
    /// NOTE(review): HashMap iteration order is unspecified, so pairs
    /// come back in arbitrary order — presumably the triehash consumer
    /// is order-independent; confirm at the call site.
    fn input(&self) -> Self::Input {
        self.trietest.input()
            .into_iter()
            .fold(HashMap::new(), |mut map, o| {
                match o {
                    Operation::Insert(k, v) => map.insert(k, v),
                    Operation::Remove(k) => map.remove(&k)
                };
                map
            })
            .into_iter()
            // Redundant identity `.map(|p| { p })` removed.
            .collect()
    }

    /// The expected root hash, taken from the underlying trie test.
    fn output(&self) -> Self::Output {
        self.trietest.output()
    }
}

8
json-tests/src/util.rs Normal file
View File

@ -0,0 +1,8 @@
use rustc_serialize::hex::FromHex;
/// Decode `s` as hex bytes when it carries a "0x" prefix; otherwise
/// return the raw UTF-8 bytes of the string itself.
pub fn hex_or_string(s: &str) -> Vec<u8> {
    if s.starts_with("0x") {
        s[2..].from_hex().unwrap()
    } else {
        From::from(s)
    }
}

View File

@ -1,7 +1,11 @@
use hash::*;
use bytes::*;
use std::collections::HashMap;
pub trait HashDB {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, u32>;
/// Look up a given hash into the bytes that hash to it, returning None if the
/// hash is not known.
///

View File

@ -116,6 +116,10 @@ impl MemoryDB {
}
self.data.get(key).unwrap()
}
pub fn raw_keys(&self) -> HashMap<H256, i32> {
self.data.iter().filter_map(|(k, v)| if v.1 != 0 {Some((k.clone(), v.1))} else {None}).collect::<HashMap<H256, i32>>()
}
}
impl HashDB for MemoryDB {
@ -126,6 +130,10 @@ impl HashDB for MemoryDB {
}
}
fn keys(&self) -> HashMap<H256, u32> {
self.data.iter().filter_map(|(k, v)| if v.1 > 0 {Some((k.clone(), v.1 as u32))} else {None} ).collect::<HashMap<H256, u32>>()
}
fn exists(&self, key: &H256) -> bool {
match self.data.get(key) {
Some(&(_, x)) if x > 0 => true,

View File

@ -30,6 +30,8 @@ use bytes::*;
pub struct NibbleSlice<'a> {
data: &'a [u8],
offset: usize,
data_encode_suffix: &'a [u8],
offset_encode_suffix: usize,
}
impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
@ -37,7 +39,26 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
pub fn new(data: &[u8]) -> NibbleSlice { NibbleSlice::new_offset(data, 0) }
/// Create a new nibble slice with the given byte-slice with a nibble offset.
pub fn new_offset(data: &'a [u8], offset: usize) -> NibbleSlice { NibbleSlice{data: data, offset: offset} }
pub fn new_offset(data: &'a [u8], offset: usize) -> NibbleSlice { NibbleSlice{data: data, offset: offset, data_encode_suffix: &b""[..], offset_encode_suffix: 0} }
///
pub fn new_composed(a: &'a NibbleSlice, b: &'a NibbleSlice) -> NibbleSlice<'a> { NibbleSlice{data: a.data, offset: a.offset, data_encode_suffix: b.data, offset_encode_suffix: b.offset} }
/*pub fn new_composed_bytes_offset(a: &NibbleSlice, b: &NibbleSlice) -> (Bytes, usize) {
let r: Vec<u8>::with_capacity((a.len() + b.len() + 1) / 2);
let mut i = (a.len() + b.len()) % 2;
while i < a.len() {
match i % 2 {
0 => ,
1 => ,
}
i += 1;
}
while i < a.len() + b.len() {
i += 1;
}
(r, a.len() + b.len())
}*/
/// Create a new nibble slice from the given HPE encoded data (e.g. output of `encoded()`).
pub fn from_encoded(data: &'a [u8]) -> (NibbleSlice, bool) {
@ -48,20 +69,32 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Get the length (in nibbles, naturally) of this slice.
pub fn len(&self) -> usize { self.data.len() * 2 - self.offset }
pub fn len(&self) -> usize { (self.data.len() + self.data_encode_suffix.len()) * 2 - self.offset - self.offset_encode_suffix }
/// Get the nibble at position `i`.
pub fn at(&self, i: usize) -> u8 {
if (self.offset + i) & 1 == 1 {
self.data[(self.offset + i) / 2] & 15u8
let l = self.data.len() * 2 - self.offset;
if i < l {
if (self.offset + i) & 1 == 1 {
self.data[(self.offset + i) / 2] & 15u8
}
else {
self.data[(self.offset + i) / 2] >> 4
}
}
else {
self.data[(self.offset + i) / 2] >> 4
let i = i - l;
if (self.offset_encode_suffix + i) & 1 == 1 {
self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] & 15u8
}
else {
self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] >> 4
}
}
}
/// Return object which represents a view on to this slice (further) offset by `i` nibbles.
pub fn mid(&'view self, i: usize) -> NibbleSlice<'a> { NibbleSlice{ data: self.data, offset: self.offset + i} }
pub fn mid(&'view self, i: usize) -> NibbleSlice<'a> { NibbleSlice{ data: self.data, offset: self.offset + i, data_encode_suffix: &b""[..], offset_encode_suffix: 0 } }
/// Do we start with the same nibbles as the whole of `them`?
pub fn starts_with(&self, them: &Self) -> bool { self.common_prefix(them) == them.len() }

View File

@ -9,6 +9,7 @@ use memorydb::*;
use std::ops::*;
use std::sync::*;
use std::env;
use std::collections::HashMap;
use rocksdb::{DB, Writable};
#[derive(Clone)]
@ -135,6 +136,20 @@ impl OverlayDB {
}
impl HashDB for OverlayDB {
fn keys(&self) -> HashMap<H256, u32> {
let mut ret: HashMap<H256, u32> = HashMap::new();
for (key, _) in self.backing.iterator().from_start() {
let h = H256::from_slice(key.deref());
let r = self.payload(&h).unwrap().1;
ret.insert(h, r);
}
for (key, refs) in self.overlay.raw_keys().into_iter() {
let refs = *ret.get(&key).unwrap_or(&0u32) as i32 + refs as i32;
ret.insert(key, refs as u32);
}
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
// return ok if positive; if negative, check backing - might be enough references there to make
// it positive again.

View File

@ -125,6 +125,7 @@ impl<'a> From<Rlp<'a>> for UntrustedRlp<'a> {
}
}
#[derive(Debug)]
pub enum Prototype {
Null,
Data(usize),
@ -1098,6 +1099,9 @@ impl Encoder for BasicEncoder {
#[cfg(test)]
mod tests {
extern crate json_tests;
use self::json_tests::execute_tests_from_directory;
use self::json_tests::rlp as rlptest;
use std::{fmt, cmp};
use std::str::FromStr;
use rlp;
@ -1256,7 +1260,7 @@ mod tests {
run_encode_tests(tests);
}
/// Vec<u8> is treated as a single value
/// Vec<u8> (Bytes) is treated as a single value
#[test]
fn encode_vector_u8() {
let tests = vec![
@ -1292,60 +1296,6 @@ mod tests {
run_encode_tests(tests);
}
#[test]
fn encode_bytes() {
	// A single byte below 0x80 encodes as itself (no length prefix).
	let bytes = vec![0u8];
	let as_slice: &[u8] = &bytes;
	let encoded = rlp::encode(&as_slice);
	assert_eq!(encoded, vec![0u8]);
}
#[test]
fn rlp_stream() {
	// ["cat", "dog"] => list header 0xc8, then each string as 0x83 + bytes.
	let mut stream = RlpStream::new_list(2);
	stream.append(&"cat");
	stream.append(&"dog");
	let expected = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
	assert_eq!(stream.out(), expected);
}
#[test]
fn rlp_stream_list() {
	// Three nested empty-list shapes appended one call at a time
	// (append_list returns the stream, so unchained calls are equivalent).
	let mut stream = RlpStream::new_list(3);
	stream.append_list(0);
	stream.append_list(1);
	stream.append_list(0);
	stream.append_list(2);
	stream.append_list(0);
	stream.append_list(1);
	stream.append_list(0);
	let expected = vec![0xc7, 0xc0, 0xc1, 0xc0, 0xc3, 0xc0, 0xc1, 0xc0];
	assert_eq!(stream.out(), expected);
}
#[test]
fn rlp_stream_list2() {
	// A list of 17 empty strings: each item encodes as 0x80 and the
	// 17-byte payload still fits the short list form (0xc0 + 17 = 0xd1).
	let mut stream = RlpStream::new();
	stream.append_list(17);
	for _ in 0..17 {
		stream.append(&"");
	}
	let mut expected = vec![0xd1u8];
	expected.extend(vec![0x80u8; 17]);
	assert_eq!(stream.out(), expected);
}
#[test]
fn rlp_stream_list3() {
	// 17 three-byte strings: 17 * 4 = 68 (0x44) payload bytes exceed 55,
	// so the list uses the long-form header 0xf8 followed by the length.
	let mut stream = RlpStream::new();
	stream.append_list(17);
	let mut expected = vec![0xf8, 0x44];
	for _ in 0..17 {
		stream.append(&"aaa");
		expected.extend(vec![0x83, b'a', b'a', b'a']);
	}
	assert_eq!(stream.out(), expected);
}
/// Decode-test fixture: an expected value paired with the raw RLP bytes
/// it should decode from (consumed by `run_decode_tests`).
struct DTestPair<T>(T, Vec<u8>) where T: rlp::Decodable + fmt::Debug + cmp::Eq;
fn run_decode_tests<T>(tests: Vec<DTestPair<T>>) where T: rlp::Decodable + fmt::Debug + cmp::Eq {
@ -1355,7 +1305,7 @@ mod tests {
}
}
/// Vec<u8> is treated as a single value
/// Vec<u8> (Bytes) is treated as a single value
#[test]
fn decode_vector_u8() {
let tests = vec![
@ -1471,29 +1421,23 @@ mod tests {
}
#[test]
fn test_view() {
struct View<'a> {
bytes: &'a [u8]
}
fn test_rlp_json() {
println!("Json rlp test: ");
execute_tests_from_directory::<rlptest::RlpStreamTest, _>("json-tests/json/rlp/stream/*.json", &mut | file, input, output | {
println!("file: {}", file);
impl <'a, 'view> View<'a> where 'a: 'view {
fn new(bytes: &'a [u8]) -> View<'a> {
View {
bytes: bytes
}
let mut stream = RlpStream::new();
for operation in input.into_iter() {
match operation {
rlptest::Operation::Append(ref v) => stream.append(v),
rlptest::Operation::AppendList(len) => stream.append_list(len),
rlptest::Operation::AppendRaw(ref raw, len) => stream.append_raw(raw, len),
rlptest::Operation::AppendEmpty => stream.append_empty_data()
};
}
fn offset(&'view self, len: usize) -> View<'a> {
View::new(&self.bytes[len..])
}
fn data(&'view self) -> &'a [u8] {
self.bytes
}
}
let data = vec![0, 1, 2, 3];
let view = View::new(&data);
let _data_slice = view.offset(1).data();
assert_eq!(stream.out(), output);
});
}
}

File diff suppressed because it is too large Load Diff

View File

@ -277,63 +277,13 @@ fn test_hex_prefix_encode() {
#[cfg(test)]
mod tests {
use std::str::FromStr;
use std::collections::BTreeMap;
use rustc_serialize::hex::FromHex;
use rustc_serialize::json::Json;
use bytes::*;
extern crate json_tests;
use self::json_tests::*;
use hash::*;
use triehash::*;
#[test]
fn empty_trie_root() {
	// A trie with no entries must hash to the canonical empty-trie root.
	let expected = H256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").unwrap();
	assert_eq!(trie_root(vec![]), expected);
}
#[test]
fn single_trie_item() {
	// A single key/value pair produces a fixed, known root.
	let items = vec![(From::from("A"), From::from("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))];
	let expected = H256::from_str("d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab").unwrap();
	assert_eq!(trie_root(items), expected);
}
#[test]
fn foo_trie_item() {
	// Two keys sharing the "foo" prefix exercise extension-node hashing.
	let items = vec![
		(From::from("foo"), From::from("bar")),
		(From::from("food"), From::from("bass")),
	];
	let expected = H256::from_str("17beaa1648bafa633cda809c90c04af50fc8aed3cb40d16efbddee6fdf63c4c3").unwrap();
	assert_eq!(trie_root(items), expected);
}
#[test]
fn dogs_trie_item() {
	// Three keys with a common "do" prefix and differing tails.
	let items = vec![
		(From::from("doe"), From::from("reindeer")),
		(From::from("dog"), From::from("puppy")),
		(From::from("dogglesworth"), From::from("cat")),
	];
	let expected = H256::from_str("8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3").unwrap();
	assert_eq!(trie_root(items), expected);
}
#[test]
fn puppy_trie_items() {
	// Mixed prefixed ("do"/"dog"/"doge") and unrelated ("horse") keys.
	let items = vec![
		(From::from("do"), From::from("verb")),
		(From::from("dog"), From::from("puppy")),
		(From::from("doge"), From::from("coin")),
		(From::from("horse"), From::from("stallion")),
	];
	let expected = H256::from_str("5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84").unwrap();
	assert_eq!(trie_root(items), expected);
}
#[test]
fn out_of_order() {
fn test_triehash_out_of_order() {
assert!(trie_root(vec![
(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
(vec![0x81u8, 0x23], vec![0x81u8, 0x23]),
@ -347,50 +297,10 @@ mod tests {
}
#[test]
fn test_trie_root() {
let v = vec![
("0000000000000000000000000000000000000000000000000000000000000045".from_hex().unwrap(),
"22b224a1420a802ab51d326e29fa98e34c4f24ea".from_hex().unwrap()),
("0000000000000000000000000000000000000000000000000000000000000046".from_hex().unwrap(),
"67706c2076330000000000000000000000000000000000000000000000000000".from_hex().unwrap()),
("000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap(),
"6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap()),
("0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap(),
"4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap()),
("000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap(),
"4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap()),
("4655474156000000000000000000000000000000000000000000000000000000".from_hex().unwrap(),
"7ef9e639e2733cb34e4dfc576d4b23f72db776b2".from_hex().unwrap()),
("4e616d6552656700000000000000000000000000000000000000000000000000".from_hex().unwrap(),
"ec4f34c97e43fbb2816cfd95e388353c7181dab1".from_hex().unwrap()),
("6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000".from_hex().unwrap(),
"697c7b8c961b56f675d570498424ac8de1a918f6".from_hex().unwrap())
];
assert_eq!(trie_root(v), H256::from_str("9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100").unwrap());
fn test_triehash_json() {
execute_tests_from_directory::<trie::TriehashTest, _>("json-tests/json/trie/*.json", &mut | file, input, output | {
println!("file: {}, output: {:?}", file, output);
assert_eq!(trie_root(input), H256::from_slice(&output));
});
}
#[test]
// NOTE(review): disabled placeholder — the entire body is commented out, so
// this test currently does nothing and always passes. It referenced the
// `../tests` submodule (removed in this change); re-enable or delete once the
// JSON trie fixtures have a new home.
fn test_triehash_json_trietest_json() {
//let data = include_bytes!("../tests/TrieTests/trietest.json");
//let s = String::from_bytes(data).unwrap();
//let json = Json::from_str(&s).unwrap();
//let obj = json.as_object().unwrap();
//for (key, value) in obj.iter() {
// println!("running test: {}", key);
//}
//assert!(false);
}
}

1
tests

@ -1 +0,0 @@
Subproject commit 2e4987ad2a973e2cf85ef742a8b9bd094363cd18