Upgrade elastic-array to 0.9.0

This is a huge change, which includes some changes that replace code that
originally cloned data with code that reuses allocations instead. The updated
`elastic-array` crate renames its consuming `Vec`-conversion method to
`into_vec`, which means that I can do a simple
`sed -i 's/to_vec/into_vec/g'` and then fix the compilation errors.

This commit is probably a minor performance win and definitely a
significant readability win.
This commit is contained in:
Vurich
2017-06-28 14:16:53 +02:00
parent 826bf28196
commit 3d8dc11442
76 changed files with 212 additions and 211 deletions

View File

@@ -16,7 +16,7 @@ time = "0.1.34"
rocksdb = { git = "https://github.com/paritytech/rust-rocksdb" }
eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
rust-crypto = "0.2.34"
elastic-array = "0.8"
elastic-array = "0.9"
rlp = { path = "rlp" }
heapsize = "0.4"
itertools = "0.5"

View File

@@ -45,14 +45,14 @@ fn random_bytes(min_count: usize, diff_count: usize, seed: &mut H256) -> Vec<u8>
assert!(min_count + diff_count <= 32);
*seed = seed.sha3();
let r = min_count + (seed[31] as usize % (diff_count + 1));
seed[0..r].to_vec()
seed[0..r].into_vec()
}
fn random_value(seed: &mut H256) -> Bytes {
*seed = seed.sha3();
match seed[0] % 2 {
1 => vec![seed[31];1],
_ => seed.to_vec(),
_ => seed.into_vec(),
}
}

View File

@@ -7,7 +7,7 @@ version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
[dependencies]
elastic-array = "0.8"
elastic-array = "0.9"
ethcore-bigint = { path = "../bigint" }
lazy_static = "0.2"
rustc-serialize = "0.3"

File diff suppressed because one or more lines are too long

View File

@@ -98,7 +98,7 @@ pub fn decode_list<T>(bytes: &[u8]) -> Vec<T> where T: Decodable {
///
/// fn main () {
/// let animal = "cat";
/// let out = rlp::encode(&animal).to_vec();
/// let out = rlp::encode(&animal).into_vec();
/// assert_eq!(out, vec![0x83, b'c', b'a', b't']);
/// }
/// ```

View File

@@ -264,8 +264,8 @@ impl RlpStream {
/// panic! if stream is not finished.
pub fn out(self) -> Vec<u8> {
match self.is_finished() {
//true => self.encoder.out().to_vec(),
true => self.buffer.to_vec(),
//true => self.encoder.out().into_vec(),
true => self.buffer.into_vec(),
false => panic!()
}
}

View File

@@ -242,8 +242,8 @@ impl JournalDB for OverlayRecentDB {
fn state(&self, key: &H256) -> Option<Bytes> {
let journal_overlay = self.journal_overlay.read();
let key = to_short_key(key);
journal_overlay.backing_overlay.get(&key).map(|v| v.to_vec())
.or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().to_vec()))
journal_overlay.backing_overlay.get(&key).map(|v| v.into_vec())
.or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().into_vec()))
.or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.into_vec()))
}
@@ -288,7 +288,7 @@ impl JournalDB for OverlayRecentDB {
batch.put_vec(self.column, &k.drain(), r.out());
if journal_overlay.latest_era.map_or(true, |e| now > e) {
trace!(target: "journaldb", "Set latest era to {}", now);
batch.put_vec(self.column, &LATEST_ERA_KEY, encode(&now).to_vec());
batch.put_vec(self.column, &LATEST_ERA_KEY, encode(&now).into_vec());
journal_overlay.latest_era = Some(now);
}

View File

@@ -216,7 +216,7 @@ impl KeyValueDB for InMemory {
Some(map) =>
map.iter()
.find(|&(ref k ,_)| k.starts_with(prefix))
.map(|(_, v)| (&**v).to_vec().into_boxed_slice())
.map(|(_, v)| v.to_vec().into_boxed_slice())
}
}
@@ -227,7 +227,7 @@ impl KeyValueDB for InMemory {
match op {
DBOp::Insert { col, key, value } => {
if let Some(mut col) = columns.get_mut(&col) {
col.insert(key.to_vec(), value);
col.insert(key.into_vec(), value);
}
},
DBOp::InsertCompressed { col, key, value } => {
@@ -235,7 +235,7 @@ impl KeyValueDB for InMemory {
let compressed = UntrustedRlp::new(&value).compress(RlpType::Blocks);
let mut value = DBValue::new();
value.append_slice(&compressed);
col.insert(key.to_vec(), value);
col.insert(key.into_vec(), value);
}
},
DBOp::Delete { col, key } => {
@@ -253,7 +253,7 @@ impl KeyValueDB for InMemory {
Some(map) => Box::new( // TODO: worth optimizing at all?
map.clone()
.into_iter()
.map(|(k, v)| (k.into_boxed_slice(), v.to_vec().into_boxed_slice()))
.map(|(k, v)| (k.into_boxed_slice(), v.into_vec().into_boxed_slice()))
),
None => Box::new(None.into_iter()),
}
@@ -267,7 +267,7 @@ impl KeyValueDB for InMemory {
map.clone()
.into_iter()
.skip_while(move |&(ref k, _)| !k.starts_with(prefix))
.map(|(k, v)| (k.into_boxed_slice(), v.to_vec().into_boxed_slice()))
.map(|(k, v)| (k.into_boxed_slice(), v.into_vec().into_boxed_slice()))
),
None => Box::new(None.into_iter()),
}

View File

@@ -163,7 +163,7 @@ impl<T: SimpleMigration> Migration for T {
};
for (key, value) in iter {
if let Some((key, value)) = self.simple_migrate(key.to_vec(), value.to_vec()) {
if let Some((key, value)) = self.simple_migrate(key.into_vec(), value.into_vec()) {
batch.insert(key, value, dest)?;
}
}

View File

@@ -95,7 +95,7 @@ impl Migration for AddsColumn {
let mut batch = Batch::new(config, col);
for (key, value) in source.iter(col).into_iter().flat_map(|inner| inner) {
batch.insert(key.to_vec(), value.to_vec(), dest)?;
batch.insert(key.into_vec(), value.into_vec(), dest)?;
}

View File

@@ -95,7 +95,7 @@ impl<'db> Iterator for FatDBIterator<'db> {
.map(|res|
res.map(|(hash, value)| {
let aux_hash = hash.sha3();
(self.trie.db().get(&aux_hash).expect("Missing fatdb hash").to_vec(), value)
(self.trie.db().get(&aux_hash).expect("Missing fatdb hash").into_vec(), value)
})
)
}

View File

@@ -111,7 +111,7 @@ impl StandardMap {
let v = match self.value_mode {
ValueMode::Mirror => k.clone(),
ValueMode::Random => Self::random_value(seed),
ValueMode::Index => encode(&index).to_vec(),
ValueMode::Index => encode(&index).into_vec(),
};
d.push((k, v))
}

View File

@@ -378,7 +378,7 @@ fn iterator() {
}
let t = TrieDB::new(&memdb, &root).unwrap();
assert_eq!(d.iter().map(|i| i.clone().to_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
assert_eq!(d.iter().map(|i| i.clone().into_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
assert_eq!(d, t.iter().unwrap().map(|x| x.unwrap().1).collect::<Vec<_>>());
}
@@ -401,7 +401,7 @@ fn iterator_seek() {
let t = TrieDB::new(&memdb, &root).unwrap();
let mut iter = t.iter().unwrap();
assert_eq!(iter.next(), Some(Ok((b"A".to_vec(), DBValue::from_slice(b"A")))));
assert_eq!(iter.next(), Some(Ok((b"A".into_vec(), DBValue::from_slice(b"A")))));
iter.seek(b"!").unwrap();
assert_eq!(d, iter.map(|x| x.unwrap().1).collect::<Vec<_>>());
let mut iter = t.iter().unwrap();

View File

@@ -966,7 +966,7 @@ mod tests {
debug!("{:?} of 10000 stress tests done", test_i);
}
let x = StandardMap {
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()),
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".into_vec()),
min_key: 5,
journal_key: 0,
value_mode: ValueMode::Index,
@@ -1026,14 +1026,14 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t1 = TrieDBMut::new(&mut memdb, &mut root);
t1.insert(&[0x01, 0x23], &big_value.to_vec()).unwrap();
t1.insert(&[0x01, 0x34], &big_value.to_vec()).unwrap();
t1.insert(&[0x01, 0x23], &big_value.into_vec()).unwrap();
t1.insert(&[0x01, 0x34], &big_value.into_vec()).unwrap();
let mut memdb2 = MemoryDB::new();
let mut root2 = H256::new();
let mut t2 = TrieDBMut::new(&mut memdb2, &mut root2);
t2.insert(&[0x01], &big_value.to_vec()).unwrap();
t2.insert(&[0x01, 0x23], &big_value.to_vec()).unwrap();
t2.insert(&[0x01, 0x34], &big_value.to_vec()).unwrap();
t2.insert(&[0x01], &big_value.into_vec()).unwrap();
t2.insert(&[0x01, 0x23], &big_value.into_vec()).unwrap();
t2.insert(&[0x01, 0x34], &big_value.into_vec()).unwrap();
t2.remove(&[0x01]).unwrap();
}
@@ -1127,8 +1127,8 @@ mod tests {
t.insert(&[0x01u8, 0x23], big_value0).unwrap();
t.insert(&[0x11u8, 0x23], big_value1).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], big_value0.to_vec()),
(vec![0x11u8, 0x23], big_value1.to_vec())
(vec![0x01u8, 0x23], big_value0.into_vec()),
(vec![0x11u8, 0x23], big_value1.into_vec())
]));
}
@@ -1142,8 +1142,8 @@ mod tests {
t.insert(&[0x01u8, 0x23], big_value).unwrap();
t.insert(&[0x11u8, 0x23], big_value).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], big_value.to_vec()),
(vec![0x11u8, 0x23], big_value.to_vec())
(vec![0x01u8, 0x23], big_value.into_vec()),
(vec![0x11u8, 0x23], big_value.into_vec())
]));
}
@@ -1190,7 +1190,7 @@ mod tests {
let mut seed = H256::new();
for _ in 0..50 {
let x = StandardMap {
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()),
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".into_vec()),
min_key: 5,
journal_key: 0,
value_mode: ValueMode::Index,
@@ -1241,7 +1241,7 @@ mod tests {
fn insert_empty() {
let mut seed = H256::new();
let x = StandardMap {
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()),
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".into_vec()),
min_key: 5,
journal_key: 0,
value_mode: ValueMode::Index,
@@ -1269,7 +1269,7 @@ mod tests {
fn return_old_values() {
let mut seed = H256::new();
let x = StandardMap {
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()),
alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".into_vec()),
min_key: 5,
journal_key: 0,
value_mode: ValueMode::Index,

View File

@@ -48,7 +48,7 @@ pub fn ordered_trie_root<I>(input: I) -> H256
// optimize it later
.into_iter()
.enumerate()
.map(|(i, vec)| (rlp::encode(&i).to_vec(), vec))
.map(|(i, vec)| (rlp::encode(&i).into_vec(), vec))
.collect::<BTreeMap<_, _>>()
// then move them to a vector
.into_iter()
@@ -331,7 +331,7 @@ mod tests {
#[test]
fn simple_test() {
assert_eq!(trie_root(vec![
(b"A".to_vec(), b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".to_vec())
(b"A".into_vec(), b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".into_vec())
]), H256::from_str("d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab").unwrap());
}