commit c4d0907944

@@ -6,4 +6,6 @@ authors = ["arkpar <arkadiy@ethcore.io"]
 [lib]
 
 [dependencies]
+log = "0.3"
+lru-cache = "0.0"
 sha3 = { path = "../util/sha3" }
@@ -6,7 +6,11 @@
 use std::mem;
 use std::ptr;
 use sizes::{CACHE_SIZES, DAG_SIZES};
-use sha3::{self};
+use sha3;
+use std::slice;
+use std::path::PathBuf;
+use std::io::{Read, Write, self};
+use std::fs::{self, File};
 
 pub const ETHASH_EPOCH_LENGTH: u64 = 30000;
 pub const ETHASH_CACHE_ROUNDS: usize = 3;
@@ -76,6 +80,45 @@ impl Light {
 	pub fn compute(&self, header_hash: &H256, nonce: u64) -> ProofOfWork {
 		light_compute(self, header_hash, nonce)
 	}
+
+	pub fn file_path(block_number: u64) -> PathBuf {
+		let mut home = ::std::env::home_dir().unwrap();
+		home.push(".ethash");
+		home.push("light");
+		let seed_hash = get_seedhash(block_number);
+		home.push(to_hex(&seed_hash));
+		home
+	}
+
+	pub fn from_file(block_number: u64) -> io::Result<Light> {
+		let path = Light::file_path(block_number);
+		let mut file = try!(File::open(path));
+
+		let cache_size = get_cache_size(block_number);
+		if try!(file.metadata()).len() != cache_size as u64 {
+			return Err(io::Error::new(io::ErrorKind::Other, "Cache file size mismatch"));
+		}
+		let num_nodes = cache_size / NODE_BYTES;
+		let mut nodes: Vec<Node> = Vec::new();
+		nodes.resize(num_nodes, unsafe { mem::uninitialized() });
+		let buf = unsafe { slice::from_raw_parts_mut(nodes.as_mut_ptr() as *mut u8, cache_size) };
+		try!(file.read_exact(buf));
+		Ok(Light {
+			cache: nodes,
+			block_number: block_number,
+		})
+	}
+
+	pub fn to_file(&self) -> io::Result<()> {
+		let path = Light::file_path(self.block_number);
+		try!(fs::create_dir_all(path.parent().unwrap()));
+		let mut file = try!(File::create(path));
+
+		let cache_size = self.cache.len() * NODE_BYTES;
+		let buf = unsafe { slice::from_raw_parts(self.cache.as_ptr() as *const u8, cache_size) };
+		try!(file.write(buf));
+		Ok(())
+	}
 }
 
 #[inline]
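Reviewer note, not part of the diff: Light::from_file fills a Vec<Node> via mem::uninitialized() and then reads the cache file directly into that memory through a raw byte slice, and to_file uses a single file.write call, which may write fewer bytes than requested (write_all retries until the whole buffer is written). Below is a minimal sketch of a read path that avoids the uninitialized fill, under the same assumption the diff already makes (a cache node is plain old data, so reinterpreting bytes as nodes is sound); the helper name read_cache_bytes is hypothetical and not part of the code above.

use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

/// Read the whole cache file into a zero-initialized byte buffer; the caller
/// can then reinterpret &buf[..] as &[Node] in a single unsafe cast.
fn read_cache_bytes(path: &Path, cache_size: usize) -> io::Result<Vec<u8>> {
	let mut file = try!(File::open(path));
	if try!(file.metadata()).len() != cache_size as u64 {
		return Err(io::Error::new(io::ErrorKind::Other, "Cache file size mismatch"));
	}
	let mut buf = vec![0u8; cache_size];
	try!(file.read_exact(&mut buf));
	Ok(buf)
}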
@@ -249,6 +292,19 @@ fn light_new(block_number: u64) -> Light {
 	}
 }
 
+static CHARS: &'static[u8] = b"0123456789abcdef";
+fn to_hex(bytes: &[u8]) -> String {
+	let mut v = Vec::with_capacity(bytes.len() * 2);
+	for &byte in bytes.iter() {
+		v.push(CHARS[(byte >> 4) as usize]);
+		v.push(CHARS[(byte & 0xf) as usize]);
+	}
+
+	unsafe {
+		String::from_utf8_unchecked(v)
+	}
+}
+
 #[test]
 fn test_difficulty_test() {
 	let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
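Reviewer note, not part of the diff: to_hex lowercase-hex-encodes a byte slice, two characters per byte, and the unsafe String::from_utf8_unchecked is sound because CHARS contains only ASCII. A quick check of the expected behaviour:

assert_eq!(to_hex(&[0x00, 0xde, 0xad, 0xbe, 0xef]), "00deadbeef");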
@@ -1,25 +1,28 @@
 //! Ethash implementation
 //! See https://github.com/ethereum/wiki/wiki/Ethash
 extern crate sha3;
+extern crate lru_cache;
+#[macro_use]
+extern crate log;
 mod sizes;
 mod compute;
 
+use lru_cache::LruCache;
 use compute::Light;
 pub use compute::{quick_get_difficulty, H256, ProofOfWork, ETHASH_EPOCH_LENGTH};
 
-use std::collections::HashMap;
-use std::sync::RwLock;
+use std::sync::{Arc, Mutex};
 
 /// Light/Full cache manager
 pub struct EthashManager {
-	lights: RwLock<HashMap<u64, Light>>,
+	lights: Mutex<LruCache<u64, Arc<Light>>>
 }
 
 impl EthashManager {
 	/// Create a new instance of ethash manager
 	pub fn new() -> EthashManager {
 		EthashManager {
-			lights: RwLock::new(HashMap::new())
+			lights: Mutex::new(LruCache::new(2))
 		}
 	}
 
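Reviewer note, not part of the diff: the manager now keeps at most two Arc<Light> caches behind a Mutex, keyed by epoch, so inserting a third epoch evicts the least recently used one. A minimal sketch of the assumed lru-cache behaviour, using only the LruCache methods the diff itself relies on (new, insert, get_mut, contains_key):

extern crate lru_cache;
use lru_cache::LruCache;

fn main() {
	let mut lights: LruCache<u64, &'static str> = LruCache::new(2);
	lights.insert(0, "epoch 0");
	lights.insert(1, "epoch 1");
	let _ = lights.get_mut(&0);   // touch epoch 0 so it becomes the most recently used entry
	lights.insert(2, "epoch 2");  // capacity is 2, so the least recently used entry (epoch 1) is evicted
	assert!(lights.contains_key(&0) && lights.contains_key(&2));
	assert!(!lights.contains_key(&1));
}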
@@ -30,15 +33,27 @@ impl EthashManager {
 	/// `nonce` - The nonce to pack into the mix
 	pub fn compute_light(&self, block_number: u64, header_hash: &H256, nonce: u64) -> ProofOfWork {
 		let epoch = block_number / ETHASH_EPOCH_LENGTH;
-		while !self.lights.read().unwrap().contains_key(&epoch) {
-			if let Ok(mut lights) = self.lights.try_write()
-			{
-				if !lights.contains_key(&epoch) {
+		let light = {
+			let mut lights = self.lights.lock().unwrap();
+			match lights.get_mut(&epoch).map(|l| l.clone()) {
+				None => {
+					let light = match Light::from_file(block_number) {
+						Ok(light) => Arc::new(light),
+						Err(e) => {
+							debug!("Light cache file not found for {}:{}", block_number, e);
 							let light = Light::new(block_number);
-					lights.insert(epoch, light);
-				}
-			}
-		}
-		self.lights.read().unwrap().get(&epoch).unwrap().compute(header_hash, nonce)
+							if let Err(e) = light.to_file() {
+								warn!("Light cache file write error: {}", e);
+							}
+							Arc::new(light)
+						}
+					};
+					lights.insert(epoch, light.clone());
+					light
+				}
+				Some(light) => light
+			}
+		};
+		light.compute(header_hash, nonce)
 	}
 }
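Reviewer note, not part of the diff: the intended call pattern after this change, assuming the crate builds as ethash and H256 is a plain [u8; 32] alias re-exported from compute (the block number and nonce below are arbitrary placeholders):

extern crate ethash;
use ethash::EthashManager;

fn main() {
	let manager = EthashManager::new();
	let header_hash = [0u8; 32];
	// The first call for an epoch loads the light cache from ~/.ethash/light or
	// rebuilds and persists it; later calls for the same epoch reuse the cached Arc<Light>.
	let _pow = manager.compute_light(1_000_000, &header_hash, 0x42);
}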