// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Parity upgrade logic

use semver::Version;
use std::collections::*;
use std::fs::{self, File, create_dir_all};
use std::env;
use std::io;
use std::io::{Read, Write};
use std::path::{PathBuf, Path};
use dir::{DatabaseDirectories, default_data_path};
use helpers::replace_home;
use util::journaldb::Algorithm;

#[cfg_attr(feature="dev", allow(enum_variant_names))]
#[derive(Debug)]
pub enum Error {
	CannotCreateConfigPath,
	CannotWriteVersionFile,
	CannotUpdateVersionFile,
}

const CURRENT_VERSION: &'static str = env!("CARGO_PKG_VERSION");

#[derive(Hash, PartialEq, Eq)]
struct UpgradeKey {
	pub old_version: Version,
	pub new_version: Version,
}

type UpgradeList = HashMap<UpgradeKey, fn() -> Result<(), Error>>;

impl UpgradeKey {
	// given that the following config exists:
	// ver.lock 1.1 (`previous_version`)
	//
	// current_version 1.4 (`current_version`)
	//
	// upgrades (set of `UpgradeKey`):
	// 1.0 -> 1.1 (u1)
	// 1.1 -> 1.2 (u2)
	// 1.2 -> 1.3 (u3)
	// 1.3 -> 1.4 (u4)
	// 1.4 -> 1.5 (u5)
	//
	// then the following upgrades should be applied:
	// u2, u3, u4
	fn is_applicable(&self, previous_version: &Version, current_version: &Version) -> bool {
		self.old_version >= *previous_version && self.new_version <= *current_version
	}
}
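
// A minimal test sketch (not part of the original file) illustrating the
// scenario from the comment above. The module name and the `.0` patch
// components are assumptions; semver requires full `x.y.z` version strings.
#[cfg(test)]
mod upgrade_key_tests {
	use super::UpgradeKey;
	use semver::Version;

	fn key(old: &str, new: &str) -> UpgradeKey {
		UpgradeKey {
			old_version: Version::parse(old).unwrap(),
			new_version: Version::parse(new).unwrap(),
		}
	}

	#[test]
	fn applies_only_upgrades_within_version_window() {
		let previous = Version::parse("1.1.0").unwrap();
		let current = Version::parse("1.4.0").unwrap();
		// u1 starts below the previously installed version: skipped.
		assert!(!key("1.0.0", "1.1.0").is_applicable(&previous, &current));
		// u2, u3 and u4 fall inside the [previous, current] window: applied.
		assert!(key("1.1.0", "1.2.0").is_applicable(&previous, &current));
		assert!(key("1.2.0", "1.3.0").is_applicable(&previous, &current));
		assert!(key("1.3.0", "1.4.0").is_applicable(&previous, &current));
		// u5 targets a version newer than the running binary: skipped.
		assert!(!key("1.4.0", "1.5.0").is_applicable(&previous, &current));
	}
}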

// dummy upgrade (remove when the first one is in)
fn dummy_upgrade() -> Result<(), Error> {
	Ok(())
}

fn push_upgrades(upgrades: &mut UpgradeList) {
	// dummy upgrade (remove when the first one is in)
	upgrades.insert(
		UpgradeKey { old_version: Version::parse("0.9.0").unwrap(), new_version: Version::parse("1.0.0").unwrap() },
		dummy_upgrade);
}

fn upgrade_from_version(previous_version: &Version) -> Result<usize, Error> {
	let mut upgrades = HashMap::new();
	push_upgrades(&mut upgrades);

	let current_version = Version::parse(CURRENT_VERSION).unwrap();

	let mut count = 0;
	for upgrade_key in upgrades.keys() {
		if upgrade_key.is_applicable(previous_version, &current_version) {
			let upgrade_script = upgrades[upgrade_key];
			try!(upgrade_script());
			count += 1;
		}
	}
	Ok(count)
}

fn with_locked_version<F>(db_path: Option<&str>, script: F) -> Result<usize, Error>
	where F: Fn(&Version) -> Result<usize, Error>
{
	let mut path = db_path.map_or({
		let mut path = env::home_dir().expect("Applications should have a home dir");
		path.push(".parity");
		path
	}, PathBuf::from);
	try!(create_dir_all(&path).map_err(|_| Error::CannotCreateConfigPath));
	path.push("ver.lock");

	let version = File::open(&path).ok().and_then(|ref mut file| {
		let mut version_string = String::new();
		file.read_to_string(&mut version_string)
			.ok()
			.and_then(|_| Version::parse(&version_string).ok())
	})
	.unwrap_or_else(|| Version::parse("0.9.0").unwrap());

	let mut lock = try!(File::create(&path).map_err(|_| Error::CannotWriteVersionFile));
	let result = script(&version);

	let written_version = Version::parse(CURRENT_VERSION).unwrap();
	try!(lock.write_all(written_version.to_string().as_bytes()).map_err(|_| Error::CannotUpdateVersionFile));
	result
}

pub fn upgrade(db_path: Option<&str>) -> Result<usize, Error> {
	with_locked_version(db_path, |ver| {
		upgrade_from_version(ver)
	})
}
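
// A hedged usage sketch (not from the original file): the caller passes an
// optional database path override (`None` falls back to ~/.parity) and gets
// back the number of upgrade scripts that were executed. The path shown is
// purely illustrative:
//
//	match upgrade(Some("/custom/parity/path")) {
//		Ok(n) if n > 0 => info!("Executed {} version upgrades", n),
//		Ok(_) => (),
//		Err(e) => warn!("Version upgrade failed: {:?}", e),
//	}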

fn file_exists(path: &Path) -> bool {
	match fs::metadata(&path) {
		Err(ref e) if e.kind() == io::ErrorKind::NotFound => false,
		_ => true,
	}
}

pub fn upgrade_key_location(from: &PathBuf, to: &PathBuf) {
	match fs::create_dir_all(&to).and_then(|()| fs::read_dir(from)) {
		Ok(entries) => {
			let files: Vec<_> = entries.filter_map(|f| f.ok().and_then(|f| {
				if f.file_type().ok().map_or(false, |f| f.is_file()) {
					f.file_name().to_str().map(|s| s.to_owned())
				} else {
					None
				}
			})).collect();
			let mut num: usize = 0;
			for name in files {
				let mut from = from.clone();
				from.push(&name);
				let mut to = to.clone();
				to.push(&name);
				if !file_exists(&to) {
					if let Err(e) = fs::rename(&from, &to) {
						debug!("Error upgrading key {:?}: {:?}", from, e);
					} else {
						num += 1;
					}
				} else {
					debug!("Skipped upgrading key {:?}", from);
				}
			}
			if num > 0 {
				info!("Moved {} keys from {} to {}", num, from.to_string_lossy(), to.to_string_lossy());
			}
		},
		Err(e) => {
			debug!("Error moving keys from {:?} to {:?}: {:?}", from, to, e);
		}
	}
}

fn upgrade_dir_location(source: &PathBuf, dest: &PathBuf) {
	if file_exists(&source) {
		if !file_exists(&dest) {
			let mut parent = dest.clone();
			parent.pop();
			if let Err(e) = fs::create_dir_all(&parent).and_then(|()| fs::rename(&source, &dest)) {
				debug!("Skipped path {:?} -> {:?}: {:?}", source, dest, e);
			} else {
				info!("Moved {} to {}", source.to_string_lossy(), dest.to_string_lossy());
			}
		} else {
			debug!("Skipped upgrading directory {:?}, destination already exists at {:?}", source, dest);
		}
	}
}

fn upgrade_user_defaults(dirs: &DatabaseDirectories) {
	let source = dirs.legacy_user_defaults_path();
	let dest = dirs.user_defaults_path();
	if file_exists(&source) {
		if !file_exists(&dest) {
			if let Err(e) = fs::rename(&source, &dest) {
				debug!("Skipped upgrading user defaults {:?}: {:?}", dest, e);
			}
		} else {
			debug!("Skipped upgrading user defaults {:?}, file exists at {:?}", source, dest);
		}
	}
}

pub fn upgrade_data_paths(base_path: &str, dirs: &DatabaseDirectories, pruning: Algorithm) {
	let legacy_root_path = replace_home("", "$HOME/.parity");
	let default_path = default_data_path();
	if legacy_root_path != base_path && base_path == default_path {
		upgrade_dir_location(&PathBuf::from(legacy_root_path), &PathBuf::from(&base_path));
	}
	upgrade_dir_location(&dirs.legacy_version_path(pruning), &dirs.db_path(pruning));
	upgrade_dir_location(&dirs.legacy_snapshot_path(), &dirs.snapshot_path());
	upgrade_dir_location(&dirs.legacy_network_path(), &dirs.network_path());
	upgrade_user_defaults(&dirs);
}
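
// A hedged sketch (not from the original file) of how startup code might
// invoke the path migration; how `dirs` is built and the choice of pruning
// algorithm are assumptions made for illustration only:
//
//	let base_path = default_data_path();
//	let dirs: DatabaseDirectories = /* derived from the configured directories */;
//	upgrade_data_paths(&base_path, &dirs, Algorithm::OverlayRecent);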