Merge pull request #980 from ethcore/binary-serializer
Auto (with codegen) binary serializer
This commit is contained in: commit 2d6f9af612
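In short: the PR adds a BinaryConvertable trait (ipc/rpc/src/binary.rs) plus a `#[derive(Binary)]` decorator whose impl is generated at build time by ipc/codegen. A minimal usage sketch, assuming the annotated type goes through the syntex pass added in ipc/tests/build.rs and that the rpc crate is imported as `ipc`; type and values follow the tests in this PR:

// Sketch only: `#[derive(Binary)]` is expanded by the codegen registered in
// ipc/codegen via a syntex build pass; it is not a built-in derive.
use std::io::{Cursor, Seek, SeekFrom};

#[derive(Binary, PartialEq, Debug)]
pub struct DoubleRoot {
    pub x1: u32,
    pub x2: u64,
    pub x3: u32,
}

fn round_trip() {
    let value = DoubleRoot { x1: 0, x2: 100, x3: 100000 };

    // serialize_into / deserialize_from are the free functions added in ipc/rpc/src/binary.rs
    let mut buffer = Cursor::new(vec![0u8; 128]);
    ::ipc::binary::serialize_into(&value, &mut buffer).unwrap();

    buffer.seek(SeekFrom::Start(0)).unwrap();
    let restored: DoubleRoot = ::ipc::binary::deserialize_from(&mut buffer).unwrap();
    assert_eq!(value, restored);
}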
8 Cargo.lock generated
@@ -237,11 +237,15 @@ dependencies = [
name = "ethcore"
version = "1.1.0"
dependencies = [
 "bincode 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
 "crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethash 1.1.0",
 "ethcore-devtools 1.1.0",
 "ethcore-ipc 1.1.0",
 "ethcore-ipc-codegen 1.1.0",
 "ethcore-ipc-nano 1.1.0",
 "ethcore-util 1.1.0",
 "ethjson 0.1.0",
 "heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -250,6 +254,10 @@ dependencies = [
 "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "rust-crypto 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
 "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_codegen 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "syntex 0.31.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -298,14 +298,14 @@ fn implement_dispatch_arms(
        .map(|dispatch| { index = index + 1; implement_dispatch_arm(cx, builder, index as u32, dispatch, buffer, replacements) }).collect()
}

-fn strip_ptr(ty: &P<ast::Ty>) -> P<ast::Ty> {
+pub fn strip_ptr(ty: &P<ast::Ty>) -> P<ast::Ty> {
    if let ast::TyKind::Rptr(_, ref ptr_mut) = ty.node {
        ptr_mut.ty.clone()
    }
    else { ty.clone() }
}

-fn has_ptr(ty: &P<ast::Ty>) -> bool {
+pub fn has_ptr(ty: &P<ast::Ty>) -> bool {
    if let ast::TyKind::Rptr(_, ref _ptr_mut) = ty.node {
        true
    }

@@ -54,6 +54,7 @@ pub fn register(reg: &mut syntex::Registry) {
    reg.add_attr("feature(custom_attribute)");

    reg.add_decorator("derive_Ipc", codegen::expand_ipc_implementation);
+   reg.add_decorator("derive_Binary", serialization::expand_serialization_implementation);
}

#[cfg(not(feature = "with-syntex"))]
@@ -62,4 +63,8 @@ pub fn register(reg: &mut rustc_plugin::Registry) {
        syntax::parse::token::intern("derive_Ipc"),
        syntax::ext::base::MultiDecorator(
            Box::new(codegen::expand_ipc_implementation)));
+   reg.register_syntax_extension(
+       syntax::parse::token::intern("derive_Binary"),
+       syntax::ext::base::MultiDecorator(
+           Box::new(serialization::expand_serialization_implementation)));
}

@@ -15,4 +15,5 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

mod codegen;
+mod serialization;
pub mod typegen;

585 ipc/codegen/src/serialization.rs Normal file
@@ -0,0 +1,585 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use aster;

use syntax::ast::{
    MetaItem,
    Item,
    Ident,
};

use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;

pub struct Error;

use super::codegen;

pub fn expand_serialization_implementation(
    cx: &mut ExtCtxt,
    span: Span,
    meta_item: &MetaItem,
    annotatable: &Annotatable,
    push: &mut FnMut(Annotatable)
) {
    let item = match *annotatable {
        Annotatable::Item(ref item) => item,
        _ => {
            cx.span_err(meta_item.span, "`#[derive(Binary)]` may only be applied to structs and enums");
            return;
        },
    };

    let builder = aster::AstBuilder::new().span(span);

    let impl_item = match serialize_item(cx, &builder, &item) {
        Ok(item) => item,
        Err(Error) => {
            // An error occurred, but it should have been reported already.
            return;
        },
    };

    push(Annotatable::Item(impl_item))
}

fn serialize_item(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    item: &Item,
) -> Result<P<ast::Item>, Error> {
    let generics = match item.node {
        ast::ItemKind::Struct(_, ref generics) => generics,
        ast::ItemKind::Enum(_, ref generics) => generics,
        _ => {
            cx.span_err(
                item.span,
                "`#[derive(Binary)]` may only be applied to structs and enums");
            return Err(Error);
        },
    };

    let ty = builder.ty().path()
        .segment(item.ident).with_generics(generics.clone()).build()
        .build();

    let where_clause = &generics.where_clause;

    let binary_expressions = try!(binary_expr(cx,
        &builder,
        &item,
        &generics,
        ty.clone()));

    let (size_expr, read_expr, write_expr) =
        (binary_expressions.size, binary_expressions.read, binary_expressions.write);

    Ok(quote_item!(cx,
        impl $generics ::ipc::BinaryConvertable for $ty $where_clause {
            fn size(&self) -> usize {
                $size_expr
            }

            fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
                $write_expr
            }

            fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
                $read_expr
            }

            fn len_params() -> usize {
                1
            }
        }
    ).unwrap())
}

#[allow(unreachable_code)]
fn binary_expr(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    item: &Item,
    impl_generics: &ast::Generics,
    ty: P<ast::Ty>,
) -> Result<BinaryExpressions, Error> {
    match item.node {
        ast::ItemKind::Struct(ref variant_data, _) => {
            binary_expr_item_struct(
                cx,
                builder,
                impl_generics,
                ty,
                item.span,
                variant_data,
            )
        },
        ast::ItemKind::Enum(ref enum_def, _) => {
            binary_expr_enum(
                cx,
                builder,
                item.ident,
                impl_generics,
                ty,
                item.span,
                enum_def,
            )
        },
        _ => {
            cx.span_bug(item.span,
                "expected ItemStruct or ItemEnum in #[derive(Binary)]");
            Err(Error)
        },
    }
}

struct BinaryExpressions {
    pub size: P<ast::Expr>,
    pub write: P<ast::Expr>,
    pub read: P<ast::Expr>,
}

fn binary_expr_struct(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    ty: P<ast::Ty>,
    fields: &[ast::StructField],
    value_ident: Option<ast::Ident>,
    instance_ident: Option<ast::Ident>,
) -> Result<BinaryExpressions, Error> {

    let size_exprs: Vec<P<ast::Expr>> = fields.iter().enumerate().map(|(index, field)| {
        let field_type_ident = builder.id(
            &::syntax::print::pprust::ty_to_string(
                &codegen::strip_ptr(&field.ty)));
        let index_ident = builder.id(format!("__field{}", index));
        value_ident.and_then(|x| {
            let field_id = builder.id(field.ident.unwrap());
            Some(quote_expr!(cx,
                match $field_type_ident::len_params() {
                    0 => mem::size_of::<$field_type_ident>(),
                    _ => $x. $field_id .size(),
                }))
        })
        .unwrap_or_else(|| quote_expr!(cx, match $field_type_ident::len_params() {
            0 => mem::size_of::<$field_type_ident>(),
            _ => $index_ident .size(),
        }))
    }).collect();

    let first_size_expr = size_exprs[0].clone();
    let mut total_size_expr = quote_expr!(cx, 0usize + $first_size_expr);
    for index in 1..size_exprs.len() {
        let next_expr = size_exprs[index].clone();
        total_size_expr = quote_expr!(cx, $total_size_expr + $next_expr);
    }

    let mut write_stmts = Vec::<ast::Stmt>::new();
    write_stmts.push(quote_stmt!(cx, let mut offset = 0usize;).unwrap());

    let mut map_stmts = Vec::<ast::Stmt>::new();
    let field_amount = builder.id(&format!("{}",fields.len()));
    map_stmts.push(quote_stmt!(cx, let mut map = vec![0usize; $field_amount];).unwrap());
    map_stmts.push(quote_stmt!(cx, let mut total = 0usize;).unwrap());
    for (index, field) in fields.iter().enumerate() {
        let field_type_ident = builder.id(
            &::syntax::print::pprust::ty_to_string(
                &codegen::strip_ptr(&field.ty)));

        let member_expr = match value_ident {
            Some(x) => {
                let field_id = builder.id(field.ident.unwrap());
                quote_expr!(cx, $x . $field_id)
            },
            None => {
                let index_ident = builder.id(format!("__field{}", index));
                quote_expr!(cx, $index_ident)
            },
        };

        write_stmts.push(quote_stmt!(cx, let next_line = offset + match $field_type_ident::len_params() {
            0 => mem::size_of::<$field_type_ident>(),
            _ => { let size = $member_expr .size(); length_stack.push_back(size); size },
        }).unwrap());

        write_stmts.push(quote_stmt!(cx,
            if let Err(e) = $member_expr .to_bytes(&mut buffer[offset..next_line], length_stack) { return Err(e) };).unwrap());

        write_stmts.push(quote_stmt!(cx, offset = next_line; ).unwrap());

        let field_index = builder.id(&format!("{}", index));
        map_stmts.push(quote_stmt!(cx, map[$field_index] = total;).unwrap());
        map_stmts.push(quote_stmt!(cx, let size = match $field_type_ident::len_params() {
            0 => mem::size_of::<$field_type_ident>(),
            _ => length_stack.pop_front().unwrap(),
        }).unwrap());
        map_stmts.push(quote_stmt!(cx, total = total + size;).unwrap());
    };

    let read_expr = match fields.iter().any(|f| codegen::has_ptr(&f.ty)) {
        true => {
            // cannot create structs with pointers
            quote_expr!(cx, Err(::ipc::binary::BinaryConvertError))
        },
        false => {
            if value_ident.is_some() {
                let instance_create = named_fields_sequence(cx, &ty, fields);
                quote_expr!(cx, { $map_stmts; $instance_create; Ok(result) })
            }
            else {
                let map_variant = P(fields_sequence(cx, &ty, fields, &instance_ident.unwrap_or(builder.id("Self"))));
                quote_expr!(cx, { $map_stmts; Ok($map_variant) })
            }
        },
    };

    Ok(BinaryExpressions {
        size: total_size_expr,
        write: quote_expr!(cx, { $write_stmts; Ok(()) } ),
        read: read_expr,
    })
}

#[allow(unreachable_code)]
fn binary_expr_item_struct(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    _impl_generics: &ast::Generics,
    ty: P<ast::Ty>,
    span: Span,
    variant_data: &ast::VariantData,
) -> Result<BinaryExpressions, Error> {
    match *variant_data {
        ast::VariantData::Tuple(ref fields, _) => {
            binary_expr_struct(
                cx,
                &builder,
                ty,
                fields,
                Some(builder.id("self")),
                None,
            )
        },
        ast::VariantData::Struct(ref fields, _) => {
            binary_expr_struct(
                cx,
                &builder,
                ty,
                fields,
                Some(builder.id("self")),
                None,
            )
        },
        _ => {
            cx.span_bug(span,
                &format!("#[derive(Binary)] Unsupported struct content, expected tuple/struct, found: {:?}",
                    variant_data));
            Err(Error)
        },
    }
}

fn binary_expr_enum(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    type_ident: Ident,
    impl_generics: &ast::Generics,
    ty: P<ast::Ty>,
    span: Span,
    enum_def: &ast::EnumDef,
) -> Result<BinaryExpressions, Error> {
    let arms: Vec<_> = try!(
        enum_def.variants.iter()
            .enumerate()
            .map(|(variant_index, variant)| {
                binary_expr_variant(
                    cx,
                    builder,
                    type_ident,
                    impl_generics,
                    ty.clone(),
                    span,
                    variant,
                    variant_index,
                )
            })
            .collect()
    );

    let (size_arms, write_arms, mut read_arms) = (
        arms.iter().map(|x| x.size.clone()).collect::<Vec<ast::Arm>>(),
        arms.iter().map(|x| x.write.clone()).collect::<Vec<ast::Arm>>(),
        arms.iter().map(|x| x.read.clone()).collect::<Vec<ast::Arm>>());

    read_arms.push(quote_arm!(cx, _ => { Err(BinaryConvertError) } ));

    Ok(BinaryExpressions {
        size: quote_expr!(cx, 1usize + match *self { $size_arms }),
        write: quote_expr!(cx, match *self { $write_arms }; ),
        read: quote_expr!(cx, match buffer[0] { $read_arms }),
    })
}

struct BinaryArm {
    size: ast::Arm,
    write: ast::Arm,
    read: ast::Arm,
}

fn fields_sequence(
    ext_cx: &ExtCtxt,
    _ty: &P<ast::Ty>,
    fields: &[ast::StructField],
    variant_ident: &ast::Ident,
) -> ast::Expr {
    use syntax::parse::token;
    use syntax::ast::TokenTree::Token;

    ::quasi::parse_expr_panic(&mut ::syntax::parse::new_parser_from_tts(
        ext_cx.parse_sess(),
        ext_cx.cfg(),
        {
            let _sp = ext_cx.call_site();
            let mut tt = ::std::vec::Vec::new();
            tt.push(Token(_sp, token::Ident(variant_ident.clone(), token::Plain)));
            tt.push(Token(_sp, token::OpenDelim(token::Paren)));

            for (idx, field) in fields.iter().enumerate() {
                if field.ident.is_some() {
                    tt.push(Token(_sp, token::Ident(field.ident.clone().unwrap(), token::Plain)));
                    tt.push(Token(_sp, token::Colon));
                }

                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Paren)));
                tt.push(Token(
                    _sp,
                    token::Ident(
                        ext_cx.ident_of(&::syntax::print::pprust::ty_to_string(&field.ty)),
                        token::Plain)));
                tt.push(Token(_sp, token::ModSep));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Paren)));

                tt.push(Token(_sp, token::BinOp(token::And)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"), token::Plain)));

                tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)), token::Plain)));
                tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
                tt.push(Token(_sp, token::DotDot));

                if idx+1 != fields.len() {
                    tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"), token::Plain)));
                    tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                    tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)), token::Plain)));
                    tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
                }

                tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

                tt.push(Token(_sp, token::Comma));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"), token::Plain)));

                tt.push(Token(_sp, token::CloseDelim(token::Paren)));
                tt.push(Token(_sp, token::CloseDelim(token::Paren)));
                tt.push(Token(_sp, token::Comma));
            }
            tt.push(Token(_sp, token::CloseDelim(token::Paren)));

            tt
        })
    ).unwrap()
}

fn named_fields_sequence(
    ext_cx: &ExtCtxt,
    ty: &P<ast::Ty>,
    fields: &[ast::StructField],
) -> ast::Stmt {
    use syntax::parse::token;
    use syntax::ast::TokenTree::Token;

    ::quasi::parse_stmt_panic(&mut ::syntax::parse::new_parser_from_tts(
        ext_cx.parse_sess(),
        ext_cx.cfg(),
        {
            let _sp = ext_cx.call_site();
            let mut tt = ::std::vec::Vec::new();
            tt.push(Token(_sp, token::Ident(ext_cx.ident_of("let"), token::Plain)));
            tt.push(Token(_sp, token::Ident(ext_cx.ident_of("result"), token::Plain)));
            tt.push(Token(_sp, token::Eq));

            tt.push(Token(
                _sp,
                token::Ident(
                    ext_cx.ident_of(&::syntax::print::pprust::ty_to_string(ty)),
                    token::Plain)));

            tt.push(Token(_sp, token::OpenDelim(token::Brace)));

            for (idx, field) in fields.iter().enumerate() {
                tt.push(Token(_sp, token::Ident(field.ident.clone().unwrap(), token::Plain)));
                tt.push(Token(_sp, token::Colon));

                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Paren)));
                tt.push(Token(
                    _sp,
                    token::Ident(
                        ext_cx.ident_of(&::syntax::print::pprust::ty_to_string(&field.ty)),
                        token::Plain)));
                tt.push(Token(_sp, token::ModSep));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Paren)));

                tt.push(Token(_sp, token::BinOp(token::And)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"), token::Plain)));

                tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"), token::Plain)));
                tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)), token::Plain)));
                tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
                tt.push(Token(_sp, token::DotDot));
                if idx + 1 != fields.len() {
                    tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"), token::Plain)));
                    tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
                    tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)), token::Plain)));
                    tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
                }

                tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

                tt.push(Token(_sp, token::Comma));
                tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"), token::Plain)));

                tt.push(Token(_sp, token::CloseDelim(token::Paren)));
                tt.push(Token(_sp, token::CloseDelim(token::Paren)));
                tt.push(Token(_sp, token::Comma));
            }

            tt.push(Token(_sp, token::CloseDelim(token::Brace)));
            tt
        })
    ).unwrap()
}

fn binary_expr_variant(
    cx: &ExtCtxt,
    builder: &aster::AstBuilder,
    type_ident: Ident,
    _generics: &ast::Generics,
    ty: P<ast::Ty>,
    _span: Span,
    variant: &ast::Variant,
    variant_index: usize,
) -> Result<BinaryArm, Error> {
    let variant_ident = variant.node.name;
    let variant_index_ident = builder.id(format!("{}", variant_index));

    match variant.node.data {
        ast::VariantData::Unit(_) => {
            let pat = builder.pat().path()
                .id(type_ident).id(variant_ident)
                .build();

            let variant_val = builder.id(format!("{}::{}", type_ident, variant_ident));

            Ok(BinaryArm {
                size: quote_arm!(cx, $pat => { 0usize } ),
                write: quote_arm!(cx, $pat => { buffer[0] = $variant_index_ident; Ok(()) } ),
                read: quote_arm!(cx, $variant_index_ident => { Ok($variant_val) } ),
            })
        },
        ast::VariantData::Tuple(ref fields, _) => {
            let field_names: Vec<ast::Ident> = (0 .. fields.len())
                .map(|i| builder.id(format!("__field{}", i)))
                .collect();

            let pat = builder.pat().enum_()
                .id(type_ident).id(variant_ident).build()
                .with_pats(
                    field_names.iter()
                        .map(|field| builder.pat().ref_id(field))
                )
                .build();

            let binary_expr = try!(binary_expr_struct(
                cx,
                &builder,
                ty,
                fields,
                None,
                Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
            ));

            let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);
            Ok(BinaryArm {
                size: quote_arm!(cx, $pat => { $size_expr } ),
                write: quote_arm!(cx,
                    $pat => {
                        buffer[0] = $variant_index_ident;
                        let buffer = &mut buffer[1..];
                        $write_expr
                    }),
                read: quote_arm!(cx, $variant_index_ident => { $read_expr } ),
            })
        },
        ast::VariantData::Struct(ref fields, _) => {
            let field_names: Vec<_> = (0 .. fields.len())
                .map(|i| builder.id(format!("__field{}", i)))
                .collect();

            let pat = builder.pat().struct_()
                .id(type_ident).id(variant_ident).build()
                .with_pats(
                    field_names.iter()
                        .zip(fields.iter())
                        .map(|(id, field)|(field.ident.unwrap(), builder.pat().ref_id(id))))
                .build();

            let binary_expr = try!(binary_expr_struct(
                cx,
                &builder,
                ty,
                fields,
                None,
                Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
            ));

            let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);

            Ok(BinaryArm {
                size: quote_arm!(cx, $pat => { $size_expr } ),
                write: quote_arm!(cx,
                    $pat => {
                        buffer[0] = $variant_index_ident;
                        let buffer = &mut buffer[1..];
                        $write_expr
                    }),
                read: quote_arm!(cx, $pat => { $read_expr } ),
            })
        },
    }
}

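For a plain fixed-size struct, the impl produced by the codegen above has roughly the following shape. This is a hand-written illustration, not the literal expansion: Pair is an invented type, and the crate is assumed to be imported as `ipc` with its binary module re-exported as in ipc/rpc/src/lib.rs.

use std::collections::VecDeque;
use std::mem;
use ipc::binary::{BinaryConvertable, BinaryConvertError};

// Illustrative type, not part of the PR.
pub struct Pair {
    pub a: u32,
    pub b: u64,
}

// Hand-written impl in the shape generated by serialize_item/binary_expr_struct above.
impl BinaryConvertable for Pair {
    fn size(&self) -> usize {
        // u32 and u64 are fixed-size (len_params() == 0), so this collapses to
        // mem::size_of::<u32>() + mem::size_of::<u64>() in practice
        0usize
            + match u32::len_params() { 0 => mem::size_of::<u32>(), _ => self.a.size() }
            + match u64::len_params() { 0 => mem::size_of::<u64>(), _ => self.b.size() }
    }

    fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
        // fields are written back to back; variable-length fields would also push
        // their sizes onto `length_stack`
        let mut offset = 0usize;
        let next_line = offset + mem::size_of::<u32>();
        try!(self.a.to_bytes(&mut buffer[offset..next_line], length_stack));
        offset = next_line;
        let next_line = offset + mem::size_of::<u64>();
        try!(self.b.to_bytes(&mut buffer[offset..next_line], length_stack));
        Ok(())
    }

    fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
        // the generated code builds a field-offset map first; for two fixed-size
        // fields that amounts to splitting the buffer at size_of::<u32>()
        let split = mem::size_of::<u32>();
        Ok(Pair {
            a: try!(u32::from_bytes(&buffer[..split], length_stack)),
            b: try!(u64::from_bytes(&buffer[split..], length_stack)),
        })
    }

    fn len_params() -> usize { 1 }
}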
@@ -50,7 +50,8 @@ fn is_new_entry(path: &Path) -> Option<String> {
            ident == "H256" ||
            ident == "U256" ||
            ident == "H2048" ||
-           ident == "Address"
+           ident == "Address" ||
+           ident == "Bytes"
        }
    };

@@ -10,3 +10,4 @@ license = "GPL-3.0"
ethcore-devtools = { path = "../../devtools" }
semver = "0.2.0"
nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git" }
+ethcore-util = { path = "../../util" }

357 ipc/rpc/src/binary.rs Normal file
@@ -0,0 +1,357 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Binary representation of types

use util::bytes::Populatable;
use std::mem;
use std::collections::VecDeque;

#[derive(Debug)]
pub struct BinaryConvertError;

pub trait BinaryConvertable : Sized {
    fn size(&self) -> usize {
        mem::size_of::<Self>()
    }

    fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError>;

    fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError>;

    fn from_empty_bytes() -> Result<Self, BinaryConvertError> {
        Err(BinaryConvertError)
    }

    fn len_params() -> usize {
        0
    }
}

impl<T> BinaryConvertable for Option<T> where T: BinaryConvertable {
    fn size(&self) -> usize {
        match * self { None => 0, Some(ref val) => val.size() }
    }

    fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
        match *self { None => Err(BinaryConvertError), Some(ref val) => val.to_bytes(buffer, length_stack) }
    }

    fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
        Ok(Some(try!(T::from_bytes(buffer, length_stack))))
    }

    fn from_empty_bytes() -> Result<Self, BinaryConvertError> {
        Ok(None)
    }

    fn len_params() -> usize {
        1
    }
}

impl<T> BinaryConvertable for Vec<T> where T: BinaryConvertable {
    fn size(&self) -> usize {
        match T::len_params() {
            0 => mem::size_of::<T>() * self.len(),
            _ => self.iter().fold(0usize, |acc, t| acc + t.size()),
        }
    }

    fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
        let mut offset = 0usize;
        for item in self.iter() {
            let next_size = match T::len_params() {
                0 => mem::size_of::<T>(),
                _ => { let size = item.size(); length_stack.push_back(size); size },
            };
            if next_size > 0 {
                let item_end = offset + next_size;
                try!(item.to_bytes(&mut buffer[offset..item_end], length_stack));
                offset = item_end;
            }
        }
        Ok(())
    }

    fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
        let mut index = 0;
        let mut result = Self::with_capacity(
            match T::len_params() {
                0 => buffer.len() / mem::size_of::<T>(),
                _ => 128,
            });

        loop {
            let next_size = match T::len_params() {
                0 => mem::size_of::<T>(),
                _ => try!(length_stack.pop_front().ok_or(BinaryConvertError)),
            };
            let item = if next_size == 0 {
                try!(T::from_empty_bytes())
            }
            else {
                try!(T::from_bytes(&buffer[index..index+next_size], length_stack))
            };
            result.push(item);

            index = index + next_size;
            if index == buffer.len() { break; }
            if index > buffer.len() {
                return Err(BinaryConvertError)
            }
        }

        Ok(result)
    }

    fn from_empty_bytes() -> Result<Self, BinaryConvertError> {
        Ok(Self::new())
    }

    fn len_params() -> usize {
        1
    }
}

pub fn deserialize_from<T, R>(r: &mut R) -> Result<T, BinaryConvertError>
    where R: ::std::io::Read,
        T: BinaryConvertable
{
    let mut fake_stack = VecDeque::new();
    let mut length_stack = VecDeque::<usize>::new();
    let mut size_buffer = [0u8; 8];
    try!(r.read(&mut size_buffer[..]).map_err(|_| BinaryConvertError));
    let stack_len = try!(u64::from_bytes(&mut size_buffer[..], &mut fake_stack)) as usize;
    if stack_len > 0 {
        let mut header_buffer = Vec::with_capacity(stack_len * 8);
        unsafe { header_buffer.set_len(stack_len * 8); };

        try!(r.read(&mut header_buffer[..]).map_err(|_| BinaryConvertError));
        for idx in 0..stack_len {
            let stack_item = try!(u64::from_bytes(&header_buffer[idx*8..(idx+1)*8], &mut fake_stack));
            length_stack.push_back(stack_item as usize);
        }
    }

    try!(r.read(&mut size_buffer[..]).map_err(|_| BinaryConvertError));
    let size = try!(u64::from_bytes(&size_buffer[..], &mut fake_stack)) as usize;

    let mut data = Vec::with_capacity(size);
    unsafe { data.set_len(size) };
    try!(r.read(&mut data).map_err(|_| BinaryConvertError));

    T::from_bytes(&data[..], &mut length_stack)
}

pub fn serialize_into<T, W>(t: &T, w: &mut W) -> Result<(), BinaryConvertError>
    where W: ::std::io::Write,
        T: BinaryConvertable
{
    let mut length_stack = VecDeque::<usize>::new();
    let mut fake_stack = VecDeque::new();
    let mut size_buffer = [0u8; 8];

    let size = t.size();
    let mut buffer = Vec::with_capacity(size);
    unsafe { buffer.set_len(size); }
    try!(t.to_bytes(&mut buffer[..], &mut length_stack));

    let stack_len = length_stack.len();
    try!((stack_len as u64).to_bytes(&mut size_buffer[..], &mut fake_stack));
    try!(w.write(&size_buffer[..]).map_err(|_| BinaryConvertError));
    if stack_len > 0 {
        let mut header_buffer = Vec::with_capacity(stack_len * 8);
        unsafe { header_buffer.set_len(stack_len * 8); };
        try!((stack_len as u64).to_bytes(&mut header_buffer[0..8], &mut fake_stack));
        let mut idx = 0;
        loop {
            match length_stack.pop_front() {
                Some(val) => try!((val as u64).to_bytes(&mut header_buffer[idx * 8..(idx+1) * 8], &mut fake_stack)),
                None => { break; }
            }
            idx = idx + 1;
        }
        try!(w.write(&header_buffer[..]).map_err(|_| BinaryConvertError));
    }

    try!((size as u64).to_bytes(&mut size_buffer[..], &mut fake_stack));
    try!(w.write(&size_buffer[..]).map_err(|_| BinaryConvertError));

    try!(w.write(&buffer[..]).map_err(|_| BinaryConvertError));

    Ok(())
}

macro_rules! binary_fixed_size {
    ($target_ty: ident) => {
        impl BinaryConvertable for $target_ty {
            fn from_bytes(bytes: &[u8], _length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
                match bytes.len().cmp(&::std::mem::size_of::<$target_ty>()) {
                    ::std::cmp::Ordering::Less => return Err(BinaryConvertError),
                    ::std::cmp::Ordering::Greater => return Err(BinaryConvertError),
                    ::std::cmp::Ordering::Equal => ()
                };
                let mut res: Self = unsafe { ::std::mem::uninitialized() };
                res.copy_raw(bytes);
                Ok(res)
            }

            fn to_bytes(&self, buffer: &mut [u8], _length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
                let sz = ::std::mem::size_of::<$target_ty>();
                let ip: *const $target_ty = self;
                let ptr: *const u8 = ip as *const _;
                unsafe {
                    ::std::ptr::copy(ptr, buffer.as_mut_ptr(), sz);
                }
                Ok(())
            }
        }
    }
}

binary_fixed_size!(u64);
binary_fixed_size!(u32);
binary_fixed_size!(bool);

#[test]
fn vec_serialize() {
    let mut v = Vec::new();
    v.push(5u64);
    v.push(10u64);
    let mut length_stack = VecDeque::new();
    let mut data = Vec::with_capacity(v.size());
    unsafe { data.set_len(v.size()); }
    let result = v.to_bytes(&mut data[..], &mut length_stack);

    assert!(result.is_ok());
    assert_eq!(5, data[0]);
    assert_eq!(0, data[1]);
    assert_eq!(10, data[8]);
    assert_eq!(0, data[12]);
}

#[test]
fn calculates_size() {
    let mut v = Vec::new();
    v.push(5u64);
    v.push(10u64);

    assert_eq!(16, v.size());
}

#[test]
fn vec_deserialize() {
    let data = [
        10u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
        5u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
    ];

    let mut length_stack = VecDeque::new();
    let vec = Vec::<u64>::from_bytes(&data[..], &mut length_stack).unwrap();

    assert_eq!(vec![10u64, 5u64], vec);
}

#[test]
fn vec_deserialize_chained() {
    let mut v = Vec::new();
    v.push(Some(5u64));
    v.push(Some(10u64));
    v.push(None);
    v.push(Some(12u64));

    let mut length_stack = VecDeque::new();
    let mut data = Vec::with_capacity(v.size());
    unsafe { data.set_len(v.size()); }
    let result = v.to_bytes(&mut data[..], &mut length_stack);

    assert!(result.is_ok());
    assert_eq!(4, length_stack.len());
}

#[test]
fn vec_serialize_deserialize() {
    let mut v = Vec::new();
    v.push(Some(5u64));
    v.push(None);
    v.push(Some(10u64));
    v.push(None);
    v.push(Some(12u64));

    let mut data = Vec::with_capacity(v.size());
    unsafe { data.set_len(v.size()); }
    let mut length_stack = VecDeque::new();

    v.to_bytes(&mut data[..], &mut length_stack).unwrap();
    let de_v = Vec::<Option<u64>>::from_bytes(&data[..], &mut length_stack).unwrap();

    assert_eq!(v, de_v);
}

#[test]
fn serialize_into_ok() {
    use std::io::Cursor;
    let mut buff = Cursor::new(vec![0; 128]);

    let mut v = Vec::new();
    v.push(Some(5u64));
    v.push(None);
    v.push(Some(10u64));
    v.push(None);
    v.push(Some(12u64));

    serialize_into(&v, &mut buff).unwrap();
    assert_eq!(5, buff.get_ref()[0]);
    assert_eq!(8, buff.get_ref()[8]);
    assert_eq!(0, buff.get_ref()[16]);
    assert_eq!(8, buff.get_ref()[24]);
}

#[test]
fn deserialize_from_ok() {
    use std::io::Cursor;
    let mut buff = Cursor::new(vec![
        0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
        16u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
        10u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
        5u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
    ]);

    let vec = deserialize_from::<Vec<u64>, _>(&mut buff).unwrap();

    assert_eq!(vec![10u64, 5u64], vec);
}

#[test]
fn serialize_into_deserialize_from() {
    use std::io::{Cursor, SeekFrom, Seek};

    let mut buff = Cursor::new(vec![0u8; 1024]);
    let mut v = Vec::new();
    v.push(Some(5u64));
    v.push(None);
    v.push(Some(10u64));
    v.push(None);
    v.push(Some(12u64));

    serialize_into(&v, &mut buff).unwrap();
    buff.seek(SeekFrom::Start(0)).unwrap();
    let de_v = deserialize_from::<Vec<Option<u64>>, _>(&mut buff).unwrap();
    assert_eq!(v, de_v);
}

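For reference, the framing written by serialize_into is: an 8-byte count of length-stack entries, then 8 bytes per entry (if any), then an 8-byte payload size, then the payload produced by to_bytes. A sketch that mirrors the bytes used in the deserialize_from_ok test above, assuming the rpc crate is imported as `ipc`:

// Sketch of the framing produced by serialize_into for Vec<u64>; all u64s are
// fixed-size, so the length stack is empty here.
use std::io::Cursor;

fn framing_example() {
    let v = vec![10u64, 5u64];
    let mut buff = Cursor::new(Vec::new());
    ::ipc::binary::serialize_into(&v, &mut buff).unwrap();

    let bytes = buff.into_inner();
    // [0..8)   length-stack entry count: 0 (no variable-length elements)
    // [8..16)  payload size: 16 bytes (two u64 values)
    // [16..32) payload: 10u64 then 5u64, copied as laid out in memory
    //          (little-endian on the platforms the tests above assume)
    assert_eq!(0, bytes[0]);
    assert_eq!(16, bytes[8]);
    assert_eq!(10, bytes[16]);
    assert_eq!(5, bytes[24]);
}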
@@ -19,6 +19,9 @@
extern crate ethcore_devtools as devtools;
extern crate semver;
extern crate nanomsg;
+extern crate ethcore_util as util;

pub mod interface;
+pub mod binary;
pub use interface::{IpcInterface, IpcSocket, invoke, IpcConfig, Handshake, Error, WithSocket};
+pub use binary::{BinaryConvertable, BinaryConvertError};

19 ipc/tests/binary.rs Normal file
@@ -0,0 +1,19 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

#![allow(dead_code, unused_assignments, unused_variables)] // codegen issues

include!(concat!(env!("OUT_DIR"), "/binary.rs"));
38 ipc/tests/binary.rs.in Normal file
@@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use ipc::*;
use std::mem;
use std::collections::VecDeque;

#[derive(Binary)]
pub enum Root {
    Top,
    Middle(u32, u64),
}

#[derive(Binary, PartialEq, Debug)]
pub struct DoubleRoot {
    pub x1: u32,
    pub x2: u64,
    pub x3: u32,
}

#[derive(Binary, PartialEq, Debug)]
pub struct ReferenceStruct<'a> {
    pub ref_data: &'a u64,
}
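Note that ReferenceStruct can only be written: for structs containing reference fields the codegen above emits a plain Err(BinaryConvertError) body for from_bytes. As an aside on the enum encoding, the first byte is the variant index and the rest are the variant's fields, so sizes work out as in this sketch (an inference from binary_expr_enum, not a test from this PR):

// Sketch: enum values are encoded as a one-byte variant index followed by the
// variant's fields (see binary_expr_enum in ipc/codegen/src/serialization.rs).
// Assumes `use ipc::*;` as above, so `size()` from BinaryConvertable is in scope.
fn enum_size_sketch() {
    assert_eq!(1, Root::Top.size());                        // index byte only
    assert_eq!(1 + 4 + 8, Root::Middle(0u32, 0u64).size()); // index + u32 + u64
}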
@@ -60,4 +60,13 @@ pub fn main() {
        registry.expand("", &src, &dst).unwrap();
    }

+
+    // ipc pass
+    {
+        let src = Path::new("binary.rs.in");
+        let dst = Path::new(&out_dir).join("binary.rs");
+        let mut registry = syntex::Registry::new();
+        codegen::register(&mut registry);
+        registry.expand("", &src, &dst).unwrap();
+    }
}

@@ -18,6 +18,7 @@
mod tests {

    use super::super::service::*;
+   use super::super::binary::*;
    use super::super::nested::{DBClient,DBWriter};
    use ipc::*;
    use devtools::*;
@@ -143,4 +144,19 @@ mod tests {

        assert!(result.is_ok());
    }
+
+   #[test]
+   fn can_serialize_dummy_structs() {
+       let mut socket = TestSocket::new();
+
+       let struct_ = DoubleRoot { x1: 0, x2: 100, x3: 100000};
+       let res = ::ipc::binary::serialize_into(&struct_, &mut socket);
+
+       assert!(res.is_ok());
+
+       let mut read_socket = TestSocket::new_ready(socket.write_buffer.clone());
+       let new_struct: DoubleRoot = ::ipc::binary::deserialize_from(&mut read_socket).unwrap();
+
+       assert_eq!(struct_, new_struct);
+   }
}

@@ -29,3 +29,4 @@ pub mod service;
mod examples;
mod over_nano;
mod nested;
+mod binary;