//! `#[derive(Binary)]` codegen: expands annotated structs and enums into
//! `::ipc::BinaryConvertable` implementations (size / to_bytes / from_bytes).
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
2016-04-20 13:17:11 +02:00
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use aster;
use syntax::ast::{
MetaItem,
Item,
Ident,
};
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ptr::P;
/// Marker error type: the actual diagnostic is emitted through the `ExtCtxt`
/// (`span_err`) before this is returned, so it carries no payload.
pub struct Error;
2016-04-22 15:37:22 +02:00
use super::codegen;
2016-04-20 13:17:11 +02:00
pub fn expand_serialization_implementation(
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
let item = match *annotatable {
Annotatable::Item(ref item) => item,
_ => {
cx.span_err(meta_item.span, "`#[derive(Binary)]` may only be applied to structs and enums");
return;
2016-04-24 20:34:19 +02:00
},
2016-04-20 13:17:11 +02:00
};
let builder = aster::AstBuilder::new().span(span);
2016-04-23 17:50:12 +02:00
let impl_item = match serialize_item(cx, &builder, &item) {
Ok(item) => item,
Err(Error) => {
2016-07-19 20:42:23 +02:00
// An error occurred, but it should have been reported already.
2016-04-23 17:50:12 +02:00
return;
2016-04-24 20:34:19 +02:00
},
2016-04-23 17:50:12 +02:00
};
push(Annotatable::Item(impl_item))
2016-04-20 13:17:11 +02:00
}
/// Builds the `impl ::ipc::BinaryConvertable for T` item for the derived
/// struct or enum. Reports a span error and returns `Err(Error)` for any
/// other item kind, or if the quoted impl fails to parse.
fn serialize_item(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	item: &Item,
) -> Result<P<ast::Item>, Error> {
	// Only data-carrying items can derive Binary; reject everything else.
	let generics = match item.node {
		ast::ItemKind::Struct(_, ref generics) => generics,
		ast::ItemKind::Enum(_, ref generics) => generics,
		_ => {
			cx.span_err(
				item.span,
				"`#[derive(Binary)]` may only be applied to structs and enums");
			return Err(Error);
		},
	};

	// Self type with the item's own generics applied, e.g. `Foo<T>`.
	let ty = builder.ty().path()
		.segment(item.ident).with_generics(generics.clone()).build()
		.build();

	let where_clause = &generics.where_clause;

	// Generate the size/read/write expression bodies from the fields or
	// variants of the item.
	let binary_expressions = try!(binary_expr(cx,
		&builder,
		&item,
		&generics,
		ty.clone()));

	let (size_expr, read_expr, write_expr) =
		(binary_expressions.size, binary_expressions.read, binary_expressions.write);

	// Splice the generated expressions into the trait impl skeleton.
	// `quote_item!` yields None on a syntax error in the expansion.
	match quote_item!(cx,
		impl $generics ::ipc::BinaryConvertable for $ty $where_clause {
			fn size(&self) -> usize {
				$size_expr
			}

			fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut ::std::collections::VecDeque<usize>) -> Result<(), ::ipc::BinaryConvertError> {
				$write_expr
			}

			fn from_bytes(buffer: &[u8], length_stack: &mut ::std::collections::VecDeque<usize>) -> Result<Self, ::ipc::BinaryConvertError> {
				$read_expr
			}

			fn len_params() -> usize {
				1
			}
		})
	{
		Some(item) => Ok(item),
		None => {
			cx.span_err(
				item.span,
				"syntax error expanding serialization implementation");
			Err(Error)
		}
	}
}
2016-04-22 15:46:09 +02:00
// The final `Err(...)` after `span_bug` (which panics) is unreachable;
// it exists only to fix the match arm's type.
#[allow(unreachable_code)]
/// Dispatches expression generation to the struct or enum specialisation.
fn binary_expr(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	item: &Item,
	impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
) -> Result<BinaryExpressions, Error> {
	match item.node {
		ast::ItemKind::Struct(ref variant_data, _) => {
			binary_expr_item_struct(
				cx,
				builder,
				impl_generics,
				ty,
				item.span,
				variant_data,
			)
		},
		ast::ItemKind::Enum(ref enum_def, _) => {
			binary_expr_enum(
				cx,
				builder,
				item.ident,
				impl_generics,
				ty,
				item.span,
				enum_def,
			)
		},
		_ => {
			// `serialize_item` already filtered other item kinds, so
			// reaching this arm is an internal bug.
			cx.span_bug(item.span,
				"expected ItemStruct or ItemEnum in #[derive(Binary)]");
			Err(Error) as Result<BinaryExpressions, Error>
		},
	}
}
/// The three generated expression bodies that make up a
/// `BinaryConvertable` impl (see `serialize_item`).
struct BinaryExpressions {
	/// Body of `size()`: total byte length of the value.
	pub size: P<ast::Expr>,
	/// Body of `to_bytes()`: serializes the value into `buffer`.
	pub write: P<ast::Expr>,
	/// Body of `from_bytes()`: reconstructs the value from `buffer`.
	pub read: P<ast::Expr>,
}
2016-04-25 16:20:38 +02:00
/// Turns a printed generic type into one usable in expression position by
/// inserting a turbofish at the first `<`: `"Vec<u8>"` -> `"Vec::<u8>"`.
/// Types without generic arguments are returned unchanged.
///
/// Only the first `<` needs patching: the result is used as the path the
/// generated code calls associated functions on (e.g. `Vec::<u8>::len_params()`).
fn replace_qualified(s: &str) -> String {
	// `find` with a char pattern avoids the substring search machinery,
	// and building the result in one preallocated pass avoids the two
	// byte-shifting `insert` calls of the naive version.
	match s.find('<') {
		Some(pos) => {
			let mut source = String::with_capacity(s.len() + 2);
			source.push_str(&s[..pos]);
			source.push_str("::");
			source.push_str(&s[pos..]);
			source
		},
		None => s.to_owned(),
	}
}
2016-04-20 14:55:08 +02:00
/// Generates the size/write/read expressions for a sequence of fields —
/// either a struct's own fields (`value_ident == Some("self")`) or an enum
/// variant's pattern-bound fields (`value_ident == None`, bound as
/// `__field{n}`). `instance_ident` is the constructor path used on the
/// read side for tuple-style construction (e.g. `Type::Variant`).
fn binary_expr_struct(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	ty: P<ast::Ty>,
	fields: &[ast::StructField],
	value_ident: Option<ast::Ident>,
	instance_ident: Option<ast::Ident>,
) -> Result<BinaryExpressions, Error> {
	// Per-field size expression: `u8` is one byte, `[u8]` is its length,
	// anything else is `mem::size_of` for fixed-size types or a runtime
	// `.size()` call when the type has length parameters.
	let size_exprs: Vec<P<ast::Expr>> = fields.iter().enumerate().map(|(index, field)| {
		let raw_ident = ::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty));
		let index_ident = builder.id(format!("__field{}", index));
		let field_id = match field.ident {
			Some(ident) => builder.id(ident),
			None => builder.id(format!("{}", index)),
		};
		match raw_ident.as_ref() {
			"u8" => {
				quote_expr!(cx, 1)
			},
			"[u8]" => {
				// `self.field.len()` for structs, `__fieldN.len()` for variants.
				value_ident.and_then(|x| {
						Some(quote_expr!(cx, $x. $field_id .len()))
					})
					.unwrap_or_else(|| {
						quote_expr!(cx, $index_ident .len())
					}
				)
			}
			_ => {
				let field_type_ident = builder.id(
					&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty)));
				let field_type_ident_qualified = builder.id(
					replace_qualified(&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty))));
				value_ident.and_then(|x|
					{
						Some(quote_expr!(cx,
							match $field_type_ident_qualified::len_params() {
								0 => ::std::mem::size_of::<$field_type_ident>(),
								_ => $x. $field_id .size(),
							}))
					})
					.unwrap_or_else(|| {
						quote_expr!(cx, match $field_type_ident_qualified::len_params() {
							0 => ::std::mem::size_of::<$field_type_ident>(),
							_ => $index_ident .size(),
						})
					})
			}
		}
	}).collect();

	// Fold the per-field sizes into one `0usize + a + b + ...` expression.
	let first_size_expr = size_exprs[0].clone();
	let mut total_size_expr = quote_expr!(cx, 0usize + $first_size_expr);
	for index in 1..size_exprs.len() {
		let next_expr = size_exprs[index].clone();
		total_size_expr = quote_expr!(cx, $total_size_expr + $next_expr);
	}

	// Write side: statements advance an `offset` cursor; the actual slice
	// writes are deferred into `post_write_stmts` using the precomputed
	// `rN` ranges.
	let mut write_stmts = Vec::<ast::Stmt>::new();
	write_stmts.push(quote_stmt!(cx, let mut offset = 0usize;).expect("stmt1"));

	// Read side: `map_stmts` rebuild the per-field offset table (`map`),
	// popping variable lengths from `length_stack`.
	let mut map_stmts = Vec::<ast::Stmt>::new();
	let field_amount = builder.id(&format!("{}",fields.len()));
	map_stmts.push(quote_stmt!(cx, let mut map = vec![0usize; $field_amount];).expect("stmt2"));
	map_stmts.push(quote_stmt!(cx, let mut total = 0usize;).expect("stmt3"));

	let mut post_write_stmts = Vec::<ast::Stmt>::new();

	for (index, field) in fields.iter().enumerate() {
		let field_type_ident = builder.id(
			&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty)));
		let field_type_ident_qualified = builder.id(
			replace_qualified(&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty))));
		let field_id = match field.ident {
			Some(ident) => builder.id(ident),
			None => builder.id(format!("{}", index)),
		};
		// Access path for this field: `self . field` for structs, the
		// bound `__field{n}` ident for enum variants.
		let member_expr = match value_ident {
			Some(x) => {
				quote_expr!(cx, $x . $field_id)
			},
			None => {
				let index_ident = builder.id(format!("__field{}", index));
				quote_expr!(cx, $index_ident)
			},
		};

		let raw_ident = ::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty));
		let range_ident = builder.id(format!("r{}", index));

		// Built but currently unused (leading underscore); kept as-is.
		let error_message = "Error serializing member: ".to_owned() + &::syntax::print::pprust::expr_to_string(&member_expr);
		let _error_message_literal = builder.expr().lit().str::<&str>(&error_message);

		match raw_ident.as_ref() {
			"u8" => {
				// Single byte: written directly, no range needed.
				write_stmts.push(quote_stmt!(cx, let next_line = offset + 1;).expect("stmt4"));
				write_stmts.push(quote_stmt!(cx, buffer[offset] = $member_expr; ).expect("stm5"));
			},
			"[u8]" => {
				// Raw byte slice: record its length on the stack and
				// memcpy it into its range afterwards.
				write_stmts.push(quote_stmt!(cx, let size = $member_expr .len();).unwrap());
				write_stmts.push(quote_stmt!(cx, let next_line = offset + size;).unwrap());
				write_stmts.push(quote_stmt!(cx, length_stack.push_back(size);).unwrap());
				write_stmts.push(quote_stmt!(cx, let $range_ident = offset..next_line; ).unwrap());
				post_write_stmts.push(quote_stmt!(cx, buffer[$range_ident].clone_from_slice($member_expr); ).unwrap());
			}
			_ => {
				// General field: fixed-size via mem::size_of, or dynamic
				// via `.size()` with the length pushed for the reader.
				write_stmts.push(quote_stmt!(cx, let next_line = offset + match $field_type_ident_qualified::len_params() {
					0 => ::std::mem::size_of::<$field_type_ident>(),
					_ => { let size = $member_expr .size(); length_stack.push_back(size); size },
				}).unwrap());
				write_stmts.push(quote_stmt!(cx, let $range_ident = offset..next_line; ).unwrap());
				// Zero-length payloads are skipped entirely.
				post_write_stmts.push(quote_stmt!(cx,
					if $range_ident.end - $range_ident.start > 0 {
						if let Err(e) = $member_expr .to_bytes(&mut buffer[$range_ident], length_stack) {
							return Err(e)
						};
					}
				).unwrap());
			}
		}

		write_stmts.push(quote_stmt!(cx, offset = next_line; ).unwrap());

		// Mirror the same size bookkeeping on the read side to fill `map`.
		let field_index = builder.id(&format!("{}", index));
		map_stmts.push(quote_stmt!(cx, map[$field_index] = total;).unwrap());

		match raw_ident.as_ref() {
			"u8" => {
				map_stmts.push(quote_stmt!(cx, total += 1;).unwrap());
			},
			"[u8]" => {
				map_stmts.push(quote_stmt!(cx, let size = length_stack.pop_front().unwrap();).unwrap());
				map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
			},
			_ => {
				map_stmts.push(quote_stmt!(cx, let size = match $field_type_ident_qualified::len_params() {
					0 => ::std::mem::size_of::<$field_type_ident>(),
					_ => length_stack.pop_front().unwrap(),
				}).unwrap());
				map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
			}
		}
	};

	// Reading is impossible when any field is a raw pointer; emit a
	// runtime "not supported" error instead.
	let read_expr = match fields.iter().any(|f| codegen::has_ptr(&f.ty)) {
		true => {
			// cannot create structs with pointers
			quote_expr!(cx, Err(::ipc::binary::BinaryConvertError::not_supported()))
		},
		false => {
			if value_ident.is_some() {
				// Named construction: `let result = Ty { a: ..., b: ... };`
				let instance_create = named_fields_sequence(cx, &ty, fields);
				quote_expr!(cx, { $map_stmts; $instance_create; Ok(result) })
			}
			else {
				// Tuple/variant construction: `Ctor(..., ...)`.
				let map_variant = P(fields_sequence(cx, &ty, fields, &instance_ident.unwrap_or(builder.id("Self"))));
				quote_expr!(cx, { $map_stmts; Ok($map_variant) })
			}
		},
	};

	Ok(BinaryExpressions {
		size: total_size_expr,
		write: quote_expr!(cx, { $write_stmts; $post_write_stmts; Ok(()) } ),
		read: read_expr,
	})
}
2016-04-22 15:46:09 +02:00
// The `Err(...)` after the diverging `span_bug` is unreachable; the cast
// only fixes the match arm's type.
#[allow(unreachable_code)]
/// Specialisation of `binary_expr` for plain structs: both tuple and
/// named-field bodies delegate to `binary_expr_struct`, reading fields
/// from `self`.
fn binary_expr_item_struct(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	_impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
	span: Span,
	variant_data: &ast::VariantData,
) -> Result<BinaryExpressions, Error> {
	match *variant_data {
		ast::VariantData::Tuple(ref fields, _) => {
			binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				Some(builder.id("self")),
				None,
			)
		},
		ast::VariantData::Struct(ref fields, _) => {
			binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				Some(builder.id("self")),
				None,
			)
		},
		_ => {
			// Any other body (e.g. a unit struct) is unsupported;
			// `span_bug` aborts compilation with a diagnostic.
			cx.span_bug(span,
				&format!("#[derive(Binary)] Unsupported struct content, expected tuple/struct, found: {:?}",
					variant_data));
			Err(Error) as Result<BinaryExpressions, Error>
		},
	}
}
2016-04-20 14:55:08 +02:00
/// Specialisation of `binary_expr` for enums: generates one match arm per
/// variant for each of size/write/read, with the payload prefixed by a
/// one-byte variant index stored in `buffer[0]`.
fn binary_expr_enum(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	type_ident: Ident,
	impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
	span: Span,
	enum_def: &ast::EnumDef,
) -> Result<BinaryExpressions, Error> {
	// One BinaryArm per variant; any variant failing aborts the derive.
	let arms: Vec<_> = try!(enum_def.variants.iter()
		.enumerate()
		.map(|(variant_index, variant)| {
			binary_expr_variant(
				cx,
				builder,
				type_ident,
				impl_generics,
				ty.clone(),
				span,
				variant,
				variant_index,
			)
		})
		.collect());

	let (size_arms, write_arms, mut read_arms) = (
		arms.iter().map(|x| x.size.clone()).collect::<Vec<ast::Arm>>(),
		arms.iter().map(|x| x.write.clone()).collect::<Vec<ast::Arm>>(),
		arms.iter().map(|x| x.read.clone()).collect::<Vec<ast::Arm>>());

	// Catch-all read arm: an unknown discriminant byte becomes a runtime
	// variant error rather than a panic.
	read_arms.push(quote_arm!(cx, _ => { Err(::ipc::BinaryConvertError::variant(buffer[0])) } ));

	Ok(BinaryExpressions {
		// `1usize +` accounts for the variant-index byte.
		size: quote_expr!(cx, 1usize + match *self { $size_arms }),
		write: quote_expr!(cx, match *self { $write_arms }; ),
		read: quote_expr!(cx, match buffer[0] { $read_arms }),
	})
}
/// Per-variant match arms feeding the enum-level size/write/read matches
/// built in `binary_expr_enum`.
struct BinaryArm {
	size: ast::Arm,
	write: ast::Arm,
	read: ast::Arm,
}
2016-04-20 22:02:29 +02:00
/// Builds the constructor expression used by `from_bytes` for tuple-style
/// construction (enum variants and tuple structs), e.g.
/// `Ctor(try!(T::from_bytes(&buffer[map[0]..map[1]], length_stack)), ...)`,
/// switching to brace syntax when fields are named.
///
/// The token stream is assembled by hand because the argument list is
/// variable-length, which the fixed-shape `quote_expr!` macro cannot express.
fn fields_sequence(
	ext_cx: &ExtCtxt,
	_ty: &P<ast::Ty>,
	fields: &[ast::StructField],
	variant_ident: &ast::Ident,
) -> ast::Expr {
	use syntax::parse::token;
	use syntax::tokenstream::TokenTree::Token;

	let named_members = fields.iter().any(|f| f.ident.is_some());

	::quasi::parse_expr_panic(&mut ::syntax::parse::new_parser_from_tts(
		ext_cx.parse_sess(),
		{
			let _sp = ext_cx.call_site();
			let mut tt = ::std::vec::Vec::new();
			// `Ctor {` or `Ctor (` depending on field naming.
			tt.push(Token(_sp, token::Ident(variant_ident.clone())));
			if named_members {
				tt.push(Token(_sp, token::OpenDelim(token::Brace)));
			}
			else {
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
			}

			for (idx, field) in fields.iter().enumerate() {
				// `name:` prefix for named fields.
				if field.ident.is_some() {
					tt.push(Token(_sp, token::Ident(field.ident.clone().unwrap())));
					tt.push(Token(_sp, token::Colon));
				}

				// special case for u8, it just takes byte form sequence
				// emits: `buffer[map[idx]],`
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "u8" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// special case for [u8], it just takes a byte sequence
				// emits: `buffer[map[idx]..map[idx+1]],` (open-ended range
				// for the last field).
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "[u8]" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::DotDot));
					if idx+1 != fields.len() {
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
						tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
						tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					}
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// General case, emits:
				// `try!(Ty::from_bytes(&buffer[map[idx]..map[idx+1]], length_stack)
				//     .map_err(|e| e.named("field_name"))),`
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(
					Token(
						_sp,
						token::Ident(ext_cx.ident_of(&replace_qualified(&::syntax::print::pprust::ty_to_string(&field.ty))))
					));
				tt.push(Token(_sp, token::ModSep));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(_sp, token::BinOp(token::And)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::DotDot));
				if idx+1 != fields.len() {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				}
				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::Comma));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"))));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));

				// name member if it has resulted in the error
				tt.push(Token(_sp, token::Dot));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map_err"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(_sp, token::BinOp(token::Or)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("e"))));
				tt.push(Token(_sp, token::BinOp(token::Or)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("e"))));
				tt.push(Token(_sp, token::Dot));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("named"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				// Unnamed fields are reported as `f{idx}`.
				tt.push(Token(_sp, token::Literal(token::Lit::Str_(
					field.ident.unwrap_or(ext_cx.ident_of(&format!("f{}", idx))).name),
					None))
				);
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::Comma));
			}
			if named_members {
				tt.push(Token(_sp, token::CloseDelim(token::Brace)));
			}
			else {
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
			}
			tt
		})
	).unwrap()
}
/// Builds the `let result = Ty { field: try!(...), ... };` statement used
/// by `from_bytes` for named-field structs, with each field sliced out of
/// `buffer` by the `map` offset table (see `binary_expr_struct`).
///
/// Like `fields_sequence`, this is assembled as a raw token stream because
/// the field list is variable-length.
fn named_fields_sequence(
	ext_cx: &ExtCtxt,
	ty: &P<ast::Ty>,
	fields: &[ast::StructField],
) -> ast::Stmt {
	use syntax::parse::token;
	use syntax::tokenstream::TokenTree::Token;

	::quasi::parse_stmt_panic(&mut ::syntax::parse::new_parser_from_tts(
		ext_cx.parse_sess(),
		{
			let _sp = ext_cx.call_site();
			let mut tt = ::std::vec::Vec::new();
			// `let result = Ty {`
			tt.push(Token(_sp, token::Ident(ext_cx.ident_of("let"))));
			tt.push(Token(_sp, token::Ident(ext_cx.ident_of("result"))));
			tt.push(Token(_sp, token::Eq));
			tt.push(Token(
				_sp,
				token::Ident(
					ext_cx.ident_of(&::syntax::print::pprust::ty_to_string(ty))
				)));
			tt.push(Token(_sp, token::OpenDelim(token::Brace)));

			for (idx, field) in fields.iter().enumerate() {
				// `name:` (falls back to the positional index — NOTE(review):
				// unnamed fields are not expected here, verify against callers).
				tt.push(Token(_sp, match field.ident {
					Some(ident) => token::Ident(ident),
					None => token::Ident(ext_cx.ident_of(&format!("{}", idx))),
				}));
				tt.push(Token(_sp, token::Colon));

				// special case for u8, it just takes byte form sequence
				// emits: `buffer[map[idx]],`
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "u8" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// special case for [u8], it just takes a byte sequence
				// emits: `buffer[map[idx]..map[idx+1]],` (open-ended for
				// the last field).
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "[u8]" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::DotDot));
					if idx+1 != fields.len() {
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
						tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
						tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					}
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// General case, emits:
				// `try!(Ty::from_bytes(&buffer[map[idx]..map[idx+1]], length_stack)
				//     .map_err(|e| e.named("field_name"))),`
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(
					_sp,
					token::Ident(
						ext_cx.ident_of(&replace_qualified(&::syntax::print::pprust::ty_to_string(&field.ty)))
					)));
				tt.push(Token(_sp, token::ModSep));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(_sp, token::BinOp(token::And)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::DotDot));
				if idx + 1 != fields.len() {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				}

				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::Comma));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"))));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));

				// name member if it has resulted in the error
				tt.push(Token(_sp, token::Dot));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map_err"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(_sp, token::BinOp(token::Or)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("e"))));
				tt.push(Token(_sp, token::BinOp(token::Or)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("e"))));
				tt.push(Token(_sp, token::Dot));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("named"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(_sp, token::Literal(token::Lit::Str_(
					field.ident.unwrap_or(ext_cx.ident_of(&format!("f{}", idx))).name),
					None))
				);
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::Comma));
			}
			tt.push(Token(_sp, token::CloseDelim(token::Brace)));
			tt
		})
	).unwrap()
}
2016-04-20 14:55:08 +02:00
/// Generates the size/write/read match arms for one enum variant.
///
/// Unit variants serialize as just the variant-index byte; tuple and
/// struct variants bind their fields as `__field{n}` and delegate to
/// `binary_expr_struct`, with the payload offset by one byte for the
/// variant index (`buffer[0]`).
fn binary_expr_variant(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	type_ident: Ident,
	_generics: &ast::Generics,
	ty: P<ast::Ty>,
	_span: Span,
	variant: &ast::Variant,
	variant_index: usize,
) -> Result<BinaryArm, Error> {
	let variant_ident = variant.node.name;
	// The variant's discriminant byte, as a literal token.
	let variant_index_ident = builder.id(format!("{}", variant_index));
	match variant.node.data {
		ast::VariantData::Unit(_) => {
			let pat = builder.pat().path()
				.id(type_ident).id(variant_ident)
				.build();

			let variant_val = builder.id(format!("{}::{}", type_ident, variant_ident));

			// No payload: size 0 (the index byte is added by the caller).
			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { 0usize } ),
				write: quote_arm!(cx, $pat => { buffer[0] = $variant_index_ident; Ok(()) } ),
				read: quote_arm!(cx, $variant_index_ident => { Ok($variant_val) } ),
			})
		},
		ast::VariantData::Tuple(ref fields, _) => {
			// Bind each positional field by reference as `__field{n}`.
			let field_names: Vec<ast::Ident> = (0 .. fields.len())
				.map(|i| builder.id(format!("__field{}", i)))
				.collect();

			let pat = builder.pat().enum_()
				.id(type_ident).id(variant_ident).build()
				.with_pats(
					field_names.iter()
						.map(|field| builder.pat().ref_id(field))
				)
				.build();

			let binary_expr = try!(binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				None,
				Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
			));

			let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);
			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { $size_expr } ),
				// Write the index byte, then serialize into the rest.
				write: quote_arm!(cx,
					$pat => {
						buffer[0] = $variant_index_ident;
						let buffer = &mut buffer[1..];
						$write_expr
					}),
				// Skip the index byte before reading the payload.
				read: quote_arm!(cx,
					$variant_index_ident => {
						let buffer = &buffer[1..];
						$read_expr
					}
				),
			})
		},
		ast::VariantData::Struct(ref fields, _) => {
			// Same as the tuple case, but with a struct pattern mapping
			// each named field to its `__field{n}` binding.
			let field_names: Vec<_> = (0 .. fields.len())
				.map(|i| builder.id(format!("__field{}", i)))
				.collect();

			let pat = builder.pat().struct_()
				.id(type_ident).id(variant_ident).build()
				.with_pats(
					field_names.iter()
						.zip(fields.iter())
						.map(|(id, field)|(field.ident.unwrap(), builder.pat().ref_id(id))))
				.build();

			let binary_expr = try!(binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				None,
				Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
			));

			let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);

			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { $size_expr } ),
				write: quote_arm!(cx,
					$pat => {
						buffer[0] = $variant_index_ident;
						let buffer = &mut buffer[1..];
						$write_expr
					}),
				read: quote_arm!(cx,
					$variant_index_ident => {
						let buffer = &buffer[1..];
						$read_expr
					}
				),
			})
		},
	}
}