// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use aster;
|
|
|
|
|
|
|
|
use syntax::ast::{
|
|
|
|
MetaItem,
|
|
|
|
Item,
|
|
|
|
Ident,
|
|
|
|
};
|
|
|
|
|
|
|
|
use syntax::ast;
|
|
|
|
use syntax::codemap::Span;
|
|
|
|
use syntax::ext::base::{Annotatable, ExtCtxt};
|
|
|
|
use syntax::ptr::P;
|
|
|
|
|
|
|
|
/// Marker type returned when expansion fails. Carries no payload: by
/// convention the actual diagnostic has already been emitted through the
/// `ExtCtxt` (`span_err`/`span_bug`) before `Err(Error)` is returned.
pub struct Error;
|
|
|
|
|
2016-04-22 15:37:22 +02:00
|
|
|
use super::codegen;
|
|
|
|
|
2016-04-20 13:17:11 +02:00
|
|
|
pub fn expand_serialization_implementation(
|
|
|
|
cx: &mut ExtCtxt,
|
|
|
|
span: Span,
|
|
|
|
meta_item: &MetaItem,
|
|
|
|
annotatable: &Annotatable,
|
|
|
|
push: &mut FnMut(Annotatable)
|
|
|
|
) {
|
|
|
|
let item = match *annotatable {
|
|
|
|
Annotatable::Item(ref item) => item,
|
|
|
|
_ => {
|
|
|
|
cx.span_err(meta_item.span, "`#[derive(Binary)]` may only be applied to structs and enums");
|
|
|
|
return;
|
2016-04-24 20:34:19 +02:00
|
|
|
},
|
2016-04-20 13:17:11 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let builder = aster::AstBuilder::new().span(span);
|
|
|
|
|
2016-04-23 17:50:12 +02:00
|
|
|
let impl_item = match serialize_item(cx, &builder, &item) {
|
|
|
|
Ok(item) => item,
|
|
|
|
Err(Error) => {
|
|
|
|
// An error occured, but it should have been reported already.
|
|
|
|
return;
|
2016-04-24 20:34:19 +02:00
|
|
|
},
|
2016-04-23 17:50:12 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
push(Annotatable::Item(impl_item))
|
2016-04-20 13:17:11 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Builds the `impl ::ipc::BinaryConvertable for <item>` from the annotated
/// struct or enum. Returns `Err(Error)` (after reporting) for any other item
/// kind. The generated impl forwards to the size/read/write expressions
/// produced by `binary_expr`.
fn serialize_item(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	item: &Item,
) -> Result<P<ast::Item>, Error> {
	// Only structs and enums carry generics we can mirror onto the impl.
	let generics = match item.node {
		ast::ItemKind::Struct(_, ref generics) => generics,
		ast::ItemKind::Enum(_, ref generics) => generics,
		_ => {
			cx.span_err(
				item.span,
				"`#[derive(Binary)]` may only be applied to structs and enums");
			return Err(Error);
		},
	};

	// Self type of the impl: the item's own path with its generics attached.
	let ty = builder.ty().path()
		.segment(item.ident).with_generics(generics.clone()).build()
		.build();

	let where_clause = &generics.where_clause;

	let binary_expressions = try!(binary_expr(cx,
		&builder,
		&item,
		&generics,
		ty.clone()));

	let (size_expr, read_expr, write_expr) =
		(binary_expressions.size, binary_expressions.read, binary_expressions.write);

	// Splice the three generated expressions into the trait impl template.
	// `len_params() == 1` marks the type as variable-length so callers push
	// sizes through the length stack.
	Ok(quote_item!(cx,
		impl $generics ::ipc::BinaryConvertable for $ty $where_clause {
			fn size(&self) -> usize {
				$size_expr
			}

			fn to_bytes(&self, buffer: &mut [u8], length_stack: &mut VecDeque<usize>) -> Result<(), BinaryConvertError> {
				$write_expr
			}

			fn from_bytes(buffer: &[u8], length_stack: &mut VecDeque<usize>) -> Result<Self, BinaryConvertError> {
				$read_expr
			}

			fn len_params() -> usize {
				1
			}
		}
	).unwrap())
}
|
|
|
|
|
2016-04-22 15:46:09 +02:00
|
|
|
// `span_bug` diverges, so the trailing `Err(Error)` is formally unreachable.
#[allow(unreachable_code)]
/// Dispatches expression generation on the item kind: structs go to
/// `binary_expr_item_struct`, enums to `binary_expr_enum`. Any other kind is
/// a compiler bug at this point (already filtered by `serialize_item`).
fn binary_expr(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	item: &Item,
	impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
) -> Result<BinaryExpressions, Error> {
	match item.node {
		ast::ItemKind::Struct(ref variant_data, _) => {
			binary_expr_item_struct(
				cx,
				builder,
				impl_generics,
				ty,
				item.span,
				variant_data,
			)
		},
		ast::ItemKind::Enum(ref enum_def, _) => {
			binary_expr_enum(
				cx,
				builder,
				item.ident,
				impl_generics,
				ty,
				item.span,
				enum_def,
			)
		},
		_ => {
			cx.span_bug(item.span,
				"expected ItemStruct or ItemEnum in #[derive(Binary)]");
			Err(Error)
		},
	}
}
|
|
|
|
|
|
|
|
/// The three generated expressions that become the bodies of `size`,
/// `to_bytes` and `from_bytes` in the emitted `BinaryConvertable` impl.
struct BinaryExpressions {
	// Expression evaluating to the serialized byte length.
	pub size: P<ast::Expr>,
	// Expression writing `self` into `buffer` (body of `to_bytes`).
	pub write: P<ast::Expr>,
	// Expression reconstructing `Self` from `buffer` (body of `from_bytes`).
	pub read: P<ast::Expr>,
}
|
|
|
|
|
2016-04-25 16:20:38 +02:00
|
|
|
fn replace_qualified(s: &str) -> String {
|
|
|
|
if let Some(pos) = s.find("<") {
|
|
|
|
let mut source = s.to_owned();
|
|
|
|
source.insert(pos, ':');
|
|
|
|
source.insert(pos, ':');
|
|
|
|
source
|
|
|
|
}
|
|
|
|
else { s.to_owned() }
|
|
|
|
}
|
|
|
|
|
2016-04-20 14:55:08 +02:00
|
|
|
/// Generates size/write/read expressions for a set of struct-like fields.
///
/// Used both for plain structs (`value_ident == Some("self")`, fields reached
/// as `self.field`) and for enum-variant payloads (`value_ident == None`,
/// fields bound to `__field{N}` pattern idents; `instance_ident` names the
/// variant path used to rebuild the value on read).
///
/// Three field shapes are special-cased throughout: `u8` (one raw byte),
/// `[u8]` (raw byte run whose length travels on `length_stack`), and
/// everything else (delegated to the field type's own `BinaryConvertable`
/// impl, with fixed-size types measured via `mem::size_of`).
fn binary_expr_struct(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	ty: P<ast::Ty>,
	fields: &[ast::StructField],
	value_ident: Option<ast::Ident>,
	instance_ident: Option<ast::Ident>,
) -> Result<BinaryExpressions, Error> {

	// One size expression per field; summed below into `total_size_expr`.
	let size_exprs: Vec<P<ast::Expr>> = fields.iter().enumerate().map(|(index, field)| {
		// Printed field type with any outer pointer wrapper stripped.
		let raw_ident = ::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty));
		let index_ident = builder.id(format!("__field{}", index));
		match raw_ident.as_ref() {
			"u8" => {
				quote_expr!(cx, 1)
			},
			"[u8]" => {
				// Named field (`self.f.len()`) or positional binding (`__fieldN.len()`).
				value_ident.and_then(|x| {
						let field_id = builder.id(field.ident.unwrap());
						Some(quote_expr!(cx, $x. $field_id .len()))
					})
					.unwrap_or_else(|| {
						quote_expr!(cx, $index_ident .len())
					}
				)
			}
			_ => {
				let field_type_ident = builder.id(
					&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty)));

				// Turbofish form, needed to call `len_params()` on generic types.
				let field_type_ident_qualified = builder.id(
					replace_qualified(&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty))));

				// Fixed-size types (len_params() == 0) use mem::size_of;
				// variable-size types ask the value itself.
				value_ident.and_then(|x|
					{
						let field_id = builder.id(field.ident.unwrap());
						Some(quote_expr!(cx,
							match $field_type_ident_qualified::len_params() {
								0 => mem::size_of::<$field_type_ident>(),
								_ => $x. $field_id .size(),
							}))
					})
					.unwrap_or_else(|| {
						quote_expr!(cx, match $field_type_ident_qualified::len_params() {
							0 => mem::size_of::<$field_type_ident>(),
							_ => $index_ident .size(),
						})
					})
			}
		}
	}).collect();

	// Fold the per-field sizes into a single `0usize + a + b + ...` expression.
	let first_size_expr = size_exprs[0].clone();
	let mut total_size_expr = quote_expr!(cx, 0usize + $first_size_expr);
	for index in 1..size_exprs.len() {
		let next_expr = size_exprs[index].clone();
		total_size_expr = quote_expr!(cx, $total_size_expr + $next_expr);
	}

	// Statements for `to_bytes`: advance `offset` past each serialized field.
	let mut write_stmts = Vec::<ast::Stmt>::new();
	write_stmts.push(quote_stmt!(cx, let mut offset = 0usize;).unwrap());

	// Statements for `from_bytes`: build `map[i]` = byte offset of field i,
	// replaying the length stack to find variable-length field sizes.
	let mut map_stmts = Vec::<ast::Stmt>::new();
	let field_amount = builder.id(&format!("{}",fields.len()));
	map_stmts.push(quote_stmt!(cx, let mut map = vec![0usize; $field_amount];).unwrap());
	map_stmts.push(quote_stmt!(cx, let mut total = 0usize;).unwrap());
	for (index, field) in fields.iter().enumerate() {
		let field_type_ident = builder.id(
			&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty)));

		let field_type_ident_qualified = builder.id(
			replace_qualified(&::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty))));

		// How the field value is spelled in generated code: `self.f` for
		// named access, or the `__fieldN` binding from the match pattern.
		let member_expr = match value_ident {
			Some(x) => {
				let field_id = builder.id(field.ident.unwrap());
				quote_expr!(cx, $x . $field_id)
			},
			None => {
				let index_ident = builder.id(format!("__field{}", index));
				quote_expr!(cx, $index_ident)
			},
		};

		let raw_ident = ::syntax::print::pprust::ty_to_string(&codegen::strip_ptr(&field.ty));
		match raw_ident.as_ref() {
			"u8" => {
				// Single raw byte.
				write_stmts.push(quote_stmt!(cx, let next_line = offset + 1;).unwrap());
				write_stmts.push(quote_stmt!(cx, buffer[offset] = $member_expr; ).unwrap());
			},
			"[u8]" => {
				// Raw byte slice: record its length on the stack, then memcpy.
				write_stmts.push(quote_stmt!(cx, let size = $member_expr .len();).unwrap());
				write_stmts.push(quote_stmt!(cx, let next_line = offset + size;).unwrap());
				write_stmts.push(quote_stmt!(cx, length_stack.push_back(size);).unwrap());
				write_stmts.push(quote_stmt!(cx, buffer[offset..next_line].clone_from_slice($member_expr); ).unwrap());
			}
			_ => {
				// Nested BinaryConvertable: size via size_of or .size(), the
				// latter also pushed on the stack for the reader.
				write_stmts.push(quote_stmt!(cx, let next_line = offset + match $field_type_ident_qualified::len_params() {
					0 => mem::size_of::<$field_type_ident>(),
					_ => { let size = $member_expr .size(); length_stack.push_back(size); size },
				}).unwrap());
				write_stmts.push(quote_stmt!(cx,
					if let Err(e) = $member_expr .to_bytes(&mut buffer[offset..next_line], length_stack) { return Err(e) };).unwrap());
			}
		}

		write_stmts.push(quote_stmt!(cx, offset = next_line; ).unwrap());

		// Reader side: field i starts at the running total...
		let field_index = builder.id(&format!("{}", index));
		map_stmts.push(quote_stmt!(cx, map[$field_index] = total;).unwrap());

		// ...and advances it by the field's (possibly stack-recorded) size.
		match raw_ident.as_ref() {
			"u8" => {
				map_stmts.push(quote_stmt!(cx, total += 1;).unwrap());
			},
			"[u8]" => {
				map_stmts.push(quote_stmt!(cx, let size = length_stack.pop_front().unwrap();).unwrap());
				map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
			},
			_ => {
				map_stmts.push(quote_stmt!(cx, let size = match $field_type_ident_qualified::len_params() {
					0 => mem::size_of::<$field_type_ident>(),
					_ => length_stack.pop_front().unwrap(),
				}).unwrap());
				map_stmts.push(quote_stmt!(cx, total += size;).unwrap());
			}
		}
	};

	let read_expr = match fields.iter().any(|f| codegen::has_ptr(&f.ty)) {
		true => {
			// cannot create structs with pointers
			quote_expr!(cx, Err(::ipc::binary::BinaryConvertError))
		},
		false => {
			if value_ident.is_some() {
				// Named-field struct: `let result = Ty { f: ..., ... };`.
				let instance_create = named_fields_sequence(cx, &ty, fields);
				quote_expr!(cx, { $map_stmts; $instance_create; Ok(result) })
			}
			else {
				// Tuple struct / enum variant: positional constructor call.
				let map_variant = P(fields_sequence(cx, &ty, fields, &instance_ident.unwrap_or(builder.id("Self"))));
				quote_expr!(cx, { $map_stmts; Ok($map_variant) })
			}
		},
	};

	Ok(BinaryExpressions {
		size: total_size_expr,
		write: quote_expr!(cx, { $write_stmts; Ok(()) } ),
		read: read_expr,
	})
}
|
|
|
|
|
2016-04-22 15:46:09 +02:00
|
|
|
// `span_bug` diverges, making the trailing `Err(Error)` unreachable.
#[allow(unreachable_code)]
/// Generates expressions for a top-level struct item. Both tuple and named
/// variants delegate to `binary_expr_struct` with `self` as the value ident;
/// unit structs (or any other shape) are reported as a bug.
fn binary_expr_item_struct(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	_impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
	span: Span,
	variant_data: &ast::VariantData,
) -> Result<BinaryExpressions, Error> {
	match *variant_data {
		ast::VariantData::Tuple(ref fields, _) => {
			binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				Some(builder.id("self")),
				None,
			)
		},
		ast::VariantData::Struct(ref fields, _) => {
			binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				Some(builder.id("self")),
				None,
			)
		},
		_ => {
			cx.span_bug(span,
				&format!("#[derive(Binary)] Unsupported struct content, expected tuple/struct, found: {:?}",
					variant_data));
			Err(Error)
		},
	}
}
|
2016-04-20 14:55:08 +02:00
|
|
|
|
|
|
|
/// Generates expressions for an enum: one match arm per variant (built by
/// `binary_expr_variant`), keyed by the variant index stored in `buffer[0]`.
/// Hence the serialized form is 1 tag byte + the payload, and `size` adds
/// `1usize` on top of the per-variant sizes.
fn binary_expr_enum(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	type_ident: Ident,
	impl_generics: &ast::Generics,
	ty: P<ast::Ty>,
	span: Span,
	enum_def: &ast::EnumDef,
) -> Result<BinaryExpressions, Error> {
	// `collect()` into Result short-circuits on the first failing variant.
	let arms: Vec<_> = try!(
		enum_def.variants.iter()
			.enumerate()
			.map(|(variant_index, variant)| {
				binary_expr_variant(
					cx,
					builder,
					type_ident,
					impl_generics,
					ty.clone(),
					span,
					variant,
					variant_index,
				)
			})
			.collect()
	);

	let (size_arms, write_arms, mut read_arms) = (
		arms.iter().map(|x| x.size.clone()).collect::<Vec<ast::Arm>>(),
		arms.iter().map(|x| x.write.clone()).collect::<Vec<ast::Arm>>(),
		arms.iter().map(|x| x.read.clone()).collect::<Vec<ast::Arm>>());

	// Unknown tag byte on deserialization -> conversion error.
	read_arms.push(quote_arm!(cx, _ => { Err(BinaryConvertError) } ));

	Ok(BinaryExpressions {
		size: quote_expr!(cx, 1usize + match *self { $size_arms }),
		write: quote_expr!(cx, match *self { $write_arms }; ),
		read: quote_expr!(cx, match buffer[0] { $read_arms }),
	})
}
|
|
|
|
|
|
|
|
/// Per-variant match arms spliced into the enum-level `match` expressions
/// assembled by `binary_expr_enum` (size / to_bytes / from_bytes).
struct BinaryArm {
	size: ast::Arm,
	write: ast::Arm,
	read: ast::Arm,
}
|
|
|
|
|
2016-04-20 22:02:29 +02:00
|
|
|
/// Hand-assembles the token stream for a constructor expression rebuilding a
/// tuple struct / enum variant (or a named variant) from `buffer` slices,
/// e.g. `Variant(try!(T::from_bytes(&buffer[map[0]..map[1]], length_stack)), ...)`.
///
/// Token trees are built manually (instead of `quote_expr!`) because both the
/// number of fields and the brace-vs-paren delimiters depend on runtime data.
/// `map[i]` offsets are computed by the map statements emitted in
/// `binary_expr_struct`; the last field's slice is open-ended (`map[i]..`).
fn fields_sequence(
	ext_cx: &ExtCtxt,
	_ty: &P<ast::Ty>,
	fields: &[ast::StructField],
	variant_ident: &ast::Ident,
) -> ast::Expr {
	use syntax::parse::token;
	use syntax::ast::TokenTree::Token;

	// Named fields -> `Ident { f: ..., }`; positional -> `Ident(...)`.
	let named_members = fields.iter().any(|f| f.ident.is_some());

	::quasi::parse_expr_panic(&mut ::syntax::parse::new_parser_from_tts(
		ext_cx.parse_sess(),
		ext_cx.cfg(),
		{
			let _sp = ext_cx.call_site();
			let mut tt = ::std::vec::Vec::new();
			tt.push(Token(_sp, token::Ident(variant_ident.clone())));
			if named_members {
				tt.push(Token(_sp, token::OpenDelim(token::Brace)));
			}
			else {
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
			}

			for (idx, field) in fields.iter().enumerate() {
				if field.ident.is_some() {
					// `field_name:` prefix for named members.
					tt.push(Token(_sp, token::Ident(field.ident.clone().unwrap())));
					tt.push(Token(_sp, token::Colon));
				}

				// special case for u8, it just takes byte form sequence
				// -> emits `buffer[map[idx]],`
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "u8" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// special case for [u8], it just takes a byte sequence
				// -> emits `buffer[map[idx]..map[idx+1]],` (open-ended for the last field)
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "[u8]" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::DotDot));

					if idx+1 != fields.len() {
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
						tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
						tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					}

					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// General case:
				// `try!(Ty::<...>::from_bytes(&buffer[map[idx]..map[idx+1]], length_stack)),`
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(
					Token(
						_sp,
						token::Ident(ext_cx.ident_of(&replace_qualified(&::syntax::print::pprust::ty_to_string(&field.ty))))
					));
				tt.push(Token(_sp, token::ModSep));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));

				tt.push(Token(_sp, token::BinOp(token::And)));

				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::DotDot));

				if idx+1 != fields.len() {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				}

				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

				tt.push(Token(_sp, token::Comma));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"))));

				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::Comma));
			}
			if named_members {
				tt.push(Token(_sp, token::CloseDelim(token::Brace)));
			}
			else {
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
			}
			tt
		})
	).unwrap()
}
|
|
|
|
|
|
|
|
/// Hand-assembles the token stream for the statement reconstructing a
/// named-field struct, i.e. `let result = Ty { field: <decoded>, ... };`.
/// Companion to `fields_sequence`; the caller then wraps `Ok(result)` around
/// the bound name. Field decoding follows the same three shapes: `u8` from a
/// single byte, `[u8]` from a raw slice, everything else via
/// `try!(FieldTy::from_bytes(&buffer[map[idx]..map[idx+1]], length_stack))`.
fn named_fields_sequence(
	ext_cx: &ExtCtxt,
	ty: &P<ast::Ty>,
	fields: &[ast::StructField],
) -> ast::Stmt {
	use syntax::parse::token;
	use syntax::ast::TokenTree::Token;

	::quasi::parse_stmt_panic(&mut ::syntax::parse::new_parser_from_tts(
		ext_cx.parse_sess(),
		ext_cx.cfg(),
		{
			let _sp = ext_cx.call_site();
			let mut tt = ::std::vec::Vec::new();
			// `let result = Ty {`
			tt.push(Token(_sp, token::Ident(ext_cx.ident_of("let"))));
			tt.push(Token(_sp, token::Ident(ext_cx.ident_of("result"))));
			tt.push(Token(_sp, token::Eq));

			tt.push(Token(
				_sp,
				token::Ident(
					ext_cx.ident_of(&::syntax::print::pprust::ty_to_string(ty))
				)));

			tt.push(Token(_sp, token::OpenDelim(token::Brace)));

			for (idx, field) in fields.iter().enumerate() {
				// `field_name:` — named structs always have field idents here.
				tt.push(Token(_sp, token::Ident(field.ident.clone().unwrap())));
				tt.push(Token(_sp, token::Colon));

				// special case for u8, it just takes byte form sequence
				// -> `buffer[map[idx]],`
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "u8" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// special case for [u8], it just takes a byte sequence
				// -> `buffer[map[idx]..map[idx+1]],` (open-ended for the last field)
				if ::syntax::print::pprust::ty_to_string(&field.ty) == "[u8]" {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					tt.push(Token(_sp, token::DotDot));

					if idx+1 != fields.len() {
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
						tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
						tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
						tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
					}

					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

					tt.push(Token(_sp, token::Comma));
					continue;
				}

				// General case:
				// `try!(Ty::<...>::from_bytes(&buffer[map[idx]..map[idx+1]], length_stack)),`
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("try!"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));
				tt.push(Token(
					_sp,
					token::Ident(
						ext_cx.ident_of(&replace_qualified(&::syntax::print::pprust::ty_to_string(&field.ty)))
					)));
				tt.push(Token(_sp, token::ModSep));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("from_bytes"))));
				tt.push(Token(_sp, token::OpenDelim(token::Paren)));

				tt.push(Token(_sp, token::BinOp(token::And)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("buffer"))));

				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
				tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx)))));
				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				tt.push(Token(_sp, token::DotDot));
				if idx + 1 != fields.len() {
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of("map"))));
					tt.push(Token(_sp, token::OpenDelim(token::Bracket)));
					tt.push(Token(_sp, token::Ident(ext_cx.ident_of(&format!("{}", idx+1)))));
					tt.push(Token(_sp, token::CloseDelim(token::Bracket)));
				}

				tt.push(Token(_sp, token::CloseDelim(token::Bracket)));

				tt.push(Token(_sp, token::Comma));
				tt.push(Token(_sp, token::Ident(ext_cx.ident_of("length_stack"))));

				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::CloseDelim(token::Paren)));
				tt.push(Token(_sp, token::Comma));
			}

			tt.push(Token(_sp, token::CloseDelim(token::Brace)));
			tt
		})
	).unwrap()
}
|
|
|
|
|
2016-04-20 14:55:08 +02:00
|
|
|
/// Builds the three match arms (size / write / read) for one enum variant,
/// keyed by its index byte. Unit variants serialize as the tag byte alone;
/// tuple and struct variants destructure their fields into `__fieldN`
/// bindings and delegate to `binary_expr_struct`, with the payload written
/// after the tag (`let buffer = &mut buffer[1..];`).
fn binary_expr_variant(
	cx: &ExtCtxt,
	builder: &aster::AstBuilder,
	type_ident: Ident,
	_generics: &ast::Generics,
	ty: P<ast::Ty>,
	_span: Span,
	variant: &ast::Variant,
	variant_index: usize,
) -> Result<BinaryArm, Error> {
	let variant_ident = variant.node.name;
	// The variant's tag value, spliced into patterns/expressions as a literal.
	let variant_index_ident = builder.id(format!("{}", variant_index));

	match variant.node.data {
		ast::VariantData::Unit(_) => {
			// `Type::Variant` pattern; payload size is zero.
			let pat = builder.pat().path()
				.id(type_ident).id(variant_ident)
				.build();

			let variant_val = builder.id(format!("{}::{}", type_ident, variant_ident));

			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { 0usize } ),
				write: quote_arm!(cx, $pat => { buffer[0] = $variant_index_ident; Ok(()) } ),
				read: quote_arm!(cx, $variant_index_ident => { Ok($variant_val) } ),
			})
		},
		ast::VariantData::Tuple(ref fields, _) => {
			// Bind each positional field by reference as `__field{i}`.
			let field_names: Vec<ast::Ident> = (0 .. fields.len())
				.map(|i| builder.id(format!("__field{}", i)))
				.collect();

			let pat = builder.pat().enum_()
				.id(type_ident).id(variant_ident).build()
				.with_pats(
					field_names.iter()
						.map(|field| builder.pat().ref_id(field))
				)
				.build();

			// No value ident (fields reached via the bindings); the variant
			// path is the constructor used on the read side.
			let binary_expr = try!(binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				None,
				Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
			));

			let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);
			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { $size_expr } ),
				write: quote_arm!(cx,
					$pat => {
						buffer[0] = $variant_index_ident;
						let buffer = &mut buffer[1..];
						$write_expr
					}),
				read: quote_arm!(cx, $variant_index_ident => { $read_expr } ),
			})
		},
		ast::VariantData::Struct(ref fields, _) => {
			let field_names: Vec<_> = (0 .. fields.len())
				.map(|i| builder.id(format!("__field{}", i)))
				.collect();

			// `Type::Variant { name: ref __fieldN, ... }` pattern.
			let pat = builder.pat().struct_()
				.id(type_ident).id(variant_ident).build()
				.with_pats(
					field_names.iter()
						.zip(fields.iter())
						.map(|(id, field)|(field.ident.unwrap(), builder.pat().ref_id(id))))
				.build();

			let binary_expr = try!(binary_expr_struct(
				cx,
				&builder,
				ty,
				fields,
				None,
				Some(builder.id(format!("{}::{}", type_ident, variant_ident))),
			));

			let (size_expr, write_expr, read_expr) = (binary_expr.size, vec![binary_expr.write], binary_expr.read);

			Ok(BinaryArm {
				size: quote_arm!(cx, $pat => { $size_expr } ),
				write: quote_arm!(cx,
					$pat => {
						buffer[0] = $variant_index_ident;
						let buffer = &mut buffer[1..];
						$write_expr
					}),
				read: quote_arm!(cx, $variant_index_ident => { $read_expr } ),
			})
		},
	}
}
|