Backport Beta 2.2.1 (#9905)
* Bump beta to version 2.2.1 * fix: Intermittent failing CI due to addr in use (#9885) Allow OS to set port at runtime * Use Weak reference in PubSubClient (#9886) * Fix json tracer overflow (#9873) * Fix json tracer overflow * Replace trace_executed with a direct trace push * Remove unused variable * Add test for 5a51 * Remove duplicate json! * Fix docker script (#9854) * Dockerfile: change source path of the newly added check_sync.sh (#9869) * Allow to seal work on latest block (#9876) * Allow to seal work on latest block. * Test from @todr to check sealing conditions. * gitlab-ci: make android release build succeed (#9743) * use docker cargo config file for android builds * make android build succeed * ethcore: use Machine::verify_transaction on parent block (#9900) * ethcore: use Machine::verify_transaction on parent block also fixes off-by-one activation of transaction permission contract * ethcore: clarify call to verify_transaction * foundation: #6692865, ropsten: #4417537, kovan: #9363457 * Remove rust-toolchain file (#9906) * EIP-712 implementation (#9631) * EIP-712 impl * added more tests * removed size parsing unwrap * corrected TYPE_REGEX to disallow zero sized fixed length arrays, replaced LinkedHashSet with IndexSet, added API spec to docs, fixed Type::Byte encoding branch * use Option<u64> instead of u64 for Type::Array::Length * replace `.iter()` with `.values()` Co-Authored-By: seunlanlege <seunlanlege@gmail.com> * tabify eip712.rs * use proper comments for docs * Cargo.lock: revert unrelated changes * tabify encode.rs * EIP 191 (#9701) * added sign_191 rpc method * fixed hash_structured_data return type * added ConfirmationPayload::SignMessage for non-prefixed signatures, added tests for sign191 * renamed WithValidator -> PresignedTransaction * rename applicationData to data in test * adds docs for EIP191Version, renamed SignRequest to EIP191SignRequest * light-fetch: Differentiate between out-of-gas/manual throw and use required gas from response 
on failure (#9824) * fix start_gas, handle OOG exceptions & NotEnoughGas * Change START_GAS: 50_000 -> 60_000 * When the `OutOfGas exception` is received then try to double the gas until it succeeds or block gas limit is reached * When `NotEnoughBasGas error` is received then use the required gas provided in the response * fix(light-fetch): ensure block_gas_limit is tried Try the `block_gas_limit` before regarding the execution as an error * Update rpc/src/v1/helpers/light_fetch.rs Co-Authored-By: niklasad1 <niklasadolfsson1@gmail.com> * simplify cargo audit * Use block header for building finality (#9914) * ci: nuke the gitlab caches (#9855)
This commit is contained in:
172
util/EIP-712/src/parser.rs
Normal file
172
util/EIP-712/src/parser.rs
Normal file
@@ -0,0 +1,172 @@
|
||||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Solidity type-name parsing
|
||||
use lunarity_lexer::{Lexer, Token};
|
||||
use error::*;
|
||||
use toolshed::Arena;
|
||||
use std::{fmt, result};
|
||||
|
||||
/// A parsed Solidity type name, as it appears in an EIP-712 type string.
//
// `Eq` is derived in addition to `PartialEq`: every payload (`u8`, `String`,
// `Option<u64>`, `Box<Type>`) is itself `Eq`, and the full equivalence makes
// the type usable as a hash-map key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Type {
	/// `address`
	Address,
	/// `uint` (width-less form)
	Uint,
	/// `int` (width-less form)
	Int,
	/// `string`
	String,
	/// `bool`
	Bool,
	/// `bytes` (dynamic length)
	Bytes,
	/// `bytesN` — the fixed byte length `N` as reported by the lexer
	Byte(u8),
	/// A user-defined struct type, referenced by name
	Custom(String),
	/// An array of `inner`; `length` is `None` for dynamic arrays (`T[]`)
	/// and `Some(n)` for fixed-size arrays (`T[n]`)
	Array {
		length: Option<u64>,
		inner: Box<Type>
	}
}
|
||||
|
||||
impl From<Type> for String {
|
||||
fn from(field_type: Type) -> String {
|
||||
match field_type {
|
||||
Type::Address => "address".into(),
|
||||
Type::Uint => "uint".into(),
|
||||
Type::Int => "int".into(),
|
||||
Type::String => "string".into(),
|
||||
Type::Bool => "bool".into(),
|
||||
Type::Bytes => "bytes".into(),
|
||||
Type::Byte(len) => format!("bytes{}", len),
|
||||
Type::Custom(custom) => custom,
|
||||
Type::Array {
|
||||
inner,
|
||||
length
|
||||
} => {
|
||||
let inner: String = (*inner).into();
|
||||
match length {
|
||||
None => format!("{}[]", inner),
|
||||
Some(length) => format!("{}[{}]", inner, length)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Type {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
|
||||
let item: String = self.clone().into();
|
||||
write!(f, "{}", item)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parser for EIP-712 / Solidity type strings such as `"uint[3][]"`.
pub struct Parser {
	// Backing arena handed to `Lexer::new` in `parse_type`; kept on the
	// struct so repeated parses reuse the same allocation.
	arena: Arena,
}
|
||||
|
||||
impl Parser {
	/// Creates a parser with a fresh token arena.
	pub fn new() -> Self {
		Parser {
			arena: Arena::new()
		}
	}

	/// Parses a Solidity type name (e.g. `"bytes32"`, `"uint[3][]"`) into a
	/// [`Type`]; the string is validated while it is parsed.
	///
	/// Errors with `InvalidArraySize` on an unparseable length literal,
	/// `UnsupportedArrayDepth` beyond 10 nested arrays, `UnexpectedToken`
	/// on malformed input, and `NonExistentType` on an empty type string.
	pub fn parse_type(&self, field_type: &str) -> Result<Type> {
		// Whether we are currently between a `[` and its matching `]`.
		#[derive(PartialEq)]
		enum State { Open, Close }

		let mut lexer = Lexer::new(&self.arena, field_type);
		// Innermost type parsed so far; re-wrapped in `Type::Array` each
		// time a bracket pair closes.
		let mut token = None;
		let mut state = State::Close;
		// Count of closed bracket pairs; nesting is capped at 10.
		let mut array_depth = 0;
		// Length literal seen inside the current bracket pair, if any.
		let mut current_array_length: Option<u64> = None;

		while lexer.token != Token::EndOfProgram {
			let type_ = match lexer.token {
				// Any bare identifier is treated as a user-defined struct type.
				Token::Identifier => Type::Custom(lexer.token_as_str().to_owned()),
				// `bytesN` — the lexer reports N in `type_size`.
				Token::TypeByte => Type::Byte(lexer.type_size.0),
				Token::TypeBytes => Type::Bytes,
				Token::TypeBool => Type::Bool,
				Token::TypeUint => Type::Uint,
				Token::TypeInt => Type::Int,
				Token::TypeString => Type::String,
				Token::TypeAddress => Type::Address,
				Token::LiteralInteger => {
					// A fixed array length, e.g. the `7` in `byte[7]`.
					let length = lexer.token_as_str();
					current_array_length = Some(length
						.parse()
						.map_err(|_|
							ErrorKind::InvalidArraySize(length.into())
						)?
					);
					lexer.consume();
					continue;
				},
				// `[` is only legal after a type and outside another pair.
				Token::BracketOpen if token.is_some() && state == State::Close => {
					state = State::Open;
					lexer.consume();
					continue
				}
				Token::BracketClose if array_depth < 10 => {
					if state == State::Open && token.is_some() {
						// `take()` clears the pending length so a later
						// unsized pair yields `T[]` rather than `T[len]`.
						let length = current_array_length.take();
						state = State::Close;
						token = Some(Type::Array {
							inner: Box::new(token.expect("if statement checks for some; qed")),
							length
						});
						lexer.consume();
						array_depth += 1;
						continue
					} else {
						return Err(ErrorKind::UnexpectedToken(lexer.token_as_str().to_owned(), field_type.to_owned()))?
					}
				}
				// The eleventh `]` exceeds the supported nesting depth.
				Token::BracketClose if array_depth == 10 => {
					return Err(ErrorKind::UnsupportedArrayDepth)?
				}
				_ => return Err(ErrorKind::UnexpectedToken(lexer.token_as_str().to_owned(), field_type.to_owned()))?
			};

			token = Some(type_);
			lexer.consume();
		}

		// An empty input never assigns `token`.
		Ok(token.ok_or_else(|| ErrorKind::NonExistentType)?)
	}
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn test_parser() {
		// Ten bracket pairs: exactly the maximum supported array depth.
		let type_string = "byte[][][7][][][][][][][]";
		let parser = Parser::new();
		parser.parse_type(type_string).unwrap();
	}

	#[test]
	fn test_nested_array() {
		// Eleven bracket pairs exceed the depth cap and must be rejected.
		let type_string = "byte[][][7][][][][][][][][]";
		let parser = Parser::new();
		assert!(parser.parse_type(type_string).is_err());
	}

	#[test]
	fn test_malformed_array_type() {
		// A `[` opened inside another pair is not a valid type string.
		let type_string = "byte[7[]uint][]";
		let parser = Parser::new();
		assert!(parser.parse_type(type_string).is_err())
	}
}
|
||||
Reference in New Issue
Block a user