// Copyright 2015-2020 Parity Technologies (UK) Ltd.
// This file is part of OpenEthereum.

// OpenEthereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// OpenEthereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with OpenEthereum. If not, see <http://www.gnu.org/licenses/>.

//! Solidity type-name parsing.

use crate::error::*;
use lunarity_lexer::{Lexer, Token};
use std::{fmt, result};
/// A parsed Solidity type name, as returned by [`parse_type`].
#[derive(Debug, Clone, PartialEq)]
pub enum Type {
    Address,
    Uint,
    Int,
    String,
    Bool,
    Bytes,
    /// A fixed-size `bytesN` type; the `u8` holds `N`.
    Byte(u8),
    /// Any identifier that is not a built-in type name.
    Custom(String),
    /// An array of `inner`; `length` is `None` for dynamically-sized arrays.
    Array {
        length: Option<u64>,
        inner: Box<Type>,
    },
}
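/// Formats a [`Type`] back into its Solidity source form (the inverse of
/// [`parse_type`]); for example, `Type::Byte(32)` becomes `"bytes32"` and a
/// dynamic array of `Type::Uint` becomes `"uint[]"`.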
impl From<Type> for String {
    fn from(field_type: Type) -> String {
        match field_type {
            Type::Address => "address".into(),
            Type::Uint => "uint".into(),
            Type::Int => "int".into(),
            Type::String => "string".into(),
            Type::Bool => "bool".into(),
            Type::Bytes => "bytes".into(),
            Type::Byte(len) => format!("bytes{}", len),
            Type::Custom(custom) => custom,
            Type::Array { inner, length } => {
                let inner: String = (*inner).into();
                match length {
                    None => format!("{}[]", inner),
                    Some(length) => format!("{}[{}]", inner, length),
                }
            }
        }
    }
}
impl fmt::Display for Type {
    fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> {
        let item: String = self.clone().into();
        write!(f, "{}", item)
    }
}
/// Parses a Solidity type name (for example `address`, `uint`, or a nested
/// array type such as `uint[3][]`) into a [`Type`]; the type string is
/// validated as it is parsed.
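///
/// # Example
///
/// A sketch of the expected shape of the result (shown as text rather than a
/// compiled doc-test):
///
/// ```text
/// parse_type("uint[3][]")
/// // => Type::Array {
/// //        inner: Box::new(Type::Array { inner: Box::new(Type::Uint), length: Some(3) }),
/// //        length: None,
/// //    }
/// ```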
pub fn parse_type(field_type: &str) -> Result<Type> {
    // Tracks whether an array bracket is currently open.
    #[derive(PartialEq)]
    enum State {
        Open,
        Close,
    }

    let mut lexer = Lexer::new(field_type);
    let mut token = None;
    let mut state = State::Close;
    // Nesting deeper than ten array levels is rejected below.
    let mut array_depth = 0;
    let mut current_array_length: Option<u64> = None;

    while lexer.token != Token::EndOfProgram {
        let type_ = match lexer.token {
            Token::Identifier => Type::Custom(lexer.slice().to_owned()),
            Token::TypeByte => Type::Byte(lexer.extras.0),
            Token::TypeBytes => Type::Bytes,
            Token::TypeBool => Type::Bool,
            Token::TypeUint => Type::Uint,
            Token::TypeInt => Type::Int,
            Token::TypeString => Type::String,
            Token::TypeAddress => Type::Address,
            Token::LiteralInteger => {
                // An integer literal between brackets is the fixed array length.
                let length = lexer.slice();
                current_array_length = Some(
                    length
                        .parse()
                        .map_err(|_| ErrorKind::InvalidArraySize(length.into()))?,
                );
                lexer.advance();
                continue;
            }
            Token::BracketOpen if token.is_some() && state == State::Close => {
                state = State::Open;
                lexer.advance();
                continue;
            }
            Token::BracketClose if array_depth < 10 => {
                if state == State::Open && token.is_some() {
                    // Closing a bracket wraps the type parsed so far in an array.
                    let length = current_array_length.take();
                    state = State::Close;
                    token = Some(Type::Array {
                        inner: Box::new(token.expect("if statement checks for some; qed")),
                        length,
                    });
                    lexer.advance();
                    array_depth += 1;
                    continue;
                } else {
                    return Err(ErrorKind::UnexpectedToken(
                        lexer.slice().to_owned(),
                        field_type.to_owned(),
                    ))?;
                }
            }
            Token::BracketClose if array_depth == 10 => {
                return Err(ErrorKind::UnsupportedArrayDepth)?;
            }
            _ => {
                return Err(ErrorKind::UnexpectedToken(
                    lexer.slice().to_owned(),
                    field_type.to_owned(),
                ))?
            }
        };

        token = Some(type_);
        lexer.advance();
    }

    Ok(token.ok_or(ErrorKind::NonExistentType)?)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Ten levels of array nesting is the maximum supported depth.
    #[test]
    fn test_parser() {
        let source = "byte[][][7][][][][][][][]";
        parse_type(source).unwrap();
    }
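    // A minimal structural sketch: assuming `lunarity_lexer` tokenizes `uint`
    // and the integer literal as the match arms in `parse_type` expect, a
    // fixed-size inner array wrapped in a dynamic outer array parses into
    // nested `Type::Array` values.
    #[test]
    fn test_fixed_and_dynamic_array_structure() {
        let parsed = parse_type("uint[3][]").unwrap();
        assert_eq!(
            parsed,
            Type::Array {
                inner: Box::new(Type::Array {
                    inner: Box::new(Type::Uint),
                    length: Some(3),
                }),
                length: None,
            }
        );
    }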
    // Eleven levels of array nesting exceed the supported depth and must fail.
    #[test]
    fn test_nested_array() {
        let source = "byte[][][7][][][][][][][][]";
        assert!(parse_type(source).is_err());
    }
    // Brackets and type names in the wrong order are rejected.
    #[test]
    fn test_malformed_array_type() {
        let source = "byte[7[]uint][]";
        assert!(parse_type(source).is_err());
    }
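    // A round-trip sketch: a parsed type should format back to the same
    // Solidity string via `Display` and the `From<Type> for String` impl.
    #[test]
    fn test_roundtrip_to_string() {
        let source = "address[5][]";
        let parsed = parse_type(source).unwrap();
        assert_eq!(parsed.to_string(), source);
        assert_eq!(String::from(parsed), source);
    }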
}