Skip to content

Commit

Permalink
Merge 96dcf9c into eb020d7
Browse files Browse the repository at this point in the history
  • Loading branch information
HalidOdat authored Apr 21, 2020
2 parents eb020d7 + 96dcf9c commit 1b2481c
Show file tree
Hide file tree
Showing 7 changed files with 221 additions and 90 deletions.
6 changes: 3 additions & 3 deletions boa/src/builtins/function/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@ fn check_arguments_object() {
"#;

eprintln!("{}", forward(&mut engine, init));
let expected_return_val: f64 = 100.0;
let expected_return_val = 100;
let return_val = forward_val(&mut engine, "val").expect("value expected");
assert_eq!(return_val.is_double(), true);
assert_eq!(return_val.is_integer(), true);
assert_eq!(
from_value::<f64>(return_val).expect("Could not convert value to f64"),
from_value::<i32>(return_val).expect("Could not convert value to i32"),
expected_return_val
);
}
8 changes: 8 additions & 0 deletions boa/src/builtins/value/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,14 @@ impl ValueData {
}
}

/// Returns `true` if the value is an integer (`ValueData::Integer`).
pub fn is_integer(&self) -> bool {
    // `matches!` is the idiomatic form of a `match` whose only job is
    // to distinguish one pattern from everything else.
    matches!(*self, ValueData::Integer(_))
}

/// Returns true if the value is a number
pub fn is_num(&self) -> bool {
self.is_double()
Expand Down
16 changes: 14 additions & 2 deletions boa/src/syntax/ast/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,17 @@ impl Debug for VecToken {
}
}

/// Represents the different types of numeric literals.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum NumericLiteral {
    /// A floating point number
    Number(f64),
    /// An integer
    Integer(u64),
    // TODO: Add BigInt
}

/// Represents the type of Token and the data it has inside.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, PartialEq, Debug)]
Expand All @@ -57,7 +68,7 @@ pub enum TokenKind {
/// A `null` literal
NullLiteral,
/// A numeric literal
NumericLiteral(f64),
NumericLiteral(NumericLiteral),
/// A piece of punctuation
Punctuator(Punctuator),
/// A string literal
Expand All @@ -76,7 +87,8 @@ impl Display for TokenKind {
TokenKind::Identifier(ref ident) => write!(f, "{}", ident),
TokenKind::Keyword(ref word) => write!(f, "{}", word),
TokenKind::NullLiteral => write!(f, "null"),
TokenKind::NumericLiteral(ref num) => write!(f, "{}", num),
TokenKind::NumericLiteral(NumericLiteral::Number(num)) => write!(f, "{}", num),
TokenKind::NumericLiteral(NumericLiteral::Integer(num)) => write!(f, "{}", num),
TokenKind::Punctuator(ref punc) => write!(f, "{}", punc),
TokenKind::StringLiteral(ref lit) => write!(f, "{}", lit),
TokenKind::RegularExpressionLiteral(ref body, ref flags) => {
Expand Down
37 changes: 26 additions & 11 deletions boa/src/syntax/lexer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ mod tests;

use crate::syntax::ast::{
punc::Punctuator,
token::{Token, TokenKind},
token::{NumericLiteral, Token, TokenKind},
};
use std::{
char::{decode_utf16, from_u32},
Expand Down Expand Up @@ -368,17 +368,17 @@ impl<'a> Lexer<'a> {

let num = match self.preview_next() {
None => {
self.push_token(TokenKind::NumericLiteral(0_f64));
self.push_token(TokenKind::NumericLiteral(NumericLiteral::Integer(0)));
return Ok(());
}
Some('x') | Some('X') => {
self.read_integer_in_base(16, buf)? as f64
NumericLiteral::Integer(self.read_integer_in_base(16, buf)?)
}
Some('o') | Some('O') => {
self.read_integer_in_base(8, buf)? as f64
NumericLiteral::Integer(self.read_integer_in_base(8, buf)?)
}
Some('b') | Some('B') => {
self.read_integer_in_base(2, buf)? as f64
NumericLiteral::Integer(self.read_integer_in_base(2, buf)?)
}
Some(ch) if (ch.is_ascii_digit() || ch == '.') => {
// LEGACY OCTAL (ONLY FOR NON-STRICT MODE)
Expand All @@ -399,16 +399,18 @@ impl<'a> Lexer<'a> {
}
}
}
dbg!(&gone_decimal);
if gone_decimal {
f64::from_str(&buf).map_err(|_e| LexerError::new("Could not convert value to f64"))?
NumericLiteral::Number(f64::from_str(&buf).map_err(|_e| LexerError::new("Could not convert value to f64"))?)
} else if buf.is_empty() {
0.0
NumericLiteral::Integer(0)
} else {
(u64::from_str_radix(&buf, 8).map_err(|_e| LexerError::new("Could not convert value to u64"))?) as f64
NumericLiteral::Integer(
u64::from_str_radix(&buf, 8).map_err(|_e| {dbg!(&_e); LexerError::new("Could not convert value to u64")})?)
}
}
Some(_) => {
0.0
NumericLiteral::Integer(0)
}
};

Expand All @@ -420,10 +422,12 @@ impl<'a> Lexer<'a> {
};
}
_ if ch.is_digit(10) => {
let mut is_floating_point = false;
let mut buf = ch.to_string();
'digitloop: while let Some(ch) = self.preview_next() {
match ch {
'.' => loop {
is_floating_point = true;
buf.push(self.next());

let c = match self.preview_next() {
Expand All @@ -450,6 +454,7 @@ impl<'a> Lexer<'a> {
}
},
'e' | 'E' => {
is_floating_point = true;
match self.preview_multiple_next(2).unwrap_or_default().to_digit(10) {
Some(0..=9) | None => {
buf.push(self.next());
Expand All @@ -469,9 +474,19 @@ impl<'a> Lexer<'a> {
_ => break,
}
}
// TODO make this a bit more safe -------------------------------VVVV

self.push_token(TokenKind::NumericLiteral(
f64::from_str(&buf).map_err(|_| LexerError::new("Could not convert value to f64"))?,
if is_floating_point {
// TODO make this a bit more safe -------------------------------VVVV
NumericLiteral::Number(
f64::from_str(&buf).map_err(|_| LexerError::new("Could not convert value to f64"))?
)
} else {
NumericLiteral::Integer(
u64::from_str(&buf).map_err(|_e| {dbg!(&_e); dbg!(&is_floating_point); LexerError::new("Could not convert value to u64")})?
)

}
))
}
_ if ch.is_alphabetic() || ch == '$' || ch == '_' => {
Expand Down
152 changes: 121 additions & 31 deletions boa/src/syntax/lexer/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -385,24 +385,78 @@ fn numbers() {
);

lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::NumericLiteral(2.0));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(52.0));
assert_eq!(lexer.tokens[3].kind, TokenKind::NumericLiteral(46.0));
assert_eq!(lexer.tokens[4].kind, TokenKind::NumericLiteral(7.89));
assert_eq!(lexer.tokens[5].kind, TokenKind::NumericLiteral(42.0));
assert_eq!(lexer.tokens[6].kind, TokenKind::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[7].kind, TokenKind::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[8].kind, TokenKind::NumericLiteral(0.005));
assert_eq!(lexer.tokens[9].kind, TokenKind::NumericLiteral(2.0));
assert_eq!(lexer.tokens[10].kind, TokenKind::NumericLiteral(83.0));
assert_eq!(lexer.tokens[11].kind, TokenKind::NumericLiteral(999.0));
assert_eq!(lexer.tokens[12].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[13].kind, TokenKind::NumericLiteral(0.1));
assert_eq!(lexer.tokens[14].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[15].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[16].kind, TokenKind::NumericLiteral(0.0));
assert_eq!(lexer.tokens[17].kind, TokenKind::NumericLiteral(0.12));
assert_eq!(
lexer.tokens[0].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(1))
);
assert_eq!(
lexer.tokens[1].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(2))
);
assert_eq!(
lexer.tokens[2].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(52))
);
assert_eq!(
lexer.tokens[3].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(46))
);
assert_eq!(
lexer.tokens[4].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(7.89))
);
assert_eq!(
lexer.tokens[5].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(42.0))
);
assert_eq!(
lexer.tokens[6].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(5000.0))
);
assert_eq!(
lexer.tokens[7].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(5000.0))
);
assert_eq!(
lexer.tokens[8].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(0.005))
);
assert_eq!(
lexer.tokens[9].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(2))
);
assert_eq!(
lexer.tokens[10].kind,
TokenKind::NumericLiteral(NumericLiteral::Integer(83))
);
assert_eq!(
lexer.tokens[11].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(999.0))
);
assert_eq!(
lexer.tokens[12].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(10.0))
);
assert_eq!(
lexer.tokens[13].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(0.1))
);
assert_eq!(
lexer.tokens[14].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(10.0))
);
assert_eq!(
lexer.tokens[15].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(10.0))
);
assert_eq!(
lexer.tokens[16].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(0.0))
);
assert_eq!(
lexer.tokens[17].kind,
TokenKind::NumericLiteral(NumericLiteral::Number(0.12))
);
}

#[test]
Expand All @@ -415,7 +469,10 @@ fn test_single_number_without_semicolon() {
fn test_number_followed_by_dot() {
    // "1.." lexes as the number `1.` followed by a lone dot punctuator.
    let mut lexer = Lexer::new("1..");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Number(1.0)),
        TokenKind::Punctuator(Punctuator::Dot),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

Expand Down Expand Up @@ -443,55 +500,88 @@ fn test_regex_literal_flags() {
fn test_addition_no_spaces() {
    // "1+1" must lex as: Integer(1), `+`, Integer(1).
    let mut lexer = Lexer::new("1+1");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

#[test]
fn test_addition_no_spaces_left_side() {
    // A space after `+` must not change the token stream: Integer(1), `+`, Integer(1).
    let mut lexer = Lexer::new("1+ 1");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

#[test]
fn test_addition_no_spaces_right_side() {
    // A space before `+` must not change the token stream: Integer(1), `+`, Integer(1).
    let mut lexer = Lexer::new("1 +1");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

#[test]
fn test_addition_no_spaces_e_number_left_side() {
    // An exponent literal on the left side: `1e2` is the float 100.0,
    // while the plain `1` stays an integer.
    let mut lexer = Lexer::new("1e2+ 1");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Number(100.0)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

#[test]
fn test_addition_no_spaces_e_number_right_side() {
    // An exponent literal on the right side: `1e3` is the float 1000.0,
    // while the plain `1` stays an integer.
    let mut lexer = Lexer::new("1 +1e3");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Integer(1)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Number(1000.0)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}

#[test]
fn test_addition_no_spaces_e_number() {
    // Exponent literals on both sides lex as floats: 1e3 -> 1000.0, 1e11 -> 1e11.
    let mut lexer = Lexer::new("1e3+1e11");
    lexer.lex().expect("failed to lex");
    let expected = [
        TokenKind::NumericLiteral(NumericLiteral::Number(1000.0)),
        TokenKind::Punctuator(Punctuator::Add),
        TokenKind::NumericLiteral(NumericLiteral::Number(100_000_000_000.0)),
    ];
    for (index, kind) in expected.iter().enumerate() {
        assert_eq!(&lexer.tokens[index].kind, kind);
    }
}
Loading

0 comments on commit 1b2481c

Please sign in to comment.