-
Notifications
You must be signed in to change notification settings - Fork 197
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
11 changed files
with
881 additions
and
1 deletion.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,106 @@ | ||
mod token; | ||
use crate::node::Span; | ||
use logos::Logos; | ||
pub use token::{ | ||
Token, | ||
TokenKind, | ||
}; | ||
|
||
/// A lexer over a source string, wrapping the `logos`-generated lexer.
///
/// Borrows the input for `'a`; tokens produced by iteration hold `&'a str`
/// slices into that input.
pub struct Lexer<'a> {
    // The derived logos lexer that does the actual tokenization.
    inner: logos::Lexer<'a, TokenKind>,
}
|
||
impl<'a> Lexer<'a> { | ||
pub fn new(s: &'a str) -> Lexer { | ||
Lexer { | ||
inner: TokenKind::lexer(s), | ||
} | ||
} | ||
} | ||
|
||
impl<'a> Iterator for Lexer<'a> { | ||
type Item = Token<'a>; | ||
|
||
fn next(&mut self) -> Option<Self::Item> { | ||
let kind = self.inner.next()?; | ||
let text = self.inner.slice(); | ||
let span = self.inner.span(); | ||
|
||
Some(Token { | ||
kind, | ||
text, | ||
span: Span { | ||
start: span.start, | ||
end: span.end, | ||
}, | ||
}) | ||
} | ||
} | ||
|
||
#[cfg(test)]
mod tests {
    use crate::lexer::{
        Lexer,
        TokenKind,
    };
    use TokenKind::*;

    /// Lexes `input` and compares the resulting token kinds to `expected`,
    /// printing both sequences on mismatch.
    fn check(input: &str, expected: &[TokenKind]) {
        let actual: Vec<_> = Lexer::new(input).map(|tok| tok.kind).collect();

        assert!(
            actual.iter().eq(expected.iter()),
            "\nexpected: {:?}\n actual: {:?}",
            expected,
            actual
        );
    }

    #[test]
    fn basic() {
        // "contract Foo:\n x: u32\n def f() -> u32:\n return self.x\n",
        check(
            "contract Foo:\n x: u32\n def f() -> u32:",
            &[
                Contract, Spaces, Ident, Colon, // contract Foo:
                Newlines, Spaces, Ident, Colon, Spaces, UintType, // x: u32
                Newlines, Spaces, Def, Spaces, Ident, ParenOpen, ParenClose, Spaces, Arrow, Spaces,
                UintType, Colon,
            ],
        );
    }

    #[test]
    fn strings() {
        // A string literal with escaped tabs, newlines, and quotes should lex
        // as a single Text token covering the entire literal.
        let source = r#""string \t with \n escapes \" \"""#;
        let mut tokens = Lexer::new(source);
        let first = tokens.next().unwrap();
        assert!(first.kind == Text);
        assert!(first.text == source);
        assert!(tokens.next() == None);
    }

    #[test]
    fn errors() {
        // `@` matches no rule and must surface as an Error token without
        // derailing the rest of the stream.
        check(
            "contract Foo@ 5u8 \n self.bar",
            &[
                Contract, Spaces, Ident, Error, Spaces, Int, UintType, Spaces, Newlines, Spaces,
                Ident, Dot, Ident,
            ],
        );
    }

    #[test]
    fn tabs_and_comment() {
        // Runs of spaces and tabs are distinct tokens; a `#` comment extends
        // to (but does not include) the end of the line.
        check(
            "\n\t \tcontract \n \t Foo # hi mom!\n ",
            &[
                Newlines, Tabs, Spaces, Tabs, Contract, Spaces, Newlines, Spaces, Tabs, Spaces,
                Ident, Spaces, Comment, Newlines, Spaces,
            ],
        );
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,196 @@ | ||
use crate::node::Span; | ||
use logos::Logos; | ||
|
||
/// A single lexed token: its kind, the matched source text, and its
/// byte span within the original input.
#[derive(Debug, PartialEq)]
pub struct Token<'a> {
    // Which lexical category matched (keyword, identifier, symbol, ...).
    pub kind: TokenKind,
    // The exact slice of the source that this token covers.
    pub text: &'a str,
    // Byte offsets of `text` within the full source string.
    pub span: Span,
}
|
||
#[derive(Debug, Copy, Clone, PartialEq, Logos)] | ||
pub enum TokenKind { | ||
#[error] | ||
Error, | ||
|
||
#[regex("[a-zA-Z_][a-zA-Z0-9_]*")] | ||
Ident, | ||
// #[token("self")] | ||
// IdentSelf, | ||
// #[token("address")] | ||
// AddressType, | ||
#[regex("i(8|16|24|32|40|48|56|64|72|80|88|96|104|112|120|128|136|144)")] | ||
#[regex("i(152|160|168|176|184|192|200|208|216|224|232|240|248|256)")] | ||
IntType, | ||
#[regex("u(8|16|24|32|40|48|56|64|72|80|88|96|104|112|120|128|136|144)")] | ||
#[regex("u(152|160|168|176|184|192|200|208|216|224|232|240|248|256)")] | ||
UintType, | ||
|
||
#[regex(r"#[^\n]*")] | ||
Comment, | ||
#[regex(r"\n*")] | ||
Newlines, | ||
#[regex(" +")] | ||
Spaces, | ||
#[regex(r"\t+")] | ||
Tabs, | ||
|
||
#[regex("[0-9]+")] | ||
Int, | ||
#[regex("0[xX](?:_?[0-9a-fA-F])")] | ||
Hex, | ||
// Float, | ||
#[regex(r#""([^"\\]|\\.)*""#)] | ||
#[regex(r#"'([^'\\]|\\.)*'"#)] | ||
Text, | ||
#[token("true")] | ||
True, | ||
#[token("false")] | ||
False, | ||
// #[token("None")] // ? | ||
// None, | ||
#[token("assert")] | ||
Assert, | ||
#[token("break")] | ||
Break, | ||
#[token("continue")] | ||
Continue, | ||
#[token("contract")] | ||
Contract, | ||
#[token("def")] | ||
Def, | ||
#[token("const")] | ||
Const, | ||
#[token("elif")] | ||
Elif, | ||
#[token("else")] | ||
Else, | ||
#[token("emit")] | ||
Emit, | ||
#[token("event")] | ||
Event, | ||
#[token("if")] | ||
If, | ||
#[token("import")] | ||
Import, | ||
#[token("pass")] | ||
Pass, | ||
#[token("for")] | ||
For, | ||
#[token("from")] | ||
From, | ||
#[token("pub")] | ||
Pub, | ||
#[token("return")] | ||
Return, | ||
#[token("struct")] | ||
Struct, | ||
#[token("type")] | ||
Type, | ||
#[token("while")] | ||
While, | ||
|
||
#[token("and")] | ||
And, | ||
#[token("as")] | ||
As, | ||
#[token("in")] | ||
In, | ||
#[token("is")] | ||
Is, | ||
#[token("not")] | ||
Not, | ||
#[token("or")] | ||
Or, | ||
// Symbols | ||
#[token("(")] | ||
ParenOpen, | ||
#[token(")")] | ||
ParenClose, | ||
#[token("[")] | ||
BracketOpen, | ||
#[token("]")] | ||
BracketClose, | ||
#[token("{")] | ||
BraceOpen, | ||
#[token("}")] | ||
BraceClose, | ||
#[token(":")] | ||
Colon, | ||
#[token("::")] | ||
ColonColon, | ||
#[token(",")] | ||
Comma, | ||
#[token(";")] | ||
Semi, | ||
#[token("+")] | ||
Plus, | ||
#[token("-")] | ||
Minus, | ||
#[token("*")] | ||
Star, | ||
#[token("/")] | ||
Slash, | ||
#[token("|")] | ||
Pipe, | ||
#[token("&")] | ||
Amper, | ||
#[token("<")] | ||
Lt, | ||
#[token("<<")] | ||
LtLt, | ||
#[token(">")] | ||
Gt, | ||
#[token(">>")] | ||
GtGt, | ||
#[token("=")] | ||
Eq, | ||
#[token(".")] | ||
Dot, | ||
#[token("%")] | ||
Percent, | ||
#[token("==")] | ||
EqEq, | ||
#[token("!=")] | ||
NotEq, | ||
#[token("<=")] | ||
LtEq, | ||
#[token(">=")] | ||
GtEq, | ||
#[token("~")] | ||
Tilde, | ||
#[token("^")] | ||
Hat, | ||
#[token("**")] | ||
StarStar, | ||
#[token("**=")] | ||
StarStarEq, | ||
#[token("+=")] | ||
PlusEq, | ||
#[token("-=")] | ||
MinusEq, | ||
#[token("*=")] | ||
StarEq, | ||
#[token("/=")] | ||
SlashEq, | ||
#[token("%=")] | ||
PercentEq, | ||
#[token("&=")] | ||
AmperEq, | ||
#[token("|=")] | ||
PipeEq, | ||
#[token("^=")] | ||
HatEq, | ||
#[token("<<=")] | ||
LtLtEq, | ||
#[token(">>=")] | ||
GtGtEq, | ||
#[token("//")] | ||
SlashSlash, | ||
#[token("//=")] | ||
SlashSlashEq, | ||
#[token("...")] | ||
Ellipsis, | ||
#[token("->")] | ||
Arrow, | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.