From 724fcc29118d779cefde5e0a263ad74d67c4bfae Mon Sep 17 00:00:00 2001 From: Ben Date: Tue, 15 Oct 2024 18:00:05 +0100 Subject: [PATCH] More fixes - More errors (still non-final) - Change variant name of `default` TS function for `.d.ts` file - Refactor some lexer APIs (add `parse_comment_literal`) - Get parser tests passing - Remove EMPTY from `bracketed_items_from_reader` --- checker/src/synthesis/declarations.rs | 5 +- checker/src/synthesis/hoisting.rs | 2 +- parser/src/comments.rs | 9 +- .../src/declarations/classes/class_member.rs | 9 +- parser/src/declarations/classes/mod.rs | 6 +- parser/src/declarations/export.rs | 50 ++--- parser/src/declarations/import.rs | 3 +- parser/src/declarations/mod.rs | 116 ++++------- parser/src/errors.rs | 28 ++- parser/src/expressions/mod.rs | 191 +++--------------- parser/src/expressions/object_literal.rs | 24 +-- parser/src/expressions/template_literal.rs | 6 +- parser/src/extensions/jsx.rs | 24 ++- parser/src/functions/mod.rs | 63 ++++-- parser/src/lexer.rs | 130 +++++++++--- parser/src/lib.rs | 14 -- parser/src/modules.rs | 2 +- parser/src/property_key.rs | 4 +- parser/src/statements/if_statement.rs | 6 +- parser/src/statements/mod.rs | 6 +- parser/src/statements/switch_statement.rs | 7 +- parser/src/types/enum_declaration.rs | 9 +- parser/src/types/interface.rs | 10 +- parser/src/types/type_annotations.rs | 16 +- parser/src/variable_fields.rs | 17 +- parser/tests/statements_and_declarations.rs | 15 +- parser/tests/type_annotations.rs | 2 +- 27 files changed, 335 insertions(+), 439 deletions(-) diff --git a/checker/src/synthesis/declarations.rs b/checker/src/synthesis/declarations.rs index 827e375c..df4f4891 100644 --- a/checker/src/synthesis/declarations.rs +++ b/checker/src/synthesis/declarations.rs @@ -128,7 +128,10 @@ pub(crate) fn synthesise_declaration( ); } } - parser::declarations::ExportDeclaration::DefaultFunction { position, .. } => { + parser::declarations::ExportDeclaration::TSDefaultFunctionDeclaration { + position, + .. + } => { checking_data.diagnostics_container.add_error( TypeCheckError::FunctionWithoutBodyNotAllowedHere { position: position.with_source(environment.get_source()), diff --git a/checker/src/synthesis/hoisting.rs b/checker/src/synthesis/hoisting.rs index 4f9a2ea3..19975f43 100644 --- a/checker/src/synthesis/hoisting.rs +++ b/checker/src/synthesis/hoisting.rs @@ -403,7 +403,7 @@ pub(crate) fn hoist_statements( ); } Declaration::Export(Decorated { - on: ExportDeclaration::DefaultFunction { position, .. }, + on: ExportDeclaration::TSDefaultFunctionDeclaration { position, .. }, .. 
}) => { // TODO under definition file diff --git a/parser/src/comments.rs b/parser/src/comments.rs index 797c14a7..b3d5ad51 100644 --- a/parser/src/comments.rs +++ b/parser/src/comments.rs @@ -99,17 +99,16 @@ impl ASTNode for WithComment { fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { let start = reader.get_start(); if reader.is_operator_advance("/*") { - let comment = reader.parse_until("*/").expect("TODO"); - dbg!(&comment); + let comment = reader.parse_comment_literal(true)?.to_owned(); let item = T::from_reader(reader)?; let position = start.union(item.get_position()); - Ok(Self::PrefixComment(comment.to_owned(), item, position)) + Ok(Self::PrefixComment(comment, item, position)) } else { let item = T::from_reader(reader)?; if reader.is_operator_advance("/*") { - let comment = reader.parse_until("*/").expect("TODO"); + let comment = reader.parse_comment_literal(true)?.to_owned(); let position = start.union(reader.get_end()); - Ok(Self::PostfixComment(item, comment.to_owned(), position)) + Ok(Self::PostfixComment(item, comment, position)) } else { Ok(Self::None(item)) } diff --git a/parser/src/declarations/classes/class_member.rs b/parser/src/declarations/classes/class_member.rs index 8bf65fc2..d40c095d 100644 --- a/parser/src/declarations/classes/class_member.rs +++ b/parser/src/declarations/classes/class_member.rs @@ -71,7 +71,7 @@ impl ASTNode for ClassMember { todo!("comment; return Ok(Self::Comment(comment, is_multiline, span));"); } - // TODO temp fixes + // TODO temp fixes. Should be recorded let _ = reader.is_keyword_advance("declare"); let _ = reader.is_keyword_advance("public"); let _ = reader.is_keyword_advance("private"); @@ -130,8 +130,11 @@ impl ASTNode for ClassMember { Ok(ClassMember::Method(is_static, function)) } else { if !header.is_no_modifiers() { - todo!() - // return crate::throw_unexpected_token(reader, &[TSXToken::OpenParentheses]); + // TODO "" + return Err(crate::ParseError::new( + crate::ParseErrors::ExpectedOperator { expected: "(", found: "" }, + reader.next_item_span(), + )); } let is_optional = reader.is_operator_advance("?:"); let type_annotation = if is_optional || reader.is_operator_advance(":") { diff --git a/parser/src/declarations/classes/mod.rs b/parser/src/declarations/classes/mod.rs index 1565068a..5c43a726 100644 --- a/parser/src/declarations/classes/mod.rs +++ b/parser/src/declarations/classes/mod.rs @@ -73,10 +73,10 @@ impl AST break; } let value = Decorated::::from_reader(reader)?; + if let ClassMember::Property { .. } | ClassMember::Indexer { .. 
} = &value.on { + reader.expect_semi_colon()?; + } members.push(value); - - // TODO comments probably don't need this - reader.expect_semi_colon()?; } let end = reader.expect('}')?; diff --git a/parser/src/declarations/export.rs b/parser/src/declarations/export.rs index 03dd68de..62fbabd8 100644 --- a/parser/src/declarations/export.rs +++ b/parser/src/declarations/export.rs @@ -17,16 +17,12 @@ use visitable_derive::Visitable; #[derive(Debug, PartialEq, Clone, Visitable, get_field_by_type::GetFieldByType)] #[get_field_by_type_target(Span)] pub enum ExportDeclaration { - Item { - exported: Exportable, - position: Span, - }, - // `export default ...` - Default { - expression: Box, - position: Span, - }, - DefaultFunction { + /// `export *Exportable*` + Item { exported: Exportable, position: Span }, + /// `export default ...` + Default { expression: Box, position: Span }, + /// In TypeScript you can `export default name` in type definition modules + TSDefaultFunctionDeclaration { /// Technically not allowed in TypeScript is_async: bool, identifier: Option, @@ -75,6 +71,8 @@ impl ASTNode for ExportDeclaration { if reader.get_options().type_definition_module && crate::lexer::utilities::is_function_header(reader.get_current()) { + // Always have == .d.ts file here + // Unfortuantly have to do quite a bit of parsing here let is_async = reader.is_operator_advance("async"); let _ = reader.expect_keyword("function"); @@ -93,8 +91,7 @@ impl ASTNode for ExportDeclaration { }; let position = start.union(reader.get_end()); - - Ok(ExportDeclaration::DefaultFunction { + Ok(ExportDeclaration::TSDefaultFunctionDeclaration { position, is_async, identifier, @@ -223,28 +220,11 @@ impl ASTNode for ExportDeclaration { return Ok(Self::Item { exported: Exportable::Namespace(namespace), position }); } - todo!("{:?}", reader.get_current().get(..20)) - // } - // Token(TSXToken::Keyword(kw), _) if kw.is_in_function_header() => { - // let function_declaration = StatementFunction::from_reader(reader)?; - // let position = start.union(function_declaration.get_position()); - // Ok(Self::Variable { - // exported: Exportable::Function(function_declaration), - // position, - // }) - // } - // _ => throw_unexpected_token( - // reader, - // &[ - // TSXToken::Keyword(TSXKeyword::Class), - // TSXToken::Keyword(TSXKeyword::Function), - // TSXToken::Keyword(TSXKeyword::Const), - // TSXToken::Keyword(TSXKeyword::Let), - // TSXToken::Keyword(TSXKeyword::Interface), - // TSXToken::Keyword(TSXKeyword::Type), - // TSXToken::OpenBrace, - // ], - // ), + // TODO vary list on certain parameters + Err(crate::lexer::utilities::expected_one_of_keywords( + reader, + &["let", "const", "function", "class", "enum", "type", "interface", "{"], + )) } } @@ -317,7 +297,7 @@ impl ASTNode for ExportDeclaration { buf.push_str("export default "); expression.to_string_from_buffer(buf, options, local); } - ExportDeclaration::DefaultFunction { + ExportDeclaration::TSDefaultFunctionDeclaration { is_async, identifier, parameters, diff --git a/parser/src/declarations/import.rs b/parser/src/declarations/import.rs index 7a138b9b..bdd9a603 100644 --- a/parser/src/declarations/import.rs +++ b/parser/src/declarations/import.rs @@ -218,8 +218,7 @@ pub(crate) fn import_specifier_and_parts_from_reader( } else if reader.starts_with_string_delimeter() || reader.is_keyword("from") { ImportedItems::Parts(None) } else { - todo!("error for: {:?}", reader.get_current().get(..20).unwrap_or(reader.get_current())) - // return throw_unexpected_token(reader, &[TSXToken::Multiply, 
TSXToken::OpenBrace]); + return Err(crate::lexer::utilities::expected_one_of_keywords(reader, &["*", "["])); }; Ok(PartsResult { diff --git a/parser/src/declarations/mod.rs b/parser/src/declarations/mod.rs index 52551a79..14e73a80 100644 --- a/parser/src/declarations/mod.rs +++ b/parser/src/declarations/mod.rs @@ -65,11 +65,12 @@ pub enum Declaration { impl Declaration { // TODO strict mode can affect result + // TODO reuse + // Warning expects skip to have been called pub(crate) fn is_declaration_start(reader: &crate::new::Lexer) -> bool { - let declaration_keyword = reader.is_one_of_keywords(&[ + let mut declaration_keyword = reader.is_one_of_keywords(&[ "let", "const", - "function", "class", "enum", "interface", @@ -78,76 +79,30 @@ impl Declaration { "declare", "import", "export", + // Extra "from", ]); - if let Some(name @ ("from" | "import" | "export" | "namespace" | "type")) = + if let Some("from") = declaration_keyword { + reader.get_options().reversed_imports + } else if let Some(name @ ("import" | "export" | "namespace" | "type")) = declaration_keyword { - if let (false, "from") = (reader.get_options().reversed_imports, name) { - return false; - } - let after_declaration_keyword = reader.get_current()[name.len()..].trim_start(); + // TODO more (is operator like?) let is_declaration_keyword_expression = after_declaration_keyword.starts_with('(') || after_declaration_keyword.starts_with('.') || after_declaration_keyword.starts_with('[') || after_declaration_keyword.starts_with('(') || after_declaration_keyword.starts_with('='); + !is_declaration_keyword_expression + } else if declaration_keyword.is_some() { + true } else { - declaration_keyword.is_some() + crate::lexer::utilities::is_function_header(reader.get_current()) } - - // #[cfg(feature = "extras")] - // return result - // || matches!(token, TSXToken::Keyword(kw) if options.custom_function_headers && kw.is_special_function_header()) - // || (matches!(token, TSXToken::Keyword(TSXKeyword::Namespace) if cfg!(feature = "full-typescript"))) - // || { - // let TSXToken::Keyword(token) = *token else { return false }; - // let Some(Token(after, _)) = reader.peek_n(1) else { return false }; - - // #[allow(clippy::match_same_arms)] - // match (token, after) { - // // For dynamic import - // ( - // TSXKeyword::Import, - // TSXToken::OpenBrace - // | TSXToken::Keyword(..) - // | TSXToken::Identifier(..) - // | TSXToken::StringLiteral(..) - // | TSXToken::Multiply, - // ) => true, - // (TSXKeyword::Declare | TSXKeyword::Interface, _) => options.type_annotations, - // (TSXKeyword::Async, TSXToken::Keyword(TSXKeyword::Function)) => true, - // (TSXKeyword::Async, TSXToken::Keyword(kw)) => { - // options.custom_function_headers && kw.is_special_function_header() - // } - // // Extra - // (TSXKeyword::From, TSXToken::StringLiteral(..)) => true, - // (..) => false, - // } - // }; - - // #[cfg(not(feature = "extras"))] - // return result || { - // let TSXToken::Keyword(token) = *token else { return false }; - - // // For dynamic import - // matches!(token, TSXKeyword::Import) - // && matches!( - // reader.peek_n(1), - // Some(Token( - // TSXToken::OpenBrace - // | TSXToken::Keyword(..) | TSXToken::Identifier(..) - // | TSXToken::StringLiteral(..) 
- // | TSXToken::Multiply, - // _ - // )) - // ) - // }; - // } } } @@ -230,8 +185,7 @@ impl crate::ASTNode for Declaration { &["let", "const", "var", "class", "type", "async", "function", "namespace"], )) } - } else if reader.is_keyword("function") || reader.is_keyword("async") { - // TODO more above ^^^ + } else if crate::lexer::utilities::is_function_header(reader.get_current()) { let function = StatementFunction::from_reader(reader)?; Ok(Declaration::Function(Decorated::new(decorators, function))) } else { @@ -253,7 +207,7 @@ impl crate::ASTNode for Declaration { // Ok(Declaration::Function(Decorated::new(decorators, function))) // } - // TODO vary list on certain paramters + // TODO vary list on certain parameters Err(crate::lexer::utilities::expected_one_of_keywords( reader, &[ @@ -355,8 +309,14 @@ impl crate::ASTNode for ImportExportPart { let name = crate::VariableIdentifier::Standard(name, position); Ok(Self { just_type, name, alias: None, position, _marker: Default::default() }) } else { - todo!() - // crate::throw_unexpected_token(reader, &[TSXToken::Keyword(TSXKeyword::As)]) + Err(ParseError::new( + ParseErrors::ExpectedKeyword { + expected: "as", + // TODO + found: reader.get_current(), + }, + reader.next_item_span(), + )) } } else { let name = crate::VariableIdentifier::from_reader(reader)?; @@ -438,7 +398,7 @@ impl self_rust_tokenize::SelfRustTokenize for ImportExportPar &self, _token_stream: &mut self_rust_tokenize::proc_macro2::TokenStream, ) { - todo!() + todo!("import export part to token stream") } } @@ -464,7 +424,7 @@ impl ImportExportName { reader.skip(); let start = reader.get_start(); if reader.starts_with_string_delimeter() { - let (content, quoted) = reader.parse_string_literal().expect("TODO"); + let (content, quoted) = reader.parse_string_literal()?; let position = start.with_length(content.len() + 2); Ok((ImportExportName::Quoted(content.to_owned(), quoted), position)) } else { @@ -523,25 +483,21 @@ impl ImportLocation { // )); // } // } + // else if options.interpolation_points + // && matches!(&token.0, TSXToken::Identifier(i) if i == crate::marker::MARKER) + // { + // Ok((Self::Marker(state.new_partial_point_marker(token.1)), source_map::End(token.1 .0))) + // todo!() + // Err(ParseError::new( + // ParseErrors::ExpectedStringLiteral { found: token.0 }, + // token.1.with_length(0), + // )) reader.skip(); - if reader.starts_with_string_delimeter() { - let start = reader.get_start(); - let (content, quoted) = reader.parse_string_literal().expect("TODO"); - Ok((ImportLocation::Quoted(content.to_owned(), quoted))) - } else { - todo!("{:?}", reader.get_current().get(..20)) - // else if options.interpolation_points - // && matches!(&token.0, TSXToken::Identifier(i) if i == crate::marker::MARKER) - // { - // Ok((Self::Marker(state.new_partial_point_marker(token.1)), source_map::End(token.1 .0))) - // todo!() - // Err(ParseError::new( - // ParseErrors::ExpectedStringLiteral { found: token.0 }, - // token.1.with_length(0), - // )) - } + let start = reader.get_start(); + let (content, quoted) = reader.parse_string_literal()?; + Ok((ImportLocation::Quoted(content.to_owned(), quoted))) } pub(crate) fn to_string_from_buffer(&self, buf: &mut T) { diff --git a/parser/src/errors.rs b/parser/src/errors.rs index 37914867..104b7b52 100644 --- a/parser/src/errors.rs +++ b/parser/src/errors.rs @@ -13,7 +13,8 @@ pub enum ParseErrors<'a> { expected: &'static str, found: &'a str, }, - ExpectedOneOfKeywords { + // Keywords and/or operators + ExpectedOneOfItems { expected: &'static 
[&'static str], found: &'a str, }, @@ -21,10 +22,6 @@ pub enum ParseErrors<'a> { expected: &'static str, found: &'a str, }, - ExpectedOneOfOperators { - expected: &'static [&'static str], - found: &'a str, - }, ClosingTagDoesNotMatch { expected: &'a str, found: &'a str, @@ -63,6 +60,12 @@ pub enum ParseErrors<'a> { ExpectedDeclaration, CannotHaveRegularMemberAfterSpread, InvalidLHSOfIs, + NoNewLinesInString, + InvalidNumber, + InvalidRegularExpression, + /// For strings, regular expressions, multiline comments. + /// TODO specify by field + UnexpectedEnd, /// TODO this could be set to collect, rather than breaking (https://github.com/kaleidawave/ezno/issues/203) TypeAnnotationUsed, } @@ -82,8 +85,7 @@ impl<'a> Display for ParseErrors<'a> { ParseErrors::ExpectedOperator { expected, found } => { write!(f, "Expected {expected} found {found}") } - ParseErrors::ExpectedOneOfKeywords { expected, found } - | ParseErrors::ExpectedOneOfOperators { expected, found } => { + ParseErrors::ExpectedOneOfItems { expected, found } => { f.write_str("Expected ")?; utilities::format_list(f, expected)?; write!(f, " found {found}") @@ -91,6 +93,18 @@ impl<'a> Display for ParseErrors<'a> { ParseErrors::ExpectedKeyword { expected, found } => { write!(f, "Expected {expected:?}, found {found:?}") } + ParseErrors::NoNewLinesInString => { + write!(f, "Cannot use new lines in string") + } + ParseErrors::InvalidNumber => { + write!(f, "Invalid number") + } + ParseErrors::InvalidRegularExpression => { + write!(f, "Invalid regular expression") + } + ParseErrors::UnexpectedEnd => { + write!(f, "Unexpected end") + } // ParseErrors::UnexpectedSymbol(invalid_character) => Display::fmt(invalid_character, f), ParseErrors::ClosingTagDoesNotMatch { expected, found } => { write!(f, "Closing tag does not match, expected found ") diff --git a/parser/src/expressions/mod.rs b/parser/src/expressions/mod.rs index 0a87065f..ef43f2fb 100644 --- a/parser/src/expressions/mod.rs +++ b/parser/src/expressions/mod.rs @@ -246,11 +246,11 @@ impl Expression { let start = reader.get_start(); let first_expression = { if reader.starts_with_string_delimeter() { - let (content, quoted) = reader.parse_string_literal().expect("TODO"); + let (content, quoted) = reader.parse_string_literal()?; let position = start.with_length(content.len() + 2); Expression::StringLiteral(content.to_owned(), quoted, position) } else if reader.starts_with_number() { - let (value, length) = reader.parse_number_literal().expect("TODO"); + let (value, length) = reader.parse_number_literal()?; let position = start.with_length(length as usize); Expression::NumberLiteral(value, position) } @@ -264,12 +264,7 @@ impl Expression { else if reader.starts_with_str("//") || reader.starts_with_str("/*") { let is_multiline = reader.starts_with_str("/*"); reader.advance(2); - let content = if is_multiline { - reader.parse_until("*/").expect("TODO").to_owned() - } else { - reader.parse_until("\n").expect("TODO").to_owned() - }; - + let content = reader.parse_comment_literal(is_multiline)?.to_owned(); let expression = Self::from_reader_with_precedence(reader, return_precedence)?; let position = start.union(expression.get_position()); Expression::Comment { @@ -280,7 +275,7 @@ impl Expression { prefix: true, } } else if reader.starts_with('/') { - let (pattern, flags, length) = reader.parse_regex_literal().expect("TODO"); + let (pattern, flags, length) = reader.parse_regex_literal()?; let position = start.with_length(length as usize); Expression::RegexLiteral { pattern: pattern.to_owned(), @@ 
-322,134 +317,17 @@ impl Expression { TemplateLiteral::from_reader(reader).map(Expression::TemplateLiteral)? } else if crate::lexer::utilities::is_function_header(reader.get_current()) { // TODO more cases here - if reader.get_current().starts_with("async function") { - // #[cfg(feature = "extras")] - // { - // use crate::functions::FunctionLocationModifier; - // let (generator_keyword, token) = - // if let Token(TSXToken::Keyword(TSXKeyword::Generator), _) = token { - // (Some(token.get_span()), reader.next().unwrap()) - // } else { - // (None, token) - // }; - - // let (location, token) = match token { - // Token(TSXToken::Keyword(TSXKeyword::Server), _) => { - // (Some(FunctionLocationModifier::Server), reader.next().unwrap()) - // } - // Token(TSXToken::Keyword(TSXKeyword::Worker), _) => { - // (Some(FunctionLocationModifier::Worker), reader.next().unwrap()) - // } - // token => (None, token), - // }; - - // // Here because `token` (can't use `.expect()`) - // let Token(TSXToken::Keyword(TSXKeyword::Function), function_start) = token - // else { - // return throw_unexpected_token_with_token( - // token, - // &[TSXToken::Keyword(TSXKeyword::Function)], - // ); - // }; - - // let function_end = - // function_start.get_end_after(TSXKeyword::Function.length() as usize); - - // if generator_keyword.is_some() { - // let position = start.union(function_end); - - // let header = FunctionHeader::ChadFunctionHeader { - // is_async, - // is_generator: true, - // location, - // position, - // }; - - // let name = if let Some(Token(TSXToken::OpenParentheses, _)) = - // reader.peek() - // { - // None - // } else { - // let (token, span) = - // token_as_identifier(reader.next().unwrap(), "function name")?; - // Some(crate::VariableIdentifier::Standard(token, span)) - // }; - - // FunctionBase::from_reader_with_header_and_name( - // reader, - // state, - // options, - // (Some(header.get_position().get_start()), header), - // ExpressionPosition(name), - // ) - // .map(Expression::ExpressionFunction)? - // } else { - // let is_generator = reader - // .conditional_next(|tok| matches!(tok, TSXToken::Multiply)) - // .map(|token| token.get_span()); - - // let end = is_generator - // .as_ref() - // .map_or(function_end, Span::get_end); - - // let header = FunctionHeader::VirginFunctionHeader { - // position: start.union(end), - // is_async, - // location, - // is_generator, - // }; - - // let name = if let Some(Token(TSXToken::OpenParentheses, _)) = - // reader.peek() - // { - // None - // } else { - // let (token, span) = - // token_as_identifier(reader.next().unwrap(), "function name")?; - // Some(crate::VariableIdentifier::Standard(token, span)) - // }; - - // FunctionBase::from_reader_with_header_and_name( - // reader, - // state, - // options, - // (Some(header.get_position().get_start()), header), - // ExpressionPosition(name), - // ) - // .map(Expression::ExpressionFunction)? 
- // } - // } - - // #[cfg(not(feature = "extras"))] - { - reader.advance("async function".len() as u32); - let is_generator = reader.is_operator_advance("*"); - - let header = FunctionHeader::VirginFunctionHeader { - position: start.union(reader.get_end()), - is_async: true, - is_generator, - // TODO - location: None, - }; - - let name = if reader.is_one_of_operators(&["(", "<"]).is_some() { - None - } else { - reader.skip(); - let start = reader.get_start(); - let content = - reader.parse_identifier("function name").expect("TODO").to_owned(); - let position = start.with_length(content.len()); - Some(crate::VariableIdentifier::Standard(content, position)) - }; - FunctionBase::from_reader_with_header_and_name( - reader, - header, - ExpressionPosition(name), - ) - .map(Expression::ExpressionFunction)? - } + let mut current = reader.get_current(); + if current.starts_with("async") { + current = ¤t["async".len()..].trim_start(); + } + if current.starts_with("server") { + current = ¤t["server".len()..].trim_start(); + } else if current.starts_with("worker") { + current = ¤t["worker".len()..].trim_start(); + } + if current.starts_with("function") { + Expression::ExpressionFunction(ExpressionFunction::from_reader(reader)?) } else { Expression::ArrowFunction(ArrowFunction::from_reader(reader)?) } @@ -482,15 +360,6 @@ impl Expression { let operand = VariableOrPropertyAccess::from_reader(reader)?; let position = start.union(operand.get_position()); Expression::UnaryPrefixAssignmentOperation { operand, operator, position } - } else if reader.is_keyword("async") || reader.is_keyword("function") { - // TODO async () handling - if reader.is_keyword("function") { - FunctionBase::from_reader(reader).map(Expression::ExpressionFunction)? - } else { - // TODO generator keyword as well - // TODO arrow functions - todo!() - } } else if reader.is_keyword("class") { ClassDeclaration::from_reader(reader).map(Expression::ClassExpression)? 
} else if let Some(op) = reader.is_one_of_operators(&["+", "-", "~", "!"]) { @@ -582,7 +451,10 @@ impl Expression { reader.expect(']')?; SuperReference::Index { indexer: Box::new(indexer) } } else { - todo!() + return Err(crate::lexer::utilities::expected_one_of_keywords( + reader, + &[".", "(", "["], + )); }; Expression::SuperExpression(inner, start.union(reader.get_end())) } else if reader.is_keyword_advance("import") { @@ -713,11 +585,7 @@ impl Expression { if reader.starts_with_str("//") || reader.starts_with_str("/*") { let is_multiline = reader.starts_with_str("/*"); reader.advance(2); - let content = if is_multiline { - reader.parse_until("*/").expect("TODO").to_owned() - } else { - reader.parse_until("\n").expect("TODO").to_owned() - }; + let content = reader.parse_comment_literal(is_multiline)?.to_owned(); let position = top.get_position().union(reader.get_end()); top = Expression::Comment { is_multiline, @@ -955,7 +823,7 @@ impl Expression { // let marker = state.new_partial_point_marker(accessor_position); // let position = accessor_position.union(source_map::End(at.0)); // (PropertyReference::Marker(marker), position) - todo!() + todo!("TODO partial syntax else error") } else { let is_private = reader.is_operator_advance("#"); let property = reader.parse_identifier("property name")?.to_owned(); @@ -2034,13 +1902,7 @@ impl ASTNode for FunctionArgument { } else if reader.starts_with_str("//") || reader.starts_with_str("/*") { let is_multiline = reader.starts_with_str("/*"); reader.advance(2); - let content = if is_multiline { - reader.parse_until("*/").expect("TODO") - } else { - reader.parse_until("\n").expect("TODO") - } - .to_owned(); - + let content = reader.parse_comment_literal(is_multiline)?.to_owned(); if reader.is_one_of_operators(&[")", "}", ","]).is_some() { let position = start.union(reader.get_end()); return Ok(Self::Comment { content, is_multiline, position }); @@ -2118,7 +1980,12 @@ impl ASTNode for ArrayElement { } fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { - FunctionArgument::from_reader(reader).map(Some).map(Self) + // This is allowed for some reason + if reader.is_one_of_operators(&[",", "]"]).is_some() { + Ok(Self(None)) + } else { + FunctionArgument::from_reader(reader).map(Some).map(Self) + } } fn to_string_from_buffer( @@ -2149,8 +2016,6 @@ impl ArrayElement { } impl ListItem for ArrayElement { - const EMPTY: Option = Some(Self(None)); - type LAST = (); } diff --git a/parser/src/expressions/object_literal.rs b/parser/src/expressions/object_literal.rs index 7a668837..6a30845f 100644 --- a/parser/src/expressions/object_literal.rs +++ b/parser/src/expressions/object_literal.rs @@ -191,20 +191,16 @@ impl ASTNode for ObjectLiteralMember { fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { let start = reader.get_start(); - if reader.is_operator_advance("//") { - let content = reader.parse_until("\n").expect("TODO"); - return Ok(Self::Comment( - content.to_owned(), - false, - start.with_length(2 + content.len()), - )); - } else if reader.is_operator_advance("/*") { - let content = reader.parse_until("*/").expect("TODO"); - return Ok(Self::Comment( - content.to_owned(), - true, - start.with_length(4 + content.len()), - )); + if reader.starts_with_str("//") || reader.starts_with_str("/*") { + let is_multiline = reader.starts_with_str("/*"); + reader.advance(2); + let content = reader.parse_comment_literal(is_multiline)?.to_owned(); + let position = if is_multiline { + start.with_length(2 + content.len()) + } else { + 
start.with_length(4 + content.len()) + }; + return Ok(Self::Comment(content.to_owned(), false, position)); } if reader.is_operator_advance("...") { diff --git a/parser/src/expressions/template_literal.rs b/parser/src/expressions/template_literal.rs index fe5bddb1..2bbd723e 100644 --- a/parser/src/expressions/template_literal.rs +++ b/parser/src/expressions/template_literal.rs @@ -23,7 +23,11 @@ impl ASTNode for TemplateLiteral { let mut parts = Vec::new(); loop { - let (content, found) = reader.parse_until_one_of(&["${", "`"]).unwrap(); + let (content, found) = reader.parse_until_one_of(&["${", "`"]).map_err(|()| { + // TODO might be a problem + let position = reader.get_start().with_length(reader.get_current().len()); + crate::ParseError::new(crate::ParseErrors::UnexpectedEnd, position) + })?; if let "${" = found { let expression = MultipleExpression::from_reader(reader)?; reader.expect('}')?; diff --git a/parser/src/extensions/jsx.rs b/parser/src/extensions/jsx.rs index 7d92b95b..35912d90 100644 --- a/parser/src/extensions/jsx.rs +++ b/parser/src/extensions/jsx.rs @@ -111,7 +111,14 @@ impl ASTNode for JSXElement { }); } else if html_tag_contains_literal_content(&tag_name) { // TODO could embedded parser? - let content = reader.parse_until("')?; return Ok(JSXElement { @@ -380,14 +387,25 @@ impl ASTNode for JSXNode { Ok(JSXNode::InterpolatedExpression(Box::new(expression), start.union(end))) } else if reader.starts_with_str("").expect("TODO").to_owned(); + let content = reader + .parse_until("-->") + .map_err(|()| { + // TODO might be a problem + let position = reader.get_start().with_length(reader.get_current().len()); + ParseError::new(crate::ParseErrors::UnexpectedEnd, position) + })? + .to_owned(); let position = start.with_length(content.len()); Ok(JSXNode::Comment(content, position)) } else if reader.starts_with_str("<") { let element = JSXElement::from_reader(reader)?; Ok(JSXNode::Element(element)) } else { - let (content, _) = reader.parse_until_one_of_no_advance(&["<", "{"]).expect("TODO"); + let (content, _) = reader.parse_until_one_of_no_advance(&["<", "{"]).map_err(|()| { + // TODO might be a problem + let position = reader.get_start().with_length(reader.get_current().len()); + ParseError::new(crate::ParseErrors::UnexpectedEnd, position) + })?; let position = start.with_length(content.len()); Ok(JSXNode::TextNode(content.trim_start().into(), position)) } diff --git a/parser/src/functions/mod.rs b/parser/src/functions/mod.rs index 3bc1ebc1..d54e81cf 100644 --- a/parser/src/functions/mod.rs +++ b/parser/src/functions/mod.rs @@ -380,10 +380,11 @@ pub enum FunctionHeader { is_generator: bool, position: Span, }, + /// Always is_generator #[cfg(feature = "extras")] ChadFunctionHeader { is_async: bool, - is_generator: bool, + // is_generator: bool, location: Option, position: Span, }, @@ -399,22 +400,39 @@ impl ASTNode for FunctionHeader { } fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { + fn parse_location(reader: &mut crate::new::Lexer) -> Option { + if reader.is_keyword_advance("server") { + Some(FunctionLocationModifier::Server) + } else if reader.is_keyword_advance("worker") { + Some(FunctionLocationModifier::Worker) + } else { + None + } + } + let start = reader.get_start(); let is_async = reader.is_operator_advance("async"); - reader.expect_keyword("function")?; - - let is_generator = reader.is_operator_advance("*"); - - // TODO - Ok(Self::VirginFunctionHeader { - is_async, - // TODO first thing - // Option, - location: None, - is_generator, - position: 
start.union(reader.get_end()), - }) + if reader.is_keyword_advance("generator") { + let location = parse_location(reader); + let _ = reader.expect_keyword("function")?; + Ok(Self::ChadFunctionHeader { + is_async, + location, + // is_generator: true, + position: start.union(reader.get_end()), + }) + } else { + let location = parse_location(reader); + let _ = reader.expect_keyword("function")?; + let is_generator = reader.is_operator_advance("*"); + Ok(Self::VirginFunctionHeader { + is_async, + location, + is_generator, + position: start.union(reader.get_end()), + }) + } } fn to_string_from_buffer( @@ -441,7 +459,7 @@ impl FunctionHeader { match self { FunctionHeader::VirginFunctionHeader { is_generator, .. } => *is_generator, #[cfg(feature = "extras")] - FunctionHeader::ChadFunctionHeader { is_generator, .. } => *is_generator, + FunctionHeader::ChadFunctionHeader { .. } => true, } } @@ -567,23 +585,28 @@ pub(crate) fn get_method_name( Ok((function_header, key)) } -// #[cfg(feature = "full-typescript")] /// None if overloaded (declaration only) +#[cfg(feature = "full-typescript")] #[apply(derive_ASTNode)] #[derive(Debug, Clone, PartialEq, visitable_derive::Visitable)] pub struct FunctionBody(pub Option); -// #[cfg(not(feature = "full-typescript"))] -// pub type FunctionBody = Block; +#[cfg(not(feature = "full-typescript"))] +pub type FunctionBody = Block; +#[cfg(feature = "full-typescript")] impl ASTNode for FunctionBody { fn get_position(&self) -> Span { self.0.as_ref().map_or(source_map::Nullable::NULL, |Block(_, pos)| *pos) } fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { - reader.skip(); - let body = if reader.starts_with('{') { Some(Block::from_reader(reader)?) } else { None }; + // If type annotations. Allow elided bodies for function overloading + let body = if reader.is_operator("{") || !reader.get_options().type_annotations { + Some(Block::from_reader(reader)?) 
+ } else { + None + }; Ok(Self(body)) } diff --git a/parser/src/lexer.rs b/parser/src/lexer.rs index 91c3078c..ddec0e11 100644 --- a/parser/src/lexer.rs +++ b/parser/src/lexer.rs @@ -1,6 +1,3 @@ -#![allow(clippy::as_conversions, clippy::cast_possible_truncation)] -#![allow(unused)] - use crate::{ errors::{ParseError, ParseErrors}, marker::Marker, @@ -436,14 +433,26 @@ impl<'a> Lexer<'a> { Err(()) } - // TODO proper error type - pub fn parse_string_literal(&mut self) -> Result<(&'a str, crate::Quoted), ()> { + pub fn starts_with_string_delimeter(&self) -> bool { + self.starts_with('"') || self.starts_with('\'') + } + + pub fn parse_string_literal(&mut self) -> Result<(&'a str, crate::Quoted), ParseError> { let current = self.get_current(); let mut chars = current.char_indices(); let quoted = match chars.next() { Some((_, '"')) => crate::Quoted::Double, Some((_, '\'')) => crate::Quoted::Single, - _ => return Err(()), + _ => { + return Err(ParseError::new( + ParseErrors::ExpectedOneOfItems { + expected: &["\"", "'"], + // TODO bad + found: self.get_current(), + }, + self.get_start().with_length(1), + )); + } }; let mut escaped = false; for (idx, chr) in chars { @@ -463,10 +472,16 @@ impl<'a> Lexer<'a> { } if let '\n' = chr { - return Err(()); + return Err(ParseError::new( + ParseErrors::NoNewLinesInString, + self.get_start().with_length(idx), + )); } } - Err(()) + Err(ParseError::new( + ParseErrors::UnexpectedEnd, + self.get_start().with_length(self.get_current().len()), + )) } pub fn starts_with_number(&self) -> bool { @@ -476,14 +491,10 @@ impl<'a> Lexer<'a> { .is_some_and(|b| (b'0'..=b'9').contains(&b) || *b == b'.') } - pub fn starts_with_string_delimeter(&self) -> bool { - self.starts_with('"') || self.starts_with('\'') - } - // TODO errors + some parts are weird pub fn parse_number_literal( &mut self, - ) -> Result<(crate::number::NumberRepresentation, u32), ()> { + ) -> Result<(crate::number::NumberRepresentation, u32), ParseError> { use std::str::FromStr; enum NumberLiteralType { @@ -510,7 +521,12 @@ impl<'a> Lexer<'a> { } Some('0'..='9') => NumberLiteralType::Decimal { fractional: false }, Some('.') => NumberLiteralType::Decimal { fractional: true }, - Some(_) | None => return Err(()), + Some(_) | None => { + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(1), + )) + } }; for (idx, chr) in chars { @@ -536,20 +552,29 @@ impl<'a> Lexer<'a> { } } else { // LexingErrors::NumberLiteralBaseSpecifierMustPrecededWithZero - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } '0'..='9' | 'a'..='f' | 'A'..='F' => match state { NumberLiteralType::BinaryLiteral => { if !matches!(chr, '0' | '1') { // (LexingErrors::InvalidNumeralItemBecauseOfLiteralKind) - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } NumberLiteralType::OctalLiteral => { if !matches!(chr, '0'..='7') { // (LexingErrors::InvalidNumeralItemBecauseOfLiteralKind) - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } // Handling for 'e' & 'E' @@ -560,13 +585,19 @@ impl<'a> Lexer<'a> { state = NumberLiteralType::Exponent; } else if !chr.is_ascii_digit() { // (LexingErrors::InvalidNumeralItemBecauseOfLiteralKind) - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } NumberLiteralType::Exponent => { if 
!chr.is_ascii_digit() { // (LexingErrors::InvalidNumeralItemBecauseOfLiteralKind) - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } // all above allowed @@ -585,13 +616,19 @@ impl<'a> Lexer<'a> { return Ok((number, length)); } else if current[..idx].ends_with(['_']) { // (LexingErrors::InvalidUnderscore) - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } else { *fractional = true; } } else { // (LexingErrors::NumberLiteralCannotHaveDecimalPoint); - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } '_' => { @@ -613,7 +650,10 @@ impl<'a> Lexer<'a> { }; if invalid { // (LexingErrors::InvalidUnderscore); - return Err(()); + return Err(ParseError::new( + ParseErrors::InvalidNumber, + self.get_start().with_length(idx), + )); } } 'n' if matches!(state, NumberLiteralType::Decimal { fractional: false }) => { @@ -651,10 +691,10 @@ impl<'a> Lexer<'a> { let number = crate::number::NumberRepresentation::from_str(current).expect("bad number"); let length = current.len() as u32; self.head += length; - return Ok((number, length)); + Ok((number, length)) } - pub fn parse_regex_literal(&mut self) -> Result<(&'a str, Option<&'a str>, usize), ()> { + pub fn parse_regex_literal(&mut self) -> Result<(&'a str, Option<&'a str>, usize), ParseError> { let mut escaped = false; let mut after_last_slash = false; let mut in_set = false; @@ -681,7 +721,10 @@ impl<'a> Lexer<'a> { in_set = false; } '\n' => { - todo!("new line in regex") + return Err(ParseError::new( + ParseErrors::InvalidRegularExpression, + self.get_start().with_length(idx), + )); } _ => { escaped = false; @@ -709,6 +752,18 @@ impl<'a> Lexer<'a> { Ok((regex, (!regex_flag.is_empty()).then_some(regex_flag), flag_content)) } + pub fn parse_comment_literal(&mut self, is_multiline: bool) -> Result<&str, ParseError> { + if is_multiline { + self.parse_until("*/").map_err(|()| { + // TODO might be a problem + let position = self.get_start().with_length(self.get_current().len()); + ParseError::new(ParseErrors::UnexpectedEnd, position) + }) + } else { + Ok(self.parse_until("\n").expect("Always should have found end of line or file")) + } + } + // TODO also can exit if there is `=` or `:` and = 0 in some examples pub fn after_brackets(&self) -> &'a str { let current = self.get_current(); @@ -888,6 +943,11 @@ impl<'a> Lexer<'a> { (false, None) } } + + // TODO test + pub fn next_item_span(&self) -> Span { + self.get_start().with_length(utilities::next_empty_occurance(self.get_current())) + } } pub(crate) mod utilities { @@ -910,12 +970,20 @@ pub(crate) mod utilities { 0 } - /// TODO lots more, also pub fn is_function_header(str: &str) -> bool { - str.starts_with("async ") || { - str.starts_with("function") - && !is_valid_identifier(str["function".len()..].chars().next().expect("TODO")) - } + let str = str.trim_start(); + // TODO + let extras = true; + str.starts_with("async ") + || { + str.starts_with("function") + && !str["function".len()..].chars().next().is_some_and(is_valid_identifier) + } || (extras && { + // TODO + after is "function" + str.starts_with("generator ") + || str.starts_with("worker ") + || str.starts_with("server ") + }) } /// TODO this could be set to collect, rather than breaking (https://github.com/kaleidawave/ezno/issues/203) @@ -937,7 +1005,7 @@ pub(crate) mod utilities { let current = reader.get_current(); let found = 
¤t[..self::next_empty_occurance(current)]; let position = reader.get_start().with_length(found.len()); - let reason = crate::ParseErrors::ExpectedOneOfKeywords { expected, found }; + let reason = crate::ParseErrors::ExpectedOneOfItems { expected, found }; crate::ParseError::new(reason, position) } } diff --git a/parser/src/lib.rs b/parser/src/lib.rs index e9a04b04..659b6801 100644 --- a/parser/src/lib.rs +++ b/parser/src/lib.rs @@ -393,7 +393,6 @@ impl ExpressionOrStatementPosition for ExpressionPosition { pub trait ListItem: Sized { type LAST; const LAST_PREFIX: Option<&'static str> = None; - const EMPTY: Option = None; #[allow(unused)] fn parse_last_item(reader: &mut crate::new::Lexer) -> ParseResult { @@ -413,19 +412,6 @@ pub(crate) fn bracketed_items_from_reader( // } let mut nodes: Vec = Vec::new(); loop { - if let Some(empty) = T::EMPTY { - // if matches!(next, TSXToken::Comma) || *next == end { - // if matches!(next, TSXToken::Comma) || (*next == end && !nodes.is_empty()) { - // nodes.push(empty); - // } - // let Token(token, s) = reader.next().unwrap(); - // if token == end { - // return Ok((nodes, None, s.get_end_after(token.length() as usize))); - // } - // continue; - // } - } - if reader.is_operator_advance(end) { return Ok((nodes, None)); } diff --git a/parser/src/modules.rs b/parser/src/modules.rs index 560fa724..950b72a0 100644 --- a/parser/src/modules.rs +++ b/parser/src/modules.rs @@ -42,7 +42,7 @@ impl ASTNode for Module { fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { let span = Span { start: 0, source: (), end: reader.get_current().len() as u32 }; let hashbang_comment = if reader.is_operator_advance("#!") { - let hashbang_comment = reader.parse_until("\n").expect("TODO"); + let hashbang_comment = reader.parse_comment_literal(false)?; Some(hashbang_comment.to_owned()) } else { None diff --git a/parser/src/property_key.rs b/parser/src/property_key.rs index 68ea1586..e1fed79d 100644 --- a/parser/src/property_key.rs +++ b/parser/src/property_key.rs @@ -115,11 +115,11 @@ impl ASTNode for PropertyKey { fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { let start = reader.get_start(); if reader.starts_with('"') || reader.starts_with('\'') { - let (content, quoted) = reader.parse_string_literal().expect("TODO"); + let (content, quoted) = reader.parse_string_literal()?; let position = start.with_length(content.len() + 2); Ok(Self::StringLiteral(content.to_owned(), quoted, position)) } else if reader.starts_with_number() { - let (value, length) = reader.parse_number_literal().expect("TODO"); + let (value, length) = reader.parse_number_literal()?; let position = start.with_length(length as usize); Ok(Self::NumberLiteral(value, position)) } else if reader.is_operator_advance("[") { diff --git a/parser/src/statements/if_statement.rs b/parser/src/statements/if_statement.rs index c66794a5..313d5d0a 100644 --- a/parser/src/statements/if_statement.rs +++ b/parser/src/statements/if_statement.rs @@ -56,11 +56,7 @@ impl ASTNode for IfStatement { if reader.is_one_of(&["//", "/*"]).is_some() { let is_multiline = reader.starts_with_str("/*"); reader.advance(2); - let _content = if is_multiline { - reader.parse_until("*/").expect("TODO").to_owned() - } else { - reader.parse_until("\n").expect("TODO").to_owned() - }; + let _content = reader.parse_comment_literal(is_multiline)?; continue; } diff --git a/parser/src/statements/mod.rs b/parser/src/statements/mod.rs index 06d43be2..a2250324 100644 --- a/parser/src/statements/mod.rs +++ b/parser/src/statements/mod.rs @@ 
-146,10 +146,10 @@ impl ASTNode for Statement { } else if reader.is_operator_advance(";") { Ok(Statement::AestheticSemiColon(start.with_length(1))) } else if reader.is_operator_advance("//") { - let content = reader.parse_until("\n").expect("TODO"); + let content = reader.parse_comment_literal(false)?; Ok(Statement::Comment(content.to_owned(), start.with_length(2 + content.len()))) } else if reader.is_operator_advance("/*") { - let content = reader.parse_until("*/").expect("TODO"); + let content = reader.parse_comment_literal(true)?; Ok(Statement::MultiLineComment( content.to_owned(), start.with_length(4 + content.len()), @@ -285,7 +285,7 @@ impl ASTNode for VarVariableStatement { fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { let start = reader.get_start(); - reader.expect_keyword("var").expect("TODO"); + let _ = reader.expect_keyword("var")?; let mut declarations = Vec::new(); loop { let value = VariableDeclarationItem::>::from_reader(reader)?; diff --git a/parser/src/statements/switch_statement.rs b/parser/src/statements/switch_statement.rs index 4d33f48e..8fe21f84 100644 --- a/parser/src/statements/switch_statement.rs +++ b/parser/src/statements/switch_statement.rs @@ -50,11 +50,8 @@ impl ASTNode for SwitchStatement { } else if reader.is_one_of(&["//", "/*"]).is_some() { let is_multiline = reader.starts_with_str("/*"); reader.advance(2); - let _content = if is_multiline { - reader.parse_until("*/").expect("TODO").to_owned() - } else { - reader.parse_until("\n").expect("TODO").to_owned() - }; + let _content = reader.parse_comment_literal(is_multiline)?; + // Skip for now continue; } else { return Err(crate::lexer::utilities::expected_one_of_keywords( diff --git a/parser/src/types/enum_declaration.rs b/parser/src/types/enum_declaration.rs index 9e33299b..8d0b59cf 100644 --- a/parser/src/types/enum_declaration.rs +++ b/parser/src/types/enum_declaration.rs @@ -29,11 +29,10 @@ impl ASTNode for EnumDeclaration { break; } // TODO temp - if reader.is_operator_advance("//") { - let _content = reader.parse_until("\n").expect("TODO"); - continue; - } else if reader.is_operator_advance("/*") { - let _content = reader.parse_until("*/").expect("TODO"); + if reader.is_one_of(&["//", "/*"]).is_some() { + let is_multiline = reader.starts_with_str("/*"); + reader.advance(2); + let _content = reader.parse_comment_literal(is_multiline)?; continue; } members.push(EnumMember::from_reader(reader)?); diff --git a/parser/src/types/interface.rs b/parser/src/types/interface.rs index c5a722b0..be6bc54d 100644 --- a/parser/src/types/interface.rs +++ b/parser/src/types/interface.rs @@ -299,11 +299,7 @@ impl ASTNode for InterfaceMember { let start = reader.get_start(); let is_multiline = comment_prefix == "/*"; reader.advance(2); - let content = if is_multiline { - reader.parse_until("*/").expect("TODO").to_owned() - } else { - reader.parse_until("\n").expect("TODO").to_owned() - }; + let content = reader.parse_comment_literal(is_multiline)?.to_owned(); let position = start.union(reader.get_end()); Ok(InterfaceMember::Comment(content, is_multiline, position)) } else { @@ -312,11 +308,11 @@ impl ASTNode for InterfaceMember { let name = if reader.is_operator_advance("[") { if reader.starts_with_string_delimeter() { - let (content, quoted) = reader.parse_string_literal().expect("TODO"); + let (content, quoted) = reader.parse_string_literal()?; let position = start.with_length(content.len() + 2); PropertyKey::StringLiteral(content.to_owned(), quoted, position) } else if reader.starts_with_number() { - 
let (value, length) = reader.parse_number_literal().expect("TODO"); + let (value, length) = reader.parse_number_literal()?; let position = start.with_length(length as usize); PropertyKey::NumberLiteral(value, position) } else { diff --git a/parser/src/types/type_annotations.rs b/parser/src/types/type_annotations.rs index 202daeb2..176ca6f0 100644 --- a/parser/src/types/type_annotations.rs +++ b/parser/src/types/type_annotations.rs @@ -584,16 +584,16 @@ impl TypeAnnotation { // name, // } } else if reader.starts_with_number() { - let (value, length) = reader.parse_number_literal().expect("TODO"); + let (value, length) = reader.parse_number_literal()?; let position = start.with_length(length as usize); Self::NumberLiteral(value, position) } else if reader.is_operator_advance("-") { - let (value, length) = reader.parse_number_literal().expect("TODO"); + let (value, length) = reader.parse_number_literal()?; let position = start.with_length(length as usize); // important negation here Self::NumberLiteral(value.neg(), position) } else if reader.starts_with('"') || reader.starts_with('\'') { - let (content, quoted) = reader.parse_string_literal().expect("TODO"); + let (content, quoted) = reader.parse_string_literal()?; let position = start.with_length(content.len() + 2); Self::StringLiteral(content.to_owned(), quoted, position) } else if reader.starts_with('@') { @@ -648,7 +648,11 @@ impl TypeAnnotation { let mut parts = Vec::new(); let result; loop { - let (content, found) = reader.parse_until_one_of(&["${", "`"]).unwrap(); + let (content, found) = reader.parse_until_one_of(&["${", "`"]).map_err(|()| { + // TODO might be a problem + let position = reader.get_start().with_length(reader.get_current().len()); + ParseError::new(ParseErrors::UnexpectedEnd, position) + })?; if let "${" = found { let expression = AnnotationWithBinder::from_reader(reader)?; reader.expect('}')?; @@ -965,7 +969,7 @@ impl ASTNode for TypeAnnotationFunctionParameters { let position = reader.get_start().with_length(2); let found = &reader.get_current()[..2]; let reason = - ParseErrors::ExpectedOneOfOperators { expected: &["?:", ":"], found }; + ParseErrors::ExpectedOneOfItems { expected: &["?:", ":"], found }; return Err(ParseError::new(reason, position)); } } else { @@ -1090,8 +1094,6 @@ impl ASTNode for TupleLiteralElement { } impl ListItem for TupleLiteralElement { - const EMPTY: Option = None; - type LAST = (); } diff --git a/parser/src/variable_fields.rs b/parser/src/variable_fields.rs index fcf85fff..bd37178d 100644 --- a/parser/src/variable_fields.rs +++ b/parser/src/variable_fields.rs @@ -232,10 +232,8 @@ impl ASTNode for ArrayDestructuringField { } fn from_reader(reader: &mut crate::new::Lexer) -> ParseResult { - // dbg!(reader.get_some_current()); - // Allowed - if reader.is_operator(",") || reader.is_operator("]") { + if reader.is_one_of_operators(&[",", "]"]).is_some() { Ok(Self::None) } else { let name = T::from_reader(reader)?; @@ -277,13 +275,12 @@ impl ASTNode for ArrayDestructuringField { } impl ListItem for WithComment> { - const EMPTY: Option = Some(WithComment::None(ArrayDestructuringField::None)); - const LAST_PREFIX: Option<&'static str> = Some("..."); type LAST = SpreadDestructuringField; fn parse_last_item(reader: &mut crate::new::Lexer) -> ParseResult { + reader.skip(); let start = reader.get_start(); reader.expect_operator("...")?; let node = T::from_reader(reader)?; @@ -379,7 +376,7 @@ impl ASTNode for ObjectDestructuringField { Ok(Self::Name(standard, annotation, default_value, position)) } else { 
- todo!("expect colon") + todo!("expect colon error") } } @@ -517,7 +514,7 @@ impl Visitable for WithComment, ) { - todo!() + todo!("visit array destructuring field") } fn visit_mut( @@ -527,7 +524,7 @@ impl Visitable for WithComment, ) { - todo!() + todo!("visit array destructuring field") } } @@ -596,7 +593,7 @@ impl Visitable for WithComment, ) { - todo!() + todo!("visit object destructuring field") } fn visit_mut( @@ -606,7 +603,7 @@ impl Visitable for WithComment, ) { - todo!() + todo!("visit object destructuring field") } } diff --git a/parser/tests/statements_and_declarations.rs b/parser/tests/statements_and_declarations.rs index f1c7c9f3..9204f9cd 100644 --- a/parser/tests/statements_and_declarations.rs +++ b/parser/tests/statements_and_declarations.rs @@ -156,16 +156,11 @@ export { name1 as default }; export default expression; export default function functionName() { } -export default class ClassName { -} -export default function* generatorFunctionName() { -} -export default function () { -} -export default class { -} -export default function* () { -} +export default class ClassName { } +export default function* generatorFunctionName() { } +export default function () { } +export default class { } +export default function* () { } export interface X { property: number } diff --git a/parser/tests/type_annotations.rs b/parser/tests/type_annotations.rs index 3b20d929..5b68e44d 100644 --- a/parser/tests/type_annotations.rs +++ b/parser/tests/type_annotations.rs @@ -86,7 +86,7 @@ function makeDate(timestamp: number): Date function makeDate(m: number, d: number, y: number): Date function makeDate(mOrTimestamp: number, d?: number, y?: number): Date {} class X { - constructor(a: string) + constructor(a: number | string) constructor(a: number) {} makeDate(timestamp: number): Date makeDate(m: number, d: number, y: number): Date