diff --git a/.github/workflows/performance-and-size.yml b/.github/workflows/performance-and-size.yml index 21330cb2..a3c964c7 100644 --- a/.github/workflows/performance-and-size.yml +++ b/.github/workflows/performance-and-size.yml @@ -32,7 +32,14 @@ jobs: env: CARGO_PROFILE_RELEASE_DEBUG: true - - name: Run parser (and stringer) performance + - name: Download files + run: | + curl https://gist.githubusercontent.com/kaleidawave/5dcb9ec03deef1161ebf0c9d6e4b88d8/raw/03156048e214af0ceee4005ba8b86f96690dcbb2/demo.ts > demo.ts + + curl https://esm.sh/v128/react-dom@18.2.0/es2022/react-dom.mjs > react.js + + - name: Run parser, minifier, stringer performance + shell: bash run: | curl https://esm.sh/v128/react-dom@18.2.0/es2022/react-dom.mjs > react.js @@ -42,18 +49,19 @@ jobs: echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - name: Run checker performance + shell: bash + if: false run: | - curl -O https://gist.githubusercontent.com/kaleidawave/5dcb9ec03deef1161ebf0c9d6e4b88d8/raw/03156048e214af0ceee4005ba8b86f96690dcbb2/demo.ts > demo.ts - echo "### Output">> $GITHUB_STEP_SUMMARY echo "\`\`\`shell">> $GITHUB_STEP_SUMMARY - ./target/release/ezno check demo.ts &>> $GITHUB_STEP_SUMMARY + ./target/release/ezno check demo.ts >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY echo "### Hyperfine">> $GITHUB_STEP_SUMMARY echo "\`\`\`shell">> $GITHUB_STEP_SUMMARY hyperfine './target/release/ezno check demo.ts' >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY + - name: Print (linux) binary size run: | echo "Binary is $(stat -c %s ./target/release/ezno) bytes" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/pull-request-bot.yml b/.github/workflows/pull-request-bot.yml index 69f2b312..9a9b7011 100644 --- a/.github/workflows/pull-request-bot.yml +++ b/.github/workflows/pull-request-bot.yml @@ -2,7 +2,6 @@ name: PR Checker on: pull_request_target: - branches: [main] types: [opened] jobs: diff --git a/checker/src/synthesis/expressions.rs b/checker/src/synthesis/expressions.rs index 18a271f2..0499a406 100644 --- a/checker/src/synthesis/expressions.rs +++ b/checker/src/synthesis/expressions.rs @@ -706,7 +706,7 @@ pub(super) fn synthesise_expression( SpecialOperators::InExpression { .. } => todo!(), SpecialOperators::InstanceOfExpression { .. } => todo!(), }, - Expression::DynamicImport { path, position } => todo!(), + Expression::DynamicImport { ..
} => todo!(), Expression::IsExpression(is_expr) => { Instance::RValue(synthesise_is_expression(is_expr, environment, checking_data)) } diff --git a/checker/src/synthesis/hoisting.rs b/checker/src/synthesis/hoisting.rs index 40a35da7..85539f7a 100644 --- a/checker/src/synthesis/hoisting.rs +++ b/checker/src/synthesis/hoisting.rs @@ -82,13 +82,16 @@ pub(crate) fn hoist_statements( environment.new_alias(&alias.type_name.name, to, &mut checking_data.types); } parser::Declaration::Import(import) => { - let kind = match &import.kind { - parser::declarations::import::ImportKind::Parts(parts) => { + let kind = match &import.items { + parser::declarations::import::ImportedItems::Parts(parts) => { crate::behavior::modules::ImportKind::Parts( - parts.iter().filter_map(|item| import_part_to_name_pair(item)), + parts + .iter() + .flatten() + .filter_map(|item| import_part_to_name_pair(item)), ) } - parser::declarations::import::ImportKind::All { under } => match under { + parser::declarations::import::ImportedItems::All { under } => match under { VariableIdentifier::Standard(under, position) => { crate::behavior::modules::ImportKind::All { under, @@ -97,9 +100,6 @@ pub(crate) fn hoist_statements( } VariableIdentifier::Cursor(_, _) => todo!(), }, - parser::declarations::import::ImportKind::SideEffect => { - crate::behavior::modules::ImportKind::SideEffect - } }; let default_import = import.default.as_ref().and_then(|default_identifier| { match default_identifier { @@ -109,47 +109,54 @@ pub(crate) fn hoist_statements( VariableIdentifier::Cursor(..) => None, } }); - environment.import_items( - &import.from, - import.position.clone(), - default_import, - kind, - checking_data, - false, - ); + if let Some(path) = import.from.get_path() { + environment.import_items( + path, + import.position.clone(), + default_import, + kind, + checking_data, + false, + ); + } } parser::Declaration::Export(export) => { if let ExportDeclaration::Variable { exported, position } = &export.on { // Imports & types match exported { Exportable::ImportAll { r#as, from } => { - environment.import_items::, _, _>( - from, - position.clone(), - None, - match r#as { + if let Some(path) = from.get_path() { + let kind = match r#as { Some(VariableIdentifier::Standard(name, pos)) => { ImportKind::All { under: name, position: pos.clone() } } Some(VariableIdentifier::Cursor(_, _)) => todo!(), None => ImportKind::Everything, - }, - checking_data, - true, - ); + }; + environment.import_items::, _, _>( + path, + position.clone(), + None, + kind, + checking_data, + true, + ); + } } Exportable::ImportParts { parts, from } => { let parts = parts.iter().filter_map(|item| export_part_to_name_pair(item)); - environment.import_items( - from, - position.clone(), - None, - crate::behavior::modules::ImportKind::Parts(parts), - checking_data, - true, - ); + if let Some(path) = from.get_path() { + environment.import_items( + path, + position.clone(), + None, + crate::behavior::modules::ImportKind::Parts(parts), + checking_data, + true, + ); + } } Exportable::TypeAlias(alias) => { let to = synthesise_type_annotation( @@ -386,6 +393,7 @@ fn import_part_to_name_pair(item: &parser::declarations::ImportPart) -> Option item, + _ => todo!(), }, r#as: &name, position: position.clone(), @@ -417,6 +425,7 @@ pub(super) fn export_part_to_name_pair( r#as: match alias { parser::declarations::ImportExportName::Reference(item) | parser::declarations::ImportExportName::Quoted(item, _) => item, + _ => todo!(), }, position: position.clone(), }) diff --git 
a/checker/src/synthesis/mod.rs b/checker/src/synthesis/mod.rs index f1a8aff6..2bcf7feb 100644 --- a/checker/src/synthesis/mod.rs +++ b/checker/src/synthesis/mod.rs @@ -34,7 +34,7 @@ pub(super) fn property_key_as_type TypeId { match property_key { - PropertyKey::StringLiteral(value, _) | PropertyKey::Ident(value, _, _) => { + PropertyKey::StringLiteral(value, ..) | PropertyKey::Ident(value, _, _) => { types.new_constant_type(Constant::String(value.clone())) } PropertyKey::NumberLiteral(number, _) => { diff --git a/checker/src/synthesis/type_annotations.rs b/checker/src/synthesis/type_annotations.rs index 29ac5b7d..a9d70c83 100644 --- a/checker/src/synthesis/type_annotations.rs +++ b/checker/src/synthesis/type_annotations.rs @@ -60,7 +60,7 @@ pub(super) fn synthesise_type_annotation( CommonTypes::Number => TypeId::NUMBER_TYPE, CommonTypes::Boolean => TypeId::BOOLEAN_TYPE, }, - TypeAnnotation::StringLiteral(value, _) => { + TypeAnnotation::StringLiteral(value, ..) => { checking_data.types.new_constant_type(Constant::String(value.clone())) } TypeAnnotation::NumberLiteral(value, _) => { diff --git a/parser/src/block.rs b/parser/src/block.rs index dbe13292..95973ee4 100644 --- a/parser/src/block.rs +++ b/parser/src/block.rs @@ -10,7 +10,7 @@ use crate::{ Visitable, }; -#[derive(Debug, Clone, PartialEq, Visitable, get_field_by_type::GetFieldByType)] +#[derive(Debug, Clone, PartialEq, Visitable, get_field_by_type::GetFieldByType, EnumFrom)] #[get_field_by_type_target(Span)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] diff --git a/parser/src/declarations/export.rs b/parser/src/declarations/export.rs index f11a0b51..0482103a 100644 --- a/parser/src/declarations/export.rs +++ b/parser/src/declarations/export.rs @@ -5,8 +5,8 @@ use crate::{ }; use super::{ - variable::VariableDeclaration, ClassDeclaration, ImportExportName, InterfaceDeclaration, - StatementFunction, TypeAlias, + variable::VariableDeclaration, ClassDeclaration, ImportExportName, ImportLocation, + InterfaceDeclaration, StatementFunction, TypeAlias, }; use get_field_by_type::GetFieldByType; @@ -39,8 +39,8 @@ pub enum Exportable { Interface(InterfaceDeclaration), TypeAlias(TypeAlias), Parts(Vec), - ImportAll { r#as: Option, from: String }, - ImportParts { parts: Vec, from: String }, + ImportAll { r#as: Option, from: ImportLocation }, + ImportParts { parts: Vec, from: ImportLocation }, } impl ASTNode for ExportDeclaration { @@ -74,24 +74,8 @@ impl ASTNode for ExportDeclaration { None }; reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; - let token = reader.next().ok_or_else(parse_lexing_error)?; - let (end, from) = match token { - Token( - TSXToken::DoubleQuotedStringLiteral(from) - | TSXToken::SingleQuotedStringLiteral(from), - start, - ) => { - let span = start.with_length(from.len() + 2); - (span, from) - } - token => { - let position = token.get_span(); - return Err(ParseError::new( - crate::ParseErrors::ExpectedStringLiteral { found: token.0 }, - position, - )); - } - }; + let (from, end) = + ImportLocation::from_token(reader.next().ok_or_else(parse_lexing_error)?)?; Ok(ExportDeclaration::Variable { exported: Exportable::ImportAll { r#as, from }, position: start.union(end), @@ -154,23 +138,9 @@ impl ASTNode for ExportDeclaration { )?; // Know this is 'from' from above let _ = reader.next().unwrap(); - let (end, from) = match reader.next().ok_or_else(parse_lexing_error)? 
{ - Token( - TSXToken::DoubleQuotedStringLiteral(from) - | TSXToken::SingleQuotedStringLiteral(from), - start, - ) => { - let span = start.with_length(from.len() + 2); - (span, from) - } - token => { - let position = token.get_span(); - return Err(ParseError::new( - crate::ParseErrors::ExpectedStringLiteral { found: token.0 }, - position, - )); - } - }; + let (from, end) = ImportLocation::from_token( + reader.next().ok_or_else(parse_lexing_error)?, + )?; Ok(Self::Variable { exported: Exportable::ImportParts { parts, from }, position: start.union(end), @@ -195,7 +165,7 @@ impl ASTNode for ExportDeclaration { )); } } - Token(TSXToken::Keyword(kw), _) if kw.is_function_heading() => { + Token(TSXToken::Keyword(kw), _) if kw.is_in_function_header() => { let function_declaration = StatementFunction::from_reader(reader, state, options)?; let position = start.union(function_declaration.get_position()); Ok(Self::Variable { @@ -264,7 +234,7 @@ impl ASTNode for ExportDeclaration { buf.push(' '); } buf.push_str("from \""); - buf.push_str(from); + from.to_string_from_buffer(buf); buf.push('"'); } Exportable::ImportParts { parts, from } => { @@ -281,7 +251,7 @@ impl ASTNode for ExportDeclaration { buf.push('}'); options.add_gap(buf); buf.push_str("from \""); - buf.push_str(from); + from.to_string_from_buffer(buf); buf.push('"'); } } @@ -335,7 +305,7 @@ impl ASTNode for ExportPart { { reader.next(); let token = reader.next().ok_or_else(parse_lexing_error)?; - let (alias, end) = ImportExportName::from_token(token)?; + let (alias, end) = ImportExportName::from_token(token, state)?; let position = pos.union(end); Self::NameWithAlias { name, alias, position } } else { @@ -371,6 +341,7 @@ impl ASTNode for ExportPart { buf.push_str(alias); buf.push(q.as_char()); } + ImportExportName::Cursor(_) => {} } } ExportPart::PrefixComment(comment, inner, _) => { diff --git a/parser/src/declarations/import.rs b/parser/src/declarations/import.rs index aae23b0f..fc499e75 100644 --- a/parser/src/declarations/import.rs +++ b/parser/src/declarations/import.rs @@ -4,19 +4,21 @@ use source_map::{End, Span}; use tokenizer_lib::{sized_tokens::TokenStart, Token, TokenReader}; use crate::{ - errors::parse_lexing_error, parse_bracketed, tokens::token_as_identifier, tsx_keywords, - ASTNode, Keyword, ParseError, ParseErrors, ParseOptions, ParseResult, Quoted, TSXKeyword, - TSXToken, VariableIdentifier, + errors::parse_lexing_error, parse_bracketed, throw_unexpected_token, + tokens::token_as_identifier, tsx_keywords, ASTNode, CursorId, Keyword, ParseOptions, + ParseResult, ParsingState, Quoted, TSXKeyword, TSXToken, VariableIdentifier, }; use visitable_derive::Visitable; +use super::ImportLocation; + +/// Side effects is represented under the Parts variant where the vector is empty #[derive(Debug, Clone, PartialEq, Eq, Visitable)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] -pub enum ImportKind { - Parts(Vec), +pub enum ImportedItems { + Parts(Option>), All { under: VariableIdentifier }, - SideEffect, } /// TODO a few more thing needed here @@ -25,11 +27,15 @@ pub enum ImportKind { #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] pub struct ImportDeclaration { + #[cfg(feature = "extras")] + pub deferred_keyword: Option>, pub type_keyword: Option>, pub default: Option, - pub kind: ImportKind, - pub from: String, + pub 
items: ImportedItems, + pub from: ImportLocation, pub position: Span, + #[cfg(feature = "extras")] + pub reversed: bool, } /// TODO default @@ -39,24 +45,21 @@ pub struct ImportDeclaration { pub enum ImportExportName { Reference(String), Quoted(String, Quoted), + #[cfg_attr(feature = "self-rust-tokenize", self_tokenize_field(0))] + Cursor(CursorId), } impl ImportExportName { - pub(crate) fn from_token(token: Token) -> ParseResult<(Self, End)> { - if let TSXToken::DoubleQuotedStringLiteral(_) | TSXToken::SingleQuotedStringLiteral(_) = - token.0 - { - let (start, alias, quoted) = match token { - Token(TSXToken::SingleQuotedStringLiteral(content), start) => { - (start, content, Quoted::Single) - } - Token(TSXToken::DoubleQuotedStringLiteral(content), start) => { - (start, content, Quoted::Double) - } - _ => unreachable!(), - }; + pub(crate) fn from_token( + token: Token, + state: &mut ParsingState, + ) -> ParseResult<(Self, End)> { + if let Token(TSXToken::StringLiteral(alias, quoted), start) = token { let with_length = start.get_end_after(alias.len() + 1); + state.constant_imports.push(alias.clone()); Ok((ImportExportName::Quoted(alias, quoted), with_length)) + } else if let Token(TSXToken::Cursor(id), start) = token { + Ok((Self::Cursor(id.into_cursor()), End(start.0))) } else { let (ident, pos) = token_as_identifier(token, "import alias")?; Ok((ImportExportName::Reference(ident), pos.get_end())) @@ -70,90 +73,25 @@ impl ASTNode for ImportDeclaration { state: &mut crate::ParsingState, options: &ParseOptions, ) -> ParseResult { - let start_position = reader.expect_next(TSXToken::Keyword(TSXKeyword::Import))?; - let type_keyword = reader - .conditional_next(|t| matches!(t, TSXToken::Keyword(TSXKeyword::Type))) - .map(|tok| Keyword::new(tok.get_span())); + let out = parse_import_specifier_and_parts(reader, state, options)?; - let peek = reader.peek(); - let default = if let Some(Token(TSXToken::OpenBrace | TSXToken::Multiply, _)) = peek { - None - } else if let Some(Token( - TSXToken::DoubleQuotedStringLiteral(_) | TSXToken::SingleQuotedStringLiteral(_), - _, - )) = peek - { - let Token( - TSXToken::SingleQuotedStringLiteral(from) - | TSXToken::DoubleQuotedStringLiteral(from), - pos, - ) = reader.next().unwrap() - else { - unreachable!() - }; - return Ok(ImportDeclaration { - position: start_position.union(pos.get_end_after(from.len() + 2)), - default: None, - kind: ImportKind::SideEffect, - type_keyword: None, - from, - }); - } else { - let default_identifier = VariableIdentifier::from_reader(reader, state, options)?; - if !matches!(reader.peek(), Some(Token(TSXToken::Keyword(TSXKeyword::From), _))) { - reader.expect_next(TSXToken::Comma)?; - } - Some(default_identifier) - }; - - let kind = if default.is_some() - && matches!(reader.peek(), Some(Token(TSXToken::Keyword(TSXKeyword::From), _))) - { - // From default keyword - ImportKind::Parts(Vec::new()) - } else if let Some(Token(TSXToken::Multiply, _)) = reader.peek() { - reader.next(); - let _as = reader.expect_next(TSXToken::Keyword(TSXKeyword::As))?; - let under = VariableIdentifier::from_reader(reader, state, options)?; - ImportKind::All { under } - } else { - let parts = parse_bracketed::( - reader, - state, - options, - Some(TSXToken::OpenBrace), - TSXToken::CloseBrace, - )? 
- .0; - ImportKind::Parts(parts) - }; + if !(matches!(out.items, ImportedItems::Parts(None)) && out.default.is_none()) { + reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; + } - reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; + let (from, end) = + ImportLocation::from_token(reader.next().ok_or_else(parse_lexing_error)?)?; - let token = reader.next().ok_or_else(parse_lexing_error)?; - let (end, from) = match token { - Token( - TSXToken::DoubleQuotedStringLiteral(from) - | TSXToken::SingleQuotedStringLiteral(from), - start, - ) => { - let span = start.with_length(from.len() + 2); - (span, from) - } - token => { - let position = token.get_span(); - return Err(ParseError::new( - ParseErrors::ExpectedStringLiteral { found: token.0 }, - position, - )); - } - }; Ok(ImportDeclaration { - default, - kind, - type_keyword, + default: out.default, + items: out.items, + type_keyword: out.type_keyword, + #[cfg(feature = "extras")] + deferred_keyword: out.deferred_keyword, from, - position: start_position.union(end), + position: out.start.union(end), + #[cfg(feature = "extras")] + reversed: false, }) } @@ -170,13 +108,16 @@ impl ASTNode for ImportDeclaration { if let Some(ref default) = self.default { buf.push(' '); - default.to_string_from_buffer(buf, options, depth) + default.to_string_from_buffer(buf, options, depth); + if matches!(self.items, ImportedItems::Parts(None)) { + buf.push(' '); + } } else { options.add_gap(buf); } - match self.kind { - ImportKind::All { ref under } => { + match self.items { + ImportedItems::All { ref under } => { if self.default.is_some() { buf.push_str(", "); } @@ -184,39 +125,33 @@ impl ASTNode for ImportDeclaration { under.to_string_from_buffer(buf, options, depth); buf.push(' '); } - ImportKind::SideEffect => { - buf.push('"'); - buf.push_str(&self.from); - buf.push('"'); - return; - } - ImportKind::Parts(ref parts) => { - if !parts.is_empty() { - if self.default.is_some() { - buf.push_str(", "); - } - buf.push('{'); - options.add_gap(buf); - for (at_end, part) in parts.iter().endiate() { - part.to_string_from_buffer(buf, options, depth); - if !at_end { - buf.push(','); - options.add_gap(buf); + ImportedItems::Parts(ref parts) => { + if let Some(parts) = parts { + if !parts.is_empty() { + if self.default.is_some() { + buf.push_str(", "); + } + buf.push('{'); + options.add_gap(buf); + for (at_end, part) in parts.iter().endiate() { + part.to_string_from_buffer(buf, options, depth); + if !at_end { + buf.push(','); + options.add_gap(buf); + } } + options.add_gap(buf); + buf.push('}'); + options.add_gap(buf); } - options.add_gap(buf); - buf.push('}'); - options.add_gap(buf); - } else if self.default.is_some() { - buf.push(' '); } } } - buf.push_str("from"); - options.add_gap(buf); - buf.push('"'); - buf.push_str(&self.from); - buf.push('"'); + if !(matches!(self.items, ImportedItems::Parts(None)) && self.default.is_none()) { + buf.push_str("from"); + options.add_gap(buf); + } + self.from.to_string_from_buffer(buf); } fn get_position(&self) -> &Span { @@ -224,6 +159,121 @@ impl ASTNode for ImportDeclaration { } } +impl ImportDeclaration { + #[cfg(feature = "extras")] + pub fn reversed_from_reader( + reader: &mut impl TokenReader, + state: &mut crate::ParsingState, + options: &ParseOptions, + ) -> ParseResult { + let start = reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; + + let (from, _end) = + ImportLocation::from_token(reader.next().ok_or_else(parse_lexing_error)?)?; + + let out = parse_import_specifier_and_parts(reader, state, options)?; + + 
Ok(ImportDeclaration { + default: out.default, + items: out.items, + type_keyword: out.type_keyword, + #[cfg(feature = "extras")] + deferred_keyword: out.deferred_keyword, + from, + position: start.union(out.end), + reversed: true, + }) + } +} + +pub(crate) struct PartsResult { + pub start: source_map::Start, + #[cfg(feature = "extras")] + pub deferred_keyword: Option>, + pub type_keyword: Option>, + pub default: Option, + pub items: ImportedItems, + pub end: source_map::End, +} + +pub(crate) fn parse_import_specifier_and_parts( + reader: &mut impl TokenReader, + state: &mut ParsingState, + options: &ParseOptions, +) -> Result { + let start = reader.expect_next(TSXToken::Keyword(TSXKeyword::Import))?; + + #[cfg(feature = "extras")] + let deferred_keyword = reader + .conditional_next(|t| matches!(t, TSXToken::Keyword(TSXKeyword::Deferred))) + .map(|tok| Keyword::new(tok.get_span())); + + let type_keyword = reader + .conditional_next(|t| matches!(t, TSXToken::Keyword(TSXKeyword::Type))) + .map(|tok| Keyword::new(tok.get_span())); + + let peek = reader.peek(); + + let default = if let Some(Token( + TSXToken::OpenBrace + | TSXToken::Multiply + | TSXToken::StringLiteral(..) + | TSXToken::Cursor(_), + _, + )) = peek + { + None + } else { + let default_identifier = VariableIdentifier::from_reader(reader, state, options)?; + if reader.conditional_next(|t| matches!(t, TSXToken::Comma)).is_some() { + Some(default_identifier) + } else { + let end = default_identifier.get_position().get_end(); + return Ok(PartsResult { + start, + #[cfg(feature = "extras")] + deferred_keyword, + type_keyword, + default: Some(default_identifier), + items: ImportedItems::Parts(None), + end, + }); + } + }; + + let peek = reader.peek(); + let (items, end) = if let Some(Token(TSXToken::Multiply, _)) = peek { + reader.next(); + let _as = reader.expect_next(TSXToken::Keyword(TSXKeyword::As))?; + let under = VariableIdentifier::from_reader(reader, state, options)?; + let end = under.get_position().get_end(); + (ImportedItems::All { under }, end) + } else if let Some(Token(TSXToken::OpenBrace, _)) = peek { + let (parts, end) = parse_bracketed::( + reader, + state, + options, + Some(TSXToken::OpenBrace), + TSXToken::CloseBrace, + )?; + (ImportedItems::Parts(Some(parts)), end) + } else if let Some(Token(TSXToken::StringLiteral(..), _)) = peek { + (ImportedItems::Parts(None), start.get_end_after(6)) + } else { + return throw_unexpected_token(reader, &[TSXToken::Multiply, TSXToken::OpenBrace]); + }; + + Ok(PartsResult { + start, + #[cfg(feature = "extras")] + deferred_keyword, + type_keyword, + default, + items, + end, + }) +} + /// #[derive(Debug, Clone, PartialEq, Eq, Visitable, GetFieldByType)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] @@ -258,24 +308,14 @@ impl ASTNode for ImportPart { }; Ok(Self::PrefixComment(comment, under, position)) } else { - let (alias, alias_pos) = if let TSXToken::DoubleQuotedStringLiteral(_) - | TSXToken::SingleQuotedStringLiteral(_) = token.0 - { - let (start, alias, quoted) = match token { - Token(TSXToken::SingleQuotedStringLiteral(content), start) => { - (start, content, Quoted::Single) - } - Token(TSXToken::DoubleQuotedStringLiteral(content), start) => { - (start, content, Quoted::Double) - } - _ => unreachable!(), + let (alias, alias_pos) = + if let Token(TSXToken::StringLiteral(alias, quoted), start) = token { + let with_length = start.with_length(alias.len() + 2); + (ImportExportName::Quoted(alias, quoted), with_length) + } else { + let (ident, 
pos) = token_as_identifier(token, "import alias")?; + (ImportExportName::Reference(ident), pos) }; - let with_length = start.with_length(alias.len() + 1); - (ImportExportName::Quoted(alias, quoted), with_length) - } else { - let (ident, pos) = token_as_identifier(token, "import alias")?; - (ImportExportName::Reference(ident), pos) - }; let mut value = match alias { ImportExportName::Quoted(..) => { reader.expect_next(TSXToken::Keyword(TSXKeyword::As))?; @@ -303,6 +343,10 @@ impl ASTNode for ImportPart { Self::Name(VariableIdentifier::Standard(reference, alias_pos)) } } + ImportExportName::Cursor(_id) => { + todo!("cursor id change") + // Self::Name(VariableIdentifier::Cursor(id, pos)) + } }; while let Some(Token(TSXToken::MultiLineComment(_), _)) = reader.peek() { let Some(Token(TSXToken::MultiLineComment(c), start)) = reader.next() else { @@ -331,6 +375,7 @@ impl ASTNode for ImportPart { buf.push_str(alias); buf.push(q.as_char()); } + ImportExportName::Cursor(_) => {} } buf.push_str(" as "); buf.push_str(name); diff --git a/parser/src/declarations/mod.rs b/parser/src/declarations/mod.rs index d337a09d..0afc1a58 100644 --- a/parser/src/declarations/mod.rs +++ b/parser/src/declarations/mod.rs @@ -6,8 +6,8 @@ use visitable_derive::Visitable; use crate::{ errors::parse_lexing_error, extensions::decorators, throw_unexpected_token_with_token, - Decorated, Keyword, ParseError, ParseErrors, StatementPosition, TSXKeyword, TSXToken, - TypeDefinitionModuleDeclaration, + CursorId, Decorated, Keyword, ParseError, ParseErrors, Quoted, StatementPosition, TSXKeyword, + TSXToken, TypeDefinitionModuleDeclaration, }; pub use self::{ @@ -79,12 +79,63 @@ impl Declaration { ); #[cfg(feature = "extras")] - return result || matches!(token, Some(Token(TSXToken::Keyword(TSXKeyword::Generator), _))); + return result + || matches!(token, Some(Token(TSXToken::Keyword(kw), _)) if kw.is_in_function_header()) + || (matches!(token, Some(Token(TSXToken::Keyword(TSXKeyword::From), _))) + && matches!(reader.peek_n(1), Some(Token(TSXToken::StringLiteral(..), _)))); + #[cfg(not(feature = "extras"))] return result; } } +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] +#[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] +pub enum ImportLocation { + Quoted(String, Quoted), + #[cfg_attr(feature = "self-rust-tokenize", self_tokenize_field(0))] + Cursor(CursorId), +} + +impl ImportLocation { + pub(crate) fn from_token( + token: Token, + ) -> crate::ParseResult<(Self, source_map::End)> { + if let Token(TSXToken::StringLiteral(content, quoted), start) = token { + let with_length = start.get_end_after(content.len() + 1); + Ok((ImportLocation::Quoted(content, quoted), with_length)) + } else if let Token(TSXToken::Cursor(id), start) = token { + Ok((Self::Cursor(id.into_cursor()), source_map::End(start.0))) + } else { + Err(ParseError::new( + ParseErrors::ExpectedStringLiteral { found: token.0 }, + token.1.with_length(0), + )) + } + } + + pub(crate) fn to_string_from_buffer(&self, buf: &mut T) { + match self { + ImportLocation::Quoted(inner, quoted) => { + buf.push(quoted.as_char()); + buf.push_str(inner); + buf.push(quoted.as_char()); + } + ImportLocation::Cursor(_) => {} + } + } + + /// Can be None if self is a cursor point + pub fn get_path(&self) -> Option<&str> { + if let Self::Quoted(name, _) = self { + Some(name) + } else { + None + } + } +} + impl crate::ASTNode for Declaration { fn from_reader( reader: &mut impl 
tokenizer_lib::TokenReader, @@ -116,7 +167,7 @@ impl crate::ASTNode for Declaration { .map(|on| Declaration::Enum(Decorated::new(decorators, on))) } #[cfg(feature = "extras")] - TSXToken::Keyword(TSXKeyword::Generator) if options.generator_keyword => { + TSXToken::Keyword(ref kw) if kw.is_in_function_header() => { let function = StatementFunction::from_reader(reader, state, options)?; Ok(Declaration::Function(Decorated::new(decorators, function))) } @@ -141,6 +192,10 @@ impl crate::ASTNode for Declaration { TSXToken::Keyword(TSXKeyword::Import) => { ImportDeclaration::from_reader(reader, state, options).map(Into::into) } + #[cfg(feature = "extras")] + TSXToken::Keyword(TSXKeyword::From) => { + ImportDeclaration::reversed_from_reader(reader, state, options).map(Into::into) + } TSXToken::Keyword(TSXKeyword::Interface) => { InterfaceDeclaration::from_reader(reader, state, options) .map(|on| Declaration::Interface(Decorated::new(decorators, on))) diff --git a/parser/src/expressions/arrow_function.rs b/parser/src/expressions/arrow_function.rs index 64f37c9e..e129e3c7 100644 --- a/parser/src/expressions/arrow_function.rs +++ b/parser/src/expressions/arrow_function.rs @@ -66,7 +66,13 @@ impl FunctionBased for ArrowFunctionBase { additionally: None, position: position.clone(), }]; - Ok(FunctionParameters { parameters, rest_parameter: None, position }) + Ok(FunctionParameters { + parameters, + rest_parameter: None, + position, + this_type: None, + super_type: None, + }) } } } @@ -128,6 +134,8 @@ impl ArrowFunction { parameters, rest_parameter: None, position: first_parameter.1, + this_type: None, + super_type: None, }, return_type: None, type_parameters: None, diff --git a/parser/src/expressions/assignments.rs b/parser/src/expressions/assignments.rs index ac25fdf4..486f042b 100644 --- a/parser/src/expressions/assignments.rs +++ b/parser/src/expressions/assignments.rs @@ -107,6 +107,17 @@ impl TryFrom for VariableOrPropertyAccess { Expression::Index { indexer, position, indexee, is_optional: false } => { Ok(Self::Index { indexer, position, indexee }) } + // Yah weird. 
Recursion is fine + Expression::ParenthesizedExpression(inner, _) => { + if let MultipleExpression::Single(expression) = *inner { + TryFrom::try_from(expression) + } else { + Err(ParseError::new( + crate::ParseErrors::InvalidLHSAssignment, + inner.get_position().clone(), + )) + } + } expression => Err(ParseError::new( crate::ParseErrors::InvalidLHSAssignment, expression.get_position().clone(), diff --git a/parser/src/expressions/mod.rs b/parser/src/expressions/mod.rs index c6f268f8..e474096e 100644 --- a/parser/src/expressions/mod.rs +++ b/parser/src/expressions/mod.rs @@ -118,6 +118,7 @@ pub enum Expression { NewTarget(Span), DynamicImport { path: Box<Expression>, + options: Option<Box<Expression>>, position: Span, }, PropertyAccess { @@ -230,13 +231,9 @@ impl Expression { position: Span { start: position.0, end: position.0, source: () }, }); } - Token(TSXToken::SingleQuotedStringLiteral(content), start) => { + Token(TSXToken::StringLiteral(content, quoted), start) => { let position = start.with_length(content.len() + 2); - Expression::StringLiteral(content, Quoted::Single, position) - } - Token(TSXToken::DoubleQuotedStringLiteral(content), start) => { - let position = start.with_length(content.len() + 2); - Expression::StringLiteral(content, Quoted::Double, position) + Expression::StringLiteral(content, quoted, position) } Token(TSXToken::NumberLiteral(value), start) => { let position = start.with_length(value.len()); @@ -268,6 +265,27 @@ impl Expression { t @ Token(TSXToken::Keyword(TSXKeyword::This), _) => { Expression::ThisReference(t.get_span()) } + Token(TSXToken::Keyword(TSXKeyword::Import), start) => { + let _ = reader.expect_next(TSXToken::OpenParentheses)?; + let path = Expression::from_reader(reader, state, options)?; + if let Expression::StringLiteral(path, ..) = &path { + state.constant_imports.push(path.clone()); + } else { + // TODO warning dynamic + } + let options = if reader.conditional_next(|t| matches!(t, TSXToken::Comma)).is_some() + { + Some(Box::new(Expression::from_reader(reader, state, options)?)) + } else { + None + }; + let end = reader.expect_next(TSXToken::CloseParentheses)?; + Expression::DynamicImport { + path: Box::new(path), + options, + position: start.union(end.get_end_after(1)), + } + } t @ Token(TSXToken::Keyword(TSXKeyword::Super), _) => { let _super_position = t.get_span(); let token = reader.next().unwrap(); @@ -513,7 +531,7 @@ impl Expression { )?; Expression::TemplateLiteral(template_literal) } - Token(TSXToken::Keyword(kw), start) if kw.is_function_heading() => { + Token(TSXToken::Keyword(kw), start) if kw.is_in_function_header() => { let token = Token(TSXToken::Keyword(kw), start); let (async_keyword, token) = if let Token(TSXToken::Keyword(TSXKeyword::Async), _) = token { @@ -523,7 +541,7 @@ impl Expression { }; if async_keyword.is_some() - && !matches!(token, Token(TSXToken::Keyword(ref kw), _) if kw.is_function_heading()) + && !matches!(token, Token(TSXToken::Keyword(ref kw), _) if kw.is_in_function_header()) { if let Token(TSXToken::OpenParentheses, start) = token { let function = ArrowFunction::from_reader_sub_open_paren( @@ -1739,6 +1757,8 @@ impl Expression { parameters: Default::default(), rest_parameter: Default::default(), position: position.clone(), + this_type: None, + super_type: None, }, return_type: None, type_parameters: None, diff --git a/parser/src/functions.rs b/parser/src/functions.rs index 74427cec..36c6d714 100644 --- a/parser/src/functions.rs +++ b/parser/src/functions.rs @@ -424,4 +424,12 @@ impl FunctionHeader { FunctionHeader::ChadFunctionHeader {
async_keyword, .. } => async_keyword.is_some(), } } + + #[cfg(feature = "extras")] + pub fn get_location(&self) -> Option<&FunctionLocationModifier> { + match self { + FunctionHeader::VirginFunctionHeader { location, .. } + | FunctionHeader::ChadFunctionHeader { location, .. } => location.as_ref(), + } + } } diff --git a/parser/src/lexer.rs b/parser/src/lexer.rs index 66c5a499..04391e84 100644 --- a/parser/src/lexer.rs +++ b/parser/src/lexer.rs @@ -5,7 +5,7 @@ use super::{Span, TSXToken}; use crate::{ cursor::EmptyCursorId, errors::LexingErrors, html_tag_contains_literal_content, - html_tag_is_self_closing, + html_tag_is_self_closing, Quoted, }; use tokenizer_lib::{sized_tokens::TokenStart, Token, TokenSender}; @@ -396,8 +396,9 @@ pub fn lex_script( return_err!(LexingErrors::NewLineInStringLiteral); } '\'' if !*double_quoted && !*escaped => { - push_token!(TSXToken::SingleQuotedStringLiteral( - script[(start + 1)..idx].to_owned() + push_token!(TSXToken::StringLiteral( + script[(start + 1)..idx].to_owned(), + Quoted::Single )); state = LexingState::None; start = idx + 1; @@ -405,8 +406,9 @@ pub fn lex_script( continue; } '"' if *double_quoted && !*escaped => { - push_token!(TSXToken::DoubleQuotedStringLiteral( - script[(start + 1)..idx].to_owned() + push_token!(TSXToken::StringLiteral( + script[(start + 1)..idx].to_owned(), + Quoted::Double )); state = LexingState::None; start = idx + 1; diff --git a/parser/src/lib.rs b/parser/src/lib.rs index 1b93f6dc..41c79df6 100644 --- a/parser/src/lib.rs +++ b/parser/src/lib.rs @@ -261,7 +261,12 @@ pub fn lex_and_parse_script( let lex_options = options.get_lex_options(); let length = script.len() as u32; let parsing_thread = std::thread::spawn(move || { - let mut state = ParsingState { line_starts, source, length }; + let mut state = ParsingState { + line_starts, + source, + length_of_source: length, + constant_imports: Default::default(), + }; let res = T::from_reader(&mut reader, &mut state, &options); if res.is_ok() { reader.expect_next(TSXToken::EOS)?; @@ -295,7 +300,12 @@ pub fn lex_and_parse_script( return Err(ParseError::new(reason, pos)); } - let mut state = ParsingState { line_starts, length: script.len() as u32, source }; + let mut state = ParsingState { + line_starts, + length_of_source: script.len() as u32, + source, + constant_imports: Default::default(), + }; let res = T::from_reader(&mut queue, &mut state, &options); if res.is_ok() { queue.expect_next(TSXToken::EOS)?; @@ -322,7 +332,9 @@ pub(crate) fn throw_unexpected_token_with_token( pub struct ParsingState { pub(crate) line_starts: source_map::LineStarts, pub(crate) source: source_map::SourceId, - pub(crate) length: u32, + pub(crate) length_of_source: u32, + /// TODO: make this a multithreaded channel + record whether dynamic imports exist + pub(crate) constant_imports: Vec<String>, } /// A keyword @@ -760,6 +772,25 @@ impl MethodHeader { } } + pub fn is_async(&self) -> bool { + match self { + MethodHeader::GeneratorStar(async_kw, _) => async_kw.is_some(), + #[cfg(feature = "extras")] + MethodHeader::Generator(async_kw, _) => async_kw.is_some(), + MethodHeader::Async(_) => true, + _ => false, + } + } + + pub fn is_generator(&self) -> bool { + match self { + MethodHeader::GeneratorStar(..) => true, + #[cfg(feature = "extras")] + MethodHeader::Generator(..)
=> true, + _ => false, + } + } + // pub(crate) fn get_end(&self) -> source_map::End { // match self { // MethodHeader::Get(kw) => kw.1.get_end(), diff --git a/parser/src/modules.rs b/parser/src/modules.rs index afbf23ca..1c9be3f9 100644 --- a/parser/src/modules.rs +++ b/parser/src/modules.rs @@ -63,7 +63,7 @@ impl ASTNode for Module { state: &mut crate::ParsingState, options: &ParseOptions, ) -> ParseResult { - let end = state.length; + let end = state.length_of_source; parse_statements_and_declarations(reader, state, options).map(|statements| Module { source: state.source, items: statements, @@ -245,7 +245,7 @@ impl ASTNode for TypeDefinitionModule { } } } - let end = state.length; + let end = state.length_of_source; Ok(Self { declarations, source: state.source, diff --git a/parser/src/parameters.rs b/parser/src/parameters.rs index a149bd3c..1cb6e46b 100644 --- a/parser/src/parameters.rs +++ b/parser/src/parameters.rs @@ -1,4 +1,4 @@ -use crate::TSXToken; +use crate::{TSXKeyword, TSXToken}; use derive_partial_eq_extras::PartialEqExtras; use iterator_endiate::EndiateIteratorExt; use source_map::Span; @@ -47,6 +47,8 @@ pub struct SpreadParameter { #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] pub struct FunctionParameters { + pub this_type: Option<(TypeAnnotation, Span)>, + pub super_type: Option<(TypeAnnotation, Span)>, pub parameters: Vec, pub rest_parameter: Option>, #[partial_eq_ignore] @@ -117,6 +119,8 @@ impl FunctionParameters { options: &crate::ParseOptions, start: TokenStart, ) -> Result { + let mut this_type = None; + let mut super_type = None; let mut parameters = Vec::new(); let mut rest_parameter = None; @@ -145,6 +149,20 @@ impl FunctionParameters { type_annotation, })); break; + } else if let Some(Token(_, start)) = reader.conditional_next(|tok| { + parameters.len() == 0 && matches!(tok, TSXToken::Keyword(TSXKeyword::This)) + }) { + reader.expect_next(TSXToken::Colon)?; + let type_annotation = TypeAnnotation::from_reader(reader, state, options)?; + let position = start.union(type_annotation.get_position()); + this_type = Some((type_annotation, position)); + } else if let Some(Token(_, start)) = reader.conditional_next(|tok| { + parameters.len() == 0 && matches!(tok, TSXToken::Keyword(TSXKeyword::Super)) + }) { + reader.expect_next(TSXToken::Colon)?; + let type_annotation = TypeAnnotation::from_reader(reader, state, options)?; + let position = start.union(type_annotation.get_position()); + super_type = Some((type_annotation, position)); } else { let name = WithComment::>::from_reader( reader, state, options, @@ -213,6 +231,12 @@ impl FunctionParameters { } } let close = reader.expect_next_get_end(TSXToken::CloseParentheses)?; - Ok(FunctionParameters { position: start.union(close), parameters, rest_parameter }) + Ok(FunctionParameters { + position: start.union(close), + parameters, + rest_parameter, + this_type, + super_type, + }) } } diff --git a/parser/src/property_key.rs b/parser/src/property_key.rs index c2b7d07b..d4f827d2 100644 --- a/parser/src/property_key.rs +++ b/parser/src/property_key.rs @@ -1,4 +1,4 @@ -use crate::TSXToken; +use crate::{Quoted, TSXToken}; use source_map::Span; use std::fmt::Debug; use tokenizer_lib::{sized_tokens::TokenReaderWithTokenEnds, Token, TokenReader}; @@ -62,7 +62,7 @@ impl PropertyKeyKind for PublicOrPrivate { #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] pub enum PropertyKey { Ident(String, Span, T::Private), 
- StringLiteral(String, Span), + StringLiteral(String, Quoted, Span), NumberLiteral(NumberRepresentation, Span), /// Includes anything in the `[...]` maybe a symbol Computed(Box, Span), @@ -72,7 +72,7 @@ impl PropertyKey { pub fn get_position(&self) -> &Span { match self { PropertyKey::Ident(_, pos, _) - | PropertyKey::StringLiteral(_, pos) + | PropertyKey::StringLiteral(_, _, pos) | PropertyKey::NumberLiteral(_, pos) | PropertyKey::Computed(_, pos) => pos, } @@ -82,7 +82,9 @@ impl PropertyKey { impl PartialEq for PropertyKey { fn eq(&self, other: &str) -> bool { match self { - PropertyKey::Ident(name, _, _) | PropertyKey::StringLiteral(name, _) => name == other, + PropertyKey::Ident(name, _, _) | PropertyKey::StringLiteral(name, _, _) => { + name == other + } PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => false, } } @@ -92,7 +94,7 @@ impl ASTNode for PropertyKey { fn get_position(&self) -> &Span { match self { PropertyKey::Ident(_, pos, _) - | PropertyKey::StringLiteral(_, pos) + | PropertyKey::StringLiteral(_, _, pos) | PropertyKey::NumberLiteral(_, pos) | PropertyKey::Computed(_, pos) => pos, } @@ -104,10 +106,9 @@ impl ASTNode for PropertyKey { options: &ParseOptions, ) -> ParseResult { match reader.next().ok_or_else(parse_lexing_error)? { - Token(TSXToken::DoubleQuotedStringLiteral(content), start) - | Token(TSXToken::SingleQuotedStringLiteral(content), start) => { + Token(TSXToken::StringLiteral(content, quoted), start) => { let position = start.with_length(content.len() + 2); - Ok(Self::StringLiteral(content, position)) + Ok(Self::StringLiteral(content, quoted, position)) } Token(TSXToken::NumberLiteral(value), start) => { let position = start.with_length(value.len()); @@ -134,10 +135,10 @@ impl ASTNode for PropertyKey { match self { Self::Ident(ident, _pos, _) => buf.push_str(ident.as_str()), Self::NumberLiteral(number, _) => buf.push_str(&number.to_string()), - Self::StringLiteral(string, _) => { - buf.push('"'); + Self::StringLiteral(string, quoted, _) => { + buf.push(quoted.as_char()); buf.push_str(string.as_str()); - buf.push('"'); + buf.push(quoted.as_char()); } Self::Computed(expression, _) => { buf.push('['); diff --git a/parser/src/tokens.rs b/parser/src/tokens.rs index f1b8aa08..bd187099 100644 --- a/parser/src/tokens.rs +++ b/parser/src/tokens.rs @@ -9,7 +9,7 @@ use enum_variants_strings::EnumVariantsStrings; use source_map::Span; use tokenizer_lib::{sized_tokens::TokenStart, Token}; -use crate::ParseError; +use crate::{ParseError, Quoted}; /// All JS Tokens with extensions including TypeScript, JSX and more #[derive(Debug, FiniteAutomataConstructor, PartialEqExtras)] @@ -94,7 +94,7 @@ pub enum TSXToken { IdentLiteral(String), Keyword(TSXKeyword), NumberLiteral(String), - SingleQuotedStringLiteral(String), DoubleQuotedStringLiteral(String), + StringLiteral(String, Quoted), MultiLineComment(String), Comment(String), RegexLiteral(String), RegexFlagLiteral(String), TemplateLiteralStart, TemplateLiteralChunk(String), TemplateLiteralEnd, @@ -195,9 +195,9 @@ impl tokenizer_lib::sized_tokens::SizedToken for TSXToken { | TSXToken::RegexFlagLiteral(lit) => lit.len() as u32, TSXToken::MultiLineComment(comment) => comment.len() as u32 + 4, - TSXToken::SingleQuotedStringLiteral(comment) - | TSXToken::DoubleQuotedStringLiteral(comment) - | TSXToken::Comment(comment) => comment.len() as u32 + 2, + TSXToken::StringLiteral(comment, _) | TSXToken::Comment(comment) => { + comment.len() as u32 + 2 + } TSXToken::RegexLiteral(regex) => regex.len() as u32 + 2, 
TSXToken::Comma @@ -334,19 +334,26 @@ pub enum TSXKeyword { #[cfg(feature = "extras")] /// https://github.com/tc39/proposal-generator-arrow-functions#introduce-new-generator-keyword-for-both-function-and-arrow-function Generator, + + #[cfg(feature = "extras")] + Deferred } impl TSXKeyword { #[cfg(feature = "extras")] - pub(crate) fn is_function_heading(&self) -> bool { + pub(crate) fn is_in_function_header(&self) -> bool { matches!( self, - TSXKeyword::Function | TSXKeyword::Async | TSXKeyword::Module | TSXKeyword::Server + TSXKeyword::Function + | TSXKeyword::Async + | TSXKeyword::Module + | TSXKeyword::Server + | TSXKeyword::Generator ) } #[cfg(not(feature = "extras"))] - pub(crate) fn is_function_heading(&self) -> bool { + pub(crate) fn is_in_function_header(&self) -> bool { matches!(self, TSXKeyword::Function | TSXKeyword::Async) } } @@ -387,13 +394,6 @@ impl TSXToken { } } - pub fn is_string_literal(&self) -> bool { - matches!( - self, - TSXToken::SingleQuotedStringLiteral(_) | TSXToken::DoubleQuotedStringLiteral(_) - ) - } - /// Used for lexing regular expression and JSX literals differently pub fn is_expression_prefix(&self) -> bool { matches!( diff --git a/parser/src/types/interface.rs b/parser/src/types/interface.rs index 45b63333..7d9b7201 100644 --- a/parser/src/types/interface.rs +++ b/parser/src/types/interface.rs @@ -342,10 +342,9 @@ impl ASTNode for InterfaceMember { // Non literal property names and index type let Token(_, start) = reader.next().unwrap(); let name = match reader.next().ok_or_else(parse_lexing_error)? { - Token(TSXToken::SingleQuotedStringLiteral(name), start) - | Token(TSXToken::DoubleQuotedStringLiteral(name), start) => { + Token(TSXToken::StringLiteral(name, quoted), start) => { let position = start.with_length(name.len() + 2); - PropertyKey::StringLiteral(name, position) + PropertyKey::StringLiteral(name, quoted, position) } Token(TSXToken::NumberLiteral(value), start) => { let position = start.with_length(value.len()); diff --git a/parser/src/types/type_annotations.rs b/parser/src/types/type_annotations.rs index 86f878e4..dca1247d 100644 --- a/parser/src/types/type_annotations.rs +++ b/parser/src/types/type_annotations.rs @@ -4,7 +4,7 @@ use crate::{ extensions::decorators::Decorated, CursorId, Decorator, Keyword, ParseResult, VariableField, VariableFieldInTypeAnnotation, WithComment, }; -use crate::{parse_bracketed, throw_unexpected_token_with_token, to_string_bracketed}; +use crate::{parse_bracketed, throw_unexpected_token_with_token, to_string_bracketed, Quoted}; use derive_partial_eq_extras::PartialEqExtras; use iterator_endiate::EndiateIteratorExt; use tokenizer_lib::sized_tokens::{TokenEnd, TokenReaderWithTokenEnds, TokenStart}; @@ -40,7 +40,7 @@ pub enum TypeAnnotation { /// Intersection e.g. `c & d` Intersection(Vec, Span), /// String literal e.g. `"foo"` - StringLiteral(String, Span), + StringLiteral(String, Quoted, Span), /// Number literal e.g. `45` NumberLiteral(NumberRepresentation, Span), /// Boolean literal e.g. 
`true` @@ -298,10 +298,10 @@ impl ASTNode for TypeAnnotation { Self::NumberLiteral(value, _) => { buf.push_str(&value.to_string()); } - Self::StringLiteral(expression, _) => { - buf.push('"'); + Self::StringLiteral(expression, quoted, _) => { + buf.push(quoted.as_char()); buf.push_str(expression.as_str()); - buf.push('"'); + buf.push(quoted.as_char()); } Self::Union(union_members, _) => { for (at_end, member) in union_members.iter().endiate() { @@ -431,10 +431,9 @@ impl TypeAnnotation { let pos = start.with_length(num.len()); Self::NumberLiteral(num.parse::().unwrap(), pos) } - Token(TSXToken::SingleQuotedStringLiteral(content), start) - | Token(TSXToken::DoubleQuotedStringLiteral(content), start) => { + Token(TSXToken::StringLiteral(content, quoted), start) => { let pos = start.with_length(content.len() + 2); - Self::StringLiteral(content, pos) + Self::StringLiteral(content, quoted, pos) } Token(TSXToken::At, pos) => { let decorator = Decorator::from_reader_sub_at_symbol(reader, state, options, pos)?; @@ -983,7 +982,7 @@ mod tests { fn literals() { assert_matches_ast!( "\"my_string\"", - TypeAnnotation::StringLiteral(Deref @ "my_string", span!(0, 11)) + TypeAnnotation::StringLiteral(Deref @ "my_string", Quoted::Double, span!(0, 11)) ); assert_matches_ast!( "45", diff --git a/parser/src/visiting.rs b/parser/src/visiting.rs index 3fe91845..996d3b5f 100644 --- a/parser/src/visiting.rs +++ b/parser/src/visiting.rs @@ -239,6 +239,7 @@ mod ast { crate::PropertyReference, crate::Quoted, crate::declarations::ImportExportName, + crate::declarations::ImportLocation, crate::PropertyKey, crate::PropertyKey ]; @@ -358,17 +359,15 @@ mod structures { ImmutableVariableOrPropertyPart::ClassName(name, _) => *name, ImmutableVariableOrPropertyPart::ObjectPropertyKey(property) => { match property.get_ast_ref() { - PropertyKey::Ident(ident, _, _) | PropertyKey::StringLiteral(ident, _) => { - Some(ident.as_str()) - } + PropertyKey::Ident(ident, _, _) + | PropertyKey::StringLiteral(ident, _, _) => Some(ident.as_str()), PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => None, } } ImmutableVariableOrPropertyPart::ClassPropertyKey(property) => { match property.get_ast_ref() { - PropertyKey::Ident(ident, _, _) | PropertyKey::StringLiteral(ident, _) => { - Some(ident.as_str()) - } + PropertyKey::Ident(ident, _, _) + | PropertyKey::StringLiteral(ident, _, _) => Some(ident.as_str()), PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => None, } } diff --git a/parser/tests/statements.rs b/parser/tests/statements.rs index aaeaeea0..2ccaea05 100644 --- a/parser/tests/statements.rs +++ b/parser/tests/statements.rs @@ -198,3 +198,47 @@ export { default as name1 } from "module-name""# // let output = module.to_string(&ToStringOptions::typescript()); // assert_eq!(output, input); } + +#[cfg(feature = "extras")] +#[test] +fn reversed_imports() { + let input = r#" +from "module-name" import defaultExport; +from "module-name" import * as name; +from "module-name" import { export1 }; +from "module-name" import { export1, export2 }; +from "module-name" import defaultExport, { export1, /* … */ }; +from "module-name" import defaultExport, * as name; + "# + .trim(); + + let module = + Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); + + eprintln!("Module: {:#?}", module); + + // let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); + // assert_eq!(output, input); +} + +#[cfg(feature = "extras")] +#[test] +fn function_custom_headers() { + let 
input = r#" +function a() {} +generator function a() {} +generator server function a() {} +generator server function a() {} +async server function a() {} +module function a() {} + "# + .trim(); + + let module = + Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); + + eprintln!("Module: {:#?}", module); + + // let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); + // assert_eq!(output, input); +} diff --git a/src/repl.rs b/src/repl.rs index 7446ab6d..7537499d 100644 --- a/src/repl.rs +++ b/src/repl.rs @@ -4,6 +4,7 @@ use std::path::{Path, PathBuf}; use argh::FromArgs; use parser::{visiting::VisitorsMut, ASTNode}; +use parser::{Expression, Module, Statement}; use std::io::{BufRead, BufReader, Write}; use std::process::{Command, Stdio}; @@ -72,7 +73,7 @@ pub(crate) fn run_deno_repl( } }; - let source_id = state.get_source_id(); + let source = state.get_source_id(); loop { let input = cli_input_resolver(""); @@ -92,17 +93,24 @@ pub(crate) fn run_deno_repl( continue; }; - let (from_index, _) = state.get_fs_mut().append_to_file(source_id, &input); + let (from_index, _) = state.get_fs_mut().append_to_file(source, &input); - let mut item = match parser::Module::from_string( - input, - Default::default(), - source_id, - Some(from_index as u32), - ) { + let options = Default::default(); + let offset = Some(from_index as u32); + let result = if input.trim_start().starts_with('{') { + Expression::from_string(input, options, source, offset).map(|expression| Module { + span: expression.get_position().clone(), + items: vec![Statement::Expression(expression.into()).into()], + source, + }) + } else { + Module::from_string(input, options, source, offset) + }; + + let mut item = match result { Ok(item) => item, Err(err) => { - emit_ezno_diagnostic((err, source_id).into(), state.get_fs_ref()).unwrap(); + emit_ezno_diagnostic((err, source).into(), state.get_fs_ref()).unwrap(); continue; } }; diff --git a/src/wasm_bindings.rs b/src/wasm_bindings.rs index cac9dd7b..7fe5e733 100644 --- a/src/wasm_bindings.rs +++ b/src/wasm_bindings.rs @@ -125,6 +125,24 @@ pub fn just_imports(input: String) -> JsValue { } } +/// Removes whitespace in module +#[wasm_bindgen] +pub fn minify_module(input: String) -> JsValue { + use parser::{ASTNode, Module, SourceId}; + + std::panic::set_hook(Box::new(console_error_panic_hook::hook)); + let item = Module::from_string(input, Default::default(), SourceId::NULL, None); + match item { + Ok(mut item) => { + serde_wasm_bindgen::to_value(&item.to_string(&parser::ToStringOptions::minified())) + .unwrap() + } + Err(parse_error) => { + serde_wasm_bindgen::to_value(&(parse_error.reason, parse_error.position)).unwrap() + } + } +} + #[wasm_bindgen] pub fn get_version() -> JsValue { serde_wasm_bindgen::to_value(&env!("CARGO_PKG_VERSION")).unwrap()