From e8d86768cb5d8c05f81c43427bedfe1014b4d088 Mon Sep 17 00:00:00 2001
From: Dawid Sygocki
Date: Sat, 4 Jun 2022 15:49:16 +0200
Subject: [PATCH] Fixes for stage 4 (syntactic analyzer)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../LexicalAnalysis/LexerErrorHandlerMock.cs | 17 +
 .../LexerTests.KeywordsAndIdentifiers.cs | 69 +
 .../LexicalAnalysis/LexerTests.Numbers.cs | 148 ++
 .../LexerTests.OperatorsAndComments.cs | 120 +
 .../LexicalAnalysis/LexerTests.Strings.cs | 78 +
 Toffee.Tests/LexicalAnalysis/LexerTests.cs | 385 +---
 .../ExpressionParsingTests.Binary.cs | 311 +++
 .../ExpressionParsingTests.Block.cs | 86 +
 .../ExpressionParsingTests.Conditional.cs | 55 +
 .../ExpressionParsingTests.ForLoop.cs | 109 +
 .../ExpressionParsingTests.FunctionCall.cs | 95 +
 ...pressionParsingTests.FunctionDefinition.cs | 145 ++
 .../ExpressionParsingTests.Grouping.cs | 110 +
 .../ExpressionParsingTests.Identifier.cs | 34 +
 .../ExpressionParsingTests.Literal.cs | 40 +
 .../ExpressionParsingTests.PatternMatching.cs | 151 ++
 .../ExpressionParsingTests.Type.cs | 48 +
 .../ExpressionParsingTests.TypeCast.cs | 44 +
 .../ExpressionParsingTests.Unary.cs | 314 +++
 .../ExpressionParsingTests.WhileLoop.cs | 64 +
 .../ExpressionParsingTests.cs | 166 ++
 ...ckExpressionMissingClosingBraceTestData.cs | 160 ++
 ...BlockExpressionMissingSemicolonTestData.cs | 97 +
 .../Generators/BlockExpressionTestData.cs | 111 +
 ...tionalBranchesMissingConsequentTestData.cs | 69 +
 .../ConditionalExpressionTestData.cs | 136 ++
 .../Generators/ExpressionStatementTestData.cs | 115 +
 .../Generators/ForLoopExpressionTestData.cs | 135 ++
 ...SpecificationMissingParenthesesTestData.cs | 62 +
 ...orLoopSpecificationMissingPartsTestData.cs | 133 ++
 ...essionMissingClosingParenthesisTestData.cs | 45 +
 .../FunctionCallExpressionTestData.cs | 70 +
 ...ionExpressionMissingParenthesisTestData.cs | 121 ++
 .../FunctionDefinitionExpressionTestData.cs | 151 ++
 ...eAccessExpressionNonIdentifiersTestData.cs | 69 +
 ...ceImportStatementNonIdentifiersTestData.cs | 75 +
 .../OperatorsAssociativityTestData.cs | 174 ++
 .../Generators/OperatorsPriorityTestData.cs | 533 +++++
 ...zedExpressionMissingParenthesesTestData.cs | 65 +
 ...atternMatchingBranchesMissingConsequent.cs | 61 +
 ...ingExpressionMissingParenthesesTestData.cs | 67 +
 .../PatternMatchingExpressionTestData.cs | 143 ++
 ...ficationMissingColonOrSemicolonTestData.cs | 105 +
 ...ionListStatementMissingVariableTestData.cs | 70 +
 ...ableInitializationListStatementTestData.cs | 119 +
 .../Generators/WhileLoopExpressionTestData.cs | 34 +
 .../{ParserTests.cs => Helpers.cs} | 13 +-
 Toffee.Tests/SyntacticAnalysis/LexerMock.cs | 5 +-
 .../ParserErrorHandlerMock.cs | 17 +
 .../ParserTests.Expressions.cs | 1927 -----------------
 .../ParserTests.Statements.cs | 389 ----
 .../StatementParsingTests.Break.cs | 29 +
 .../StatementParsingTests.BreakIf.cs | 44 +
 .../StatementParsingTests.Expression.cs | 31 +
 .../StatementParsingTests.NamespaceImport.cs | 61 +
 .../StatementParsingTests.Return.cs | 58 +
 ...ParsingTests.VariableInitializationList.cs | 138 ++
 .../StatementParsingTests.cs | 85 +
 Toffee.Tests/Toffee.Tests.csproj | 74 +-
 Toffee/CommandLine/Application.cs | 11 +-
 Toffee/LexicalAnalysis/BaseLexer.cs | 7 -
 Toffee/LexicalAnalysis/ILexer.cs | 3 -
 Toffee/LexicalAnalysis/Lexer.Numbers.cs | 1 -
 Toffee/LexicalAnalysis/LexerErrors.cs | 6 +-
 Toffee/LexicalAnalysis/LexerWarnings.cs | 6 +-
 Toffee/LexicalAnalysis/TokenMappers.cs | 14 +-
 Toffee/Running/AstPrinter.Expressions.cs | 132 +-
 Toffee/Running/AstPrinter.Statements.cs | 5 +-
 .../SyntacticAnalysis/CommentSkippingLexer.cs | 4 -
 Toffee/SyntacticAnalysis/Expression.cs | 45 +-
 Toffee/SyntacticAnalysis/IParser.cs | 4 +-
 Toffee/SyntacticAnalysis/LiteralMapper.cs | 10 +-
 Toffee/SyntacticAnalysis/OperatorMapper.cs | 45 +-
 .../SyntacticAnalysis/Parser.Expressions.cs | 255 +--
 Toffee/SyntacticAnalysis/Parser.Statements.cs | 53 +-
 Toffee/SyntacticAnalysis/Parser.cs | 97 +-
 Toffee/SyntacticAnalysis/ParserErrors.cs | 77 +-
 Toffee/SyntacticAnalysis/ParserException.cs | 8 +-
 Toffee/SyntacticAnalysis/ParserWarnings.cs | 6 +-
 Toffee/SyntacticAnalysis/Statement.cs | 23 +-
 Toffee/SyntacticAnalysis/TypeMapper.cs | 21 +-
 docs/gramatyka.md | 4 +-
 82 files changed, 6173 insertions(+), 3044 deletions(-)
 create mode 100644 Toffee.Tests/LexicalAnalysis/LexerErrorHandlerMock.cs
 create mode 100644 Toffee.Tests/LexicalAnalysis/LexerTests.KeywordsAndIdentifiers.cs
 create mode 100644 Toffee.Tests/LexicalAnalysis/LexerTests.Numbers.cs
 create mode 100644 Toffee.Tests/LexicalAnalysis/LexerTests.OperatorsAndComments.cs
 create mode 100644 Toffee.Tests/LexicalAnalysis/LexerTests.Strings.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Binary.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Block.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Conditional.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.ForLoop.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionCall.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionDefinition.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Grouping.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Identifier.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Literal.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.PatternMatching.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Type.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.TypeCast.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Unary.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.WhileLoop.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingClosingBraceTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingSemicolonTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ConditionalBranchesMissingConsequentTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ConditionalExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ExpressionStatementTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ForLoopExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingParenthesesTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingPartsTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionMissingClosingParenthesisTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionMissingParenthesisTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/NamespaceAccessExpressionNonIdentifiersTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/NamespaceImportStatementNonIdentifiersTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/OperatorsAssociativityTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/OperatorsPriorityTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/ParenthesizedExpressionMissingParenthesesTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingBranchesMissingConsequent.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionMissingParenthesesTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingSpecificationMissingColonOrSemicolonTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementMissingVariableTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementTestData.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/Generators/WhileLoopExpressionTestData.cs
 rename Toffee.Tests/SyntacticAnalysis/{ParserTests.cs => Helpers.cs} (85%)
 create mode 100644 Toffee.Tests/SyntacticAnalysis/ParserErrorHandlerMock.cs
 delete mode 100644 Toffee.Tests/SyntacticAnalysis/ParserTests.Expressions.cs
 delete mode 100644 Toffee.Tests/SyntacticAnalysis/ParserTests.Statements.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Break.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.BreakIf.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Expression.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.NamespaceImport.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Return.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.VariableInitializationList.cs
 create mode 100644 Toffee.Tests/SyntacticAnalysis/StatementParsingTests.cs

diff --git a/Toffee.Tests/LexicalAnalysis/LexerErrorHandlerMock.cs b/Toffee.Tests/LexicalAnalysis/LexerErrorHandlerMock.cs
new file mode 100644
index 0000000..c3b694f
--- /dev/null
+++ b/Toffee.Tests/LexicalAnalysis/LexerErrorHandlerMock.cs
@@ -0,0 +1,17 @@
+using System.Collections.Generic;
+using Toffee.ErrorHandling;
+using Toffee.LexicalAnalysis;
+
+namespace Toffee.Tests.LexicalAnalysis;
+
+public class LexerErrorHandlerMock : ILexerErrorHandler
+{
+    public List<LexerError> HandledErrors = new();
+    public List<LexerWarning> HandledWarnings = new();
+
+    public bool HadErrors => HandledErrors.Count > 0;
+    public bool HadWarnings => HandledWarnings.Count > 0;
+
+    public void Handle(LexerError lexerError) => HandledErrors.Add(lexerError);
+    public void Handle(LexerWarning lexerWarning) => HandledWarnings.Add(lexerWarning);
+}
diff --git a/Toffee.Tests/LexicalAnalysis/LexerTests.KeywordsAndIdentifiers.cs b/Toffee.Tests/LexicalAnalysis/LexerTests.KeywordsAndIdentifiers.cs
new file mode 100644
index 0000000..63ac39f
--- /dev/null
+++ 
b/Toffee.Tests/LexicalAnalysis/LexerTests.KeywordsAndIdentifiers.cs @@ -0,0 +1,69 @@ +using Toffee.LexicalAnalysis; +using Xunit; + +namespace Toffee.Tests.LexicalAnalysis; + +public partial class LexerTests +{ + [Trait("Category", "Keywords")] + [Theory] + [InlineData("int", TokenType.KeywordInt)] + [InlineData("float", TokenType.KeywordFloat)] + [InlineData("string", TokenType.KeywordString)] + [InlineData("bool", TokenType.KeywordBool)] + [InlineData("function", TokenType.KeywordFunction)] + [InlineData("null", TokenType.KeywordNull)] + [InlineData("init", TokenType.KeywordInit)] + [InlineData("const", TokenType.KeywordConst)] + [InlineData("pull", TokenType.KeywordPull)] + [InlineData("if", TokenType.KeywordIf)] + [InlineData("elif", TokenType.KeywordElif)] + [InlineData("else", TokenType.KeywordElse)] + [InlineData("while", TokenType.KeywordWhile)] + [InlineData("for", TokenType.KeywordFor)] + [InlineData("break", TokenType.KeywordBreak)] + [InlineData("break_if", TokenType.KeywordBreakIf)] + [InlineData("functi", TokenType.KeywordFuncti)] + [InlineData("return", TokenType.KeywordReturn)] + [InlineData("match", TokenType.KeywordMatch)] + [InlineData("and", TokenType.KeywordAnd)] + [InlineData("or", TokenType.KeywordOr)] + [InlineData("is", TokenType.KeywordIs)] + [InlineData("not", TokenType.KeywordNot)] + [InlineData("default", TokenType.KeywordDefault)] + [InlineData("false", TokenType.KeywordFalse)] + [InlineData("true", TokenType.KeywordTrue)] + public void KeywordsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Identifiers")] + [Theory] + [InlineData("integer")] + [InlineData("INIT")] + [InlineData("constantinople")] + [InlineData("ppull")] + [InlineData("iff")] + [InlineData("and2")] + [InlineData("defaul")] + public void IdentifiersBasedOnKeywordsShouldBeRecognizedCorrectly(string input) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.Identifier, lexer.CurrentToken.Type); + Assert.Equal(input, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/LexicalAnalysis/LexerTests.Numbers.cs b/Toffee.Tests/LexicalAnalysis/LexerTests.Numbers.cs new file mode 100644 index 0000000..607a4e3 --- /dev/null +++ b/Toffee.Tests/LexicalAnalysis/LexerTests.Numbers.cs @@ -0,0 +1,148 @@ +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Xunit; + +namespace Toffee.Tests.LexicalAnalysis; + +public partial class LexerTests +{ + [Trait("Category", "Numbers")] + [Theory] + [InlineData("1", 1ul)] + [InlineData("0", 0ul)] + [InlineData("9223372036854775807", 9223372036854775807ul)] + [InlineData("9223372036854775808", 9223372036854775808ul)] + [InlineData("18446744073709551615", 18446744073709551615ul)] + [InlineData("0000001", 1ul)] + [InlineData("01", 1ul)] + [InlineData("0x1", 1ul)] + [InlineData("0x001", 1ul)] + [InlineData("0xabCD", 43981ul)] + [InlineData("0c1", 1ul)] + [InlineData("0c001", 1ul)] + [InlineData("0c741", 481ul)] + [InlineData("0b1", 1ul)] + [InlineData("0b0001", 1ul)] + 
[InlineData("0b1011", 11ul)] + public void IntegersShouldBeRecognizedCorrectly(string input, ulong expectedContent) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Numbers")] + [Theory] + [InlineData("1.", 1.0)] + [InlineData("0.", 0.0)] + [InlineData("1.2345", 1.2345)] + [InlineData("000000.1", 0.1)] + [InlineData("1.7976931348623157E+308", 1.7976931348623157e308)] + [InlineData("2.2e1", 22.0)] + [InlineData("2.2e-1", 0.22)] + [InlineData("2.2e+1", 22.0)] + [InlineData("002.e1", 20.0)] + [InlineData("0.0e0", 0.0)] + [InlineData("2.E-0", 2.0)] + public void FloatsShouldBeRecognizedCorrectly(string input, double expectedContent) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Numbers")] + [Theory] + [InlineData("18446744073709551616", TokenType.LiteralInteger, 1844674407370955161ul, 19u)] + [InlineData("10.99999999999999999999", TokenType.LiteralFloat, 10.9999999999999999999, 22u)] + [InlineData("3.14e99999999999999999999", TokenType.LiteralFloat, double.PositiveInfinity, 24u)] + public void NumberLiteralOverflowShouldBeDetectedProperly(string input, TokenType expectedTokenType, + object expectedContent, uint expectedOffset) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + Assert.Equal(typeof(NumberLiteralTooLarge), lexer.CurrentError?.GetType()); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Numbers")] + [Theory] + [InlineData("0x", 'x', 0ul, 2u)] + [InlineData("0xx", 'x', 0ul, 2u)] + [InlineData("0c", 'c', 0ul, 2u)] + [InlineData("0b", 'b', 0ul, 2u)] + public void MissingNonDecimalDigitsShouldBeDetectedProperly(string input, char prefix, object expectedContent, + uint expectedOffset) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + Assert.Equal(typeof(MissingNonDecimalDigits), lexer.CurrentError?.GetType()); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + Assert.Equal(prefix, (lexer.CurrentError as MissingNonDecimalDigits)!.NonDecimalPrefix); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Numbers")] + [Theory] + [InlineData("0a", 'a', 0ul, 1u)] + [InlineData("0z", 'z', 0ul, 1u)] + [InlineData("0u", 'u', 0ul, 1u)] + public void InvalidNonDecimalPrefixesShouldBeDetectedProperly(string input, char prefix, 
object expectedContent, + uint expectedOffset) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + Assert.Equal(typeof(InvalidNonDecimalPrefix), lexer.CurrentError?.GetType()); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + Assert.Equal(prefix, (lexer.CurrentError as InvalidNonDecimalPrefix)!.NonDecimalPrefix); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Numbers")] + [Theory] + [InlineData("12.e", 12.0, 4u)] + [InlineData("1234.5678e+", 1234.5678, 11u)] + [InlineData("0.5e--", 0.5, 5u)] + [InlineData("789.ee", 789.0, 5u)] + public void MissingExponentShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + Assert.Equal(typeof(MissingExponent), lexer.CurrentError?.GetType()); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/LexicalAnalysis/LexerTests.OperatorsAndComments.cs b/Toffee.Tests/LexicalAnalysis/LexerTests.OperatorsAndComments.cs new file mode 100644 index 0000000..b7d9875 --- /dev/null +++ b/Toffee.Tests/LexicalAnalysis/LexerTests.OperatorsAndComments.cs @@ -0,0 +1,120 @@ +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Xunit; + +namespace Toffee.Tests.LexicalAnalysis; + +public partial class LexerTests +{ + [Trait("Category", "Operators")] + [Theory] + [InlineData(".", TokenType.OperatorDot)] + [InlineData("^", TokenType.OperatorCaret)] + [InlineData("+", TokenType.OperatorPlus)] + [InlineData("-", TokenType.OperatorMinus)] + [InlineData("!", TokenType.OperatorBang)] + [InlineData("*", TokenType.OperatorAsterisk)] + [InlineData("/", TokenType.OperatorSlash)] + [InlineData("%", TokenType.OperatorPercent)] + [InlineData("..", TokenType.OperatorDotDot)] + [InlineData("<", TokenType.OperatorLess)] + [InlineData("<=", TokenType.OperatorLessEquals)] + [InlineData(">", TokenType.OperatorGreater)] + [InlineData(">=", TokenType.OperatorGreaterEquals)] + [InlineData("==", TokenType.OperatorEqualsEquals)] + [InlineData("!=", TokenType.OperatorBangEquals)] + [InlineData("&&", TokenType.OperatorAndAnd)] + [InlineData("||", TokenType.OperatorOrOr)] + [InlineData("??", TokenType.OperatorQueryQuery)] + [InlineData("?>", TokenType.OperatorQueryGreater)] + [InlineData("=", TokenType.OperatorEquals)] + [InlineData("+=", TokenType.OperatorPlusEquals)] + [InlineData("-=", TokenType.OperatorMinusEquals)] + [InlineData("*=", TokenType.OperatorAsteriskEquals)] + [InlineData("/=", TokenType.OperatorSlashEquals)] + [InlineData("%=", TokenType.OperatorPercentEquals)] + [InlineData("(", TokenType.LeftParenthesis)] + [InlineData(")", TokenType.RightParenthesis)] + [InlineData("{", TokenType.LeftBrace)] + [InlineData("}", TokenType.RightBrace)] + [InlineData(",", TokenType.Comma)] + [InlineData(":", TokenType.Colon)] + [InlineData(";", TokenType.Semicolon)] + public void OperatorsShouldBeRecognizedCorrectly(string input, TokenType 
expectedTokenType) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Comments")] + [Theory] + [InlineData("//", TokenType.LineComment, false)] + [InlineData("/*", TokenType.BlockComment, true)] + [InlineData("/**/", TokenType.BlockComment, false)] + public void CommentsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType, bool shouldProduceError) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); + + Assert.False(shouldProduceError ^ errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Comments")] + [Theory] + [InlineData("//", false, "", false)] + [InlineData("// ", false, " ", false)] + [InlineData("// example content", false, " example content", false)] + [InlineData("// example\nmultiline\ncontent", false, " example", false)] + [InlineData("///**/", false, "/**/", false)] + [InlineData("/*", true, "", true)] + [InlineData("/**/", true, "", false)] + [InlineData("/* */", true, " ", false)] + [InlineData("/* example content */", true, " example content ", false)] + [InlineData("/* example\nmultiline\ncontent */", true, " example\nmultiline\ncontent ", false)] + [InlineData("/*///* /**/", true, "///* /*", false)] + public void ContentOfCommentsShouldBePreservedProperly(string input, bool isBlock, string expectedContent, bool shouldProduceError) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(isBlock ? 
TokenType.BlockComment : TokenType.LineComment, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(shouldProduceError ^ errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Operators")] + [Theory] + [InlineData("`", "`", 0u)] + [InlineData("🐲", "🐲", 0u)] + [InlineData("\a", "\a", 0u)] + [InlineData("?", "?", 0u)] + [InlineData("&", "&", 0u)] + [InlineData("|", "|", 0u)] + public void UnknownTokensShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.Unknown, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + Assert.Equal(typeof(UnknownToken), lexer.CurrentError?.GetType()); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + Assert.Equal(input, (lexer.CurrentError as UnknownToken)!.Content); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/LexicalAnalysis/LexerTests.Strings.cs b/Toffee.Tests/LexicalAnalysis/LexerTests.Strings.cs new file mode 100644 index 0000000..20aa9e1 --- /dev/null +++ b/Toffee.Tests/LexicalAnalysis/LexerTests.Strings.cs @@ -0,0 +1,78 @@ +using System; +using System.Linq; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Xunit; + +namespace Toffee.Tests.LexicalAnalysis; + +public partial class LexerTests +{ + [Trait("Category", "Strings")] + [Theory] + [InlineData(@"""""", "")] + [InlineData(@""" """, " ")] + [InlineData(@"""abcd1234""", "abcd1234")] + [InlineData(@"""aąаαáåあア汉漢👨‍💻""", "aąаαáåあア汉漢👨‍💻")] + [InlineData(@"""\a""", "\a")] + [InlineData(@"""\b""", "\b")] + [InlineData(@"""\f""", "\f")] + [InlineData(@"""\n""", "\n")] + [InlineData(@"""\r""", "\r")] + [InlineData(@"""\t""", "\t")] + [InlineData(@"""\v""", "\v")] + [InlineData(@"""\\""", "\\")] + [InlineData(@"""\""""", "\"")] + [InlineData(@"""\0""", "\0")] + [InlineData(@"""\xD""", "\xD")] + [InlineData(@"""\x6a""", "\x6a")] + public void StringsShouldBeRecognizedCorrectly(string input, string expectedContent) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Strings")] + [Theory] + [InlineData(@"""\a\b\f\n\r\t\v\\\""\0\a\b\f\n\r\t\v\\\""\0""", "\a\b\f\n\r\t\v\\\"\0\a\b\f\n\r\t\v\\\"\0")] + [InlineData(@"""\xatest""", "\xatest")] + [InlineData(@"""\x0123456""", "\x0123456")] + [InlineData(@"""\xabcdefg""", "\xabcdefg")] + [InlineData(@"""\xcactus""", "\xcactus")] + public void BoundariesOfEscapeSequencesInStringShouldBeRecognizedCorrectly(string input, string expectedContent) + { + var scannerMock = new ScannerMock(input); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Strings")] + [Theory] + 
[InlineData(@"""abcd\efg""", "abcdefg", typeof(UnknownEscapeSequence), 5u)] + [InlineData(@"""abcdefghijklmnopqrstuvw\xyz""", "abcdefghijklmnopqrstuvwyz", typeof(MissingHexCharCode), 24u)] + public void IssuesInEscapeSequencesInStringsShouldBeDetectedProperly(string input, string expectedContent, + Type expectedWarningType, uint expectedOffset) + { + var errorHandlerMock = new LexerErrorHandlerMock(); + var scannerMock = new ScannerMock(input); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); + + Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); + Assert.Equal(expectedContent, lexer.CurrentToken.Content); + var warning = errorHandlerMock.HandledWarnings.First(x => x.GetType() == expectedWarningType); + Assert.Equal(new Position(expectedOffset, 1, expectedOffset), warning.Position); + } +} diff --git a/Toffee.Tests/LexicalAnalysis/LexerTests.cs b/Toffee.Tests/LexicalAnalysis/LexerTests.cs index 00237bd..5e475d9 100644 --- a/Toffee.Tests/LexicalAnalysis/LexerTests.cs +++ b/Toffee.Tests/LexicalAnalysis/LexerTests.cs @@ -1,103 +1,26 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using Moq; -using Toffee.ErrorHandling; +using System.Collections.Generic; using Toffee.LexicalAnalysis; using Toffee.Scanning; using Xunit; namespace Toffee.Tests.LexicalAnalysis; -public class LexerTests +public partial class LexerTests { - [Trait("Category", "Operators")] - [Theory] - [InlineData(".", TokenType.OperatorDot)] - [InlineData("^", TokenType.OperatorCaret)] - [InlineData("+", TokenType.OperatorPlus)] - [InlineData("-", TokenType.OperatorMinus)] - [InlineData("!", TokenType.OperatorBang)] - [InlineData("*", TokenType.OperatorAsterisk)] - [InlineData("/", TokenType.OperatorSlash)] - [InlineData("%", TokenType.OperatorPercent)] - [InlineData("..", TokenType.OperatorDotDot)] - [InlineData("<", TokenType.OperatorLess)] - [InlineData("<=", TokenType.OperatorLessEquals)] - [InlineData(">", TokenType.OperatorGreater)] - [InlineData(">=", TokenType.OperatorGreaterEquals)] - [InlineData("==", TokenType.OperatorEqualsEquals)] - [InlineData("!=", TokenType.OperatorBangEquals)] - [InlineData("&&", TokenType.OperatorAndAnd)] - [InlineData("||", TokenType.OperatorOrOr)] - [InlineData("??", TokenType.OperatorQueryQuery)] - [InlineData("?>", TokenType.OperatorQueryGreater)] - [InlineData("=", TokenType.OperatorEquals)] - [InlineData("+=", TokenType.OperatorPlusEquals)] - [InlineData("-=", TokenType.OperatorMinusEquals)] - [InlineData("*=", TokenType.OperatorAsteriskEquals)] - [InlineData("/=", TokenType.OperatorSlashEquals)] - [InlineData("%=", TokenType.OperatorPercentEquals)] - [InlineData("(", TokenType.LeftParenthesis)] - [InlineData(")", TokenType.RightParenthesis)] - [InlineData("{", TokenType.LeftBrace)] - [InlineData("}", TokenType.RightBrace)] - [InlineData(",", TokenType.Comma)] - [InlineData(":", TokenType.Colon)] - [InlineData(";", TokenType.Semicolon)] - public void OperatorsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); - } - - [Trait("Category", "Comments")] - [Theory] - [InlineData("//", TokenType.LineComment)] - [InlineData("/*", TokenType.BlockComment)] - [InlineData("/**/", TokenType.BlockComment)] - public void CommentsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - 
Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); - } - - [Trait("Category", "Comments")] - [Theory] - [InlineData("//", false, "")] - [InlineData("// ", false, " ")] - [InlineData("// example content", false, " example content")] - [InlineData("// example\nmultiline\ncontent", false, " example")] - [InlineData("///**/", false, "/**/")] - [InlineData("/*", true, "")] - [InlineData("/**/", true, "")] - [InlineData("/* */", true, " ")] - [InlineData("/* example content */", true, " example content ")] - [InlineData("/* example\nmultiline\ncontent */", true, " example\nmultiline\ncontent ")] - [InlineData("/*///* /**/", true, "///* /*")] - public void ContentOfCommentsShouldBePreservedProperly(string input, bool isBlock, string expectedContent) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(isBlock ? TokenType.BlockComment : TokenType.LineComment, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - } - [Fact] public void EmptyInputShouldResultInEtxToken() { var scannerMock = new ScannerMock(""); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); Assert.Equal(TokenType.EndOfText, lexer.CurrentToken.Type); lexer.Advance(); Assert.Equal(TokenType.EndOfText, lexer.CurrentToken.Type); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); } [Theory] @@ -108,191 +31,13 @@ public void EmptyInputShouldResultInEtxToken() public void WhiteSpacesShouldBeSkipped(string input) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); Assert.Equal(TokenType.EndOfText, lexer.CurrentToken.Type); - } - - [Trait("Category", "Numbers")] - [Theory] - [InlineData("1", 1ul)] - [InlineData("0", 0ul)] - [InlineData("9223372036854775807", 9223372036854775807ul)] - [InlineData("9223372036854775808", 9223372036854775808ul)] - [InlineData("18446744073709551615", 18446744073709551615ul)] - [InlineData("0000001", 1ul)] - [InlineData("01", 1ul)] - [InlineData("0x1", 1ul)] - [InlineData("0x001", 1ul)] - [InlineData("0xabCD", 43981ul)] - [InlineData("0c1", 1ul)] - [InlineData("0c001", 1ul)] - [InlineData("0c741", 481ul)] - [InlineData("0b1", 1ul)] - [InlineData("0b0001", 1ul)] - [InlineData("0b1011", 11ul)] - public void IntegersShouldBeRecognizedCorrectly(string input, ulong expectedContent) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - } - - [Trait("Category", "Numbers")] - [Theory] - [InlineData("1.", 1.0)] - [InlineData("0.", 0.0)] - [InlineData("1.2345", 1.2345)] - [InlineData("000000.1", 0.1)] - [InlineData("1.7976931348623157E+308", 1.7976931348623157e308)] - [InlineData("2.2e1", 22.0)] - [InlineData("2.2e-1", 0.22)] - [InlineData("2.2e+1", 22.0)] - [InlineData("002.e1", 20.0)] - [InlineData("0.0e0", 0.0)] - [InlineData("2.E-0", 2.0)] - public void FloatsShouldBeRecognizedCorrectly(string input, double expectedContent) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - } - - [Trait("Category", "Strings")] - [Theory] - 
[InlineData(@"""""", "")] - [InlineData(@""" """, " ")] - [InlineData(@"""abcd1234""", "abcd1234")] - [InlineData(@"""aąаαáåあア汉漢👨‍💻""", "aąаαáåあア汉漢👨‍💻")] - [InlineData(@"""\a""", "\a")] - [InlineData(@"""\b""", "\b")] - [InlineData(@"""\f""", "\f")] - [InlineData(@"""\n""", "\n")] - [InlineData(@"""\r""", "\r")] - [InlineData(@"""\t""", "\t")] - [InlineData(@"""\v""", "\v")] - [InlineData(@"""\\""", "\\")] - [InlineData(@"""\""""", "\"")] - [InlineData(@"""\0""", "\0")] - [InlineData(@"""\xD""", "\xD")] - [InlineData(@"""\x6a""", "\x6a")] - public void StringsShouldBeRecognizedCorrectly(string input, string expectedContent) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - } - - [Trait("Category", "Strings")] - [Theory] - [InlineData(@"""\a\b\f\n\r\t\v\\\""\0\a\b\f\n\r\t\v\\\""\0""", "\a\b\f\n\r\t\v\\\"\0\a\b\f\n\r\t\v\\\"\0")] - [InlineData(@"""\xatest""", "\xatest")] - [InlineData(@"""\x0123456""", "\x0123456")] - [InlineData(@"""\xabcdefg""", "\xabcdefg")] - [InlineData(@"""\xcactus""", "\xcactus")] - public void BoundariesOfEscapeSequencesInStringShouldBeRecognizedCorrectly(string input, string expectedContent) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - } - - [Trait("Category", "Keywords")] - [Theory] - [InlineData("int", TokenType.KeywordInt)] - [InlineData("float", TokenType.KeywordFloat)] - [InlineData("string", TokenType.KeywordString)] - [InlineData("bool", TokenType.KeywordBool)] - [InlineData("function", TokenType.KeywordFunction)] - [InlineData("null", TokenType.KeywordNull)] - [InlineData("init", TokenType.KeywordInit)] - [InlineData("const", TokenType.KeywordConst)] - [InlineData("pull", TokenType.KeywordPull)] - [InlineData("if", TokenType.KeywordIf)] - [InlineData("elif", TokenType.KeywordElif)] - [InlineData("else", TokenType.KeywordElse)] - [InlineData("while", TokenType.KeywordWhile)] - [InlineData("for", TokenType.KeywordFor)] - [InlineData("break", TokenType.KeywordBreak)] - [InlineData("break_if", TokenType.KeywordBreakIf)] - [InlineData("functi", TokenType.KeywordFuncti)] - [InlineData("return", TokenType.KeywordReturn)] - [InlineData("match", TokenType.KeywordMatch)] - [InlineData("and", TokenType.KeywordAnd)] - [InlineData("or", TokenType.KeywordOr)] - [InlineData("is", TokenType.KeywordIs)] - [InlineData("not", TokenType.KeywordNot)] - [InlineData("default", TokenType.KeywordDefault)] - [InlineData("false", TokenType.KeywordFalse)] - [InlineData("true", TokenType.KeywordTrue)] - public void KeywordsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); - } - - [Trait("Category", "Identifiers")] - [Theory] - [InlineData("integer")] - [InlineData("INIT")] - [InlineData("constantinople")] - [InlineData("ppull")] - [InlineData("iff")] - [InlineData("and2")] - [InlineData("defaul")] - public void IdentifiersBasedOnKeywordsShouldBeRecognizedCorrectly(string input) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.Identifier, lexer.CurrentToken.Type); - Assert.Equal(input, lexer.CurrentToken.Content); - } - - 
[Trait("Category", "Strings")] - [Theory] - [InlineData(@"""abcd\efg""", "abcdefg", typeof(UnknownEscapeSequence), 5u)] - [InlineData(@"""abcdefghijklmnopqrstuvw\xyz""", "abcdefghijklmnopqrstuvwyz", typeof(MissingHexCharCode), 24u)] - public void IssuesInEscapeSequencesInStringsShouldBeDetectedProperly(string input, string expectedContent, Type expectedWarningType, uint expectedOffset) - { - var capturedAttachments = new List(); - var logger = new Mock(); - logger.Setup(x => - x.Handle(Capture.In(capturedAttachments))); - - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock, logger.Object); - - Assert.Equal(TokenType.LiteralString, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - var warning = capturedAttachments.First(x => x.GetType() == expectedWarningType); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), warning.Position); - } - - [Trait("Category", "Numbers")] - [Theory] - [InlineData("18446744073709551616", TokenType.LiteralInteger, 1844674407370955161ul, 19u)] - [InlineData("10.99999999999999999999", TokenType.LiteralFloat, 10.9999999999999999999, 22u)] - [InlineData("3.14e99999999999999999999", TokenType.LiteralFloat, double.PositiveInfinity, 24u)] - public void NumberLiteralOverflowShouldBeDetectedProperly(string input, TokenType expectedTokenType, object expectedContent, uint expectedOffset) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - Assert.Equal(typeof(NumberLiteralTooLarge), lexer.CurrentError?.GetType()); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); } [Trait("Category", "Strings")] @@ -303,51 +48,20 @@ public void NumberLiteralOverflowShouldBeDetectedProperly(string input, TokenTyp [InlineData("// abcdefg", 4, TokenType.LineComment, " abc", 6u)] [InlineData("/* abcdefg */", 4, TokenType.BlockComment, " abc", 6u)] [InlineData("abcdefg", 3, TokenType.Identifier, "abc", 3u)] - public void ExcessLexemeLengthShouldBeDetectedProperly(string input, int lengthLimit, TokenType expectedTokenType, string expectedContent, uint expectedOffset) + public void ExcessLexemeLengthShouldBeDetectedProperly(string input, int lengthLimit, TokenType expectedTokenType, + string expectedContent, uint expectedOffset) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock, maxLexemeLength: lengthLimit); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock, lengthLimit); Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); Assert.Equal(expectedContent, lexer.CurrentToken.Content); Assert.Equal(typeof(ExceededMaxLexemeLength), lexer.CurrentError?.GetType()); Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); Assert.Equal(lengthLimit, (lexer.CurrentError as ExceededMaxLexemeLength)!.MaxLexemeLength); - } - [Trait("Category", "Numbers")] - [Theory] - [InlineData("0x", 'x', 0ul, 2u)] - [InlineData("0xx", 'x', 0ul, 2u)] - [InlineData("0c", 'c', 0ul, 2u)] - [InlineData("0b", 'b', 0ul, 2u)] - public void MissingNonDecimalDigitsShouldBeDetectedProperly(string input, char prefix, object expectedContent, uint expectedOffset) - { - var scannerMock = new ScannerMock(input); - var lexer = new 
Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - Assert.Equal(typeof(MissingNonDecimalDigits), lexer.CurrentError?.GetType()); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); - Assert.Equal(prefix, (lexer.CurrentError as MissingNonDecimalDigits)!.NonDecimalPrefix); - } - - [Trait("Category", "Numbers")] - [Theory] - [InlineData("0a", 'a', 0ul, 1u)] - [InlineData("0z", 'z', 0ul, 1u)] - [InlineData("0u", 'u', 0ul, 1u)] - public void InvalidNonDecimalPrefixesShouldBeDetectedProperly(string input, char prefix, object expectedContent, uint expectedOffset) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - Assert.Equal(typeof(InvalidNonDecimalPrefix), lexer.CurrentError?.GetType()); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); - Assert.Equal(prefix, (lexer.CurrentError as InvalidNonDecimalPrefix)!.NonDecimalPrefix); + Assert.False(errorHandlerMock.HadWarnings); } [Trait("Category", "Strings")] @@ -355,53 +69,20 @@ public void InvalidNonDecimalPrefixesShouldBeDetectedProperly(string input, char [Theory] [InlineData(@"""nerotaruk", TokenType.LiteralString, "nerotaruk", 10u)] [InlineData("/* zaq1@WSX", TokenType.BlockComment, " zaq1@WSX", 11u)] - public void UnexpectedEndOfTextShouldBeDetectedProperly(string input, TokenType expectedTokenType, object expectedContent, uint expectedOffset) + public void UnexpectedEndOfTextShouldBeDetectedProperly(string input, TokenType expectedTokenType, + object expectedContent, uint expectedOffset) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); Assert.Equal(expectedContent, lexer.CurrentToken.Content); Assert.Equal(typeof(UnexpectedEndOfText), lexer.CurrentError?.GetType()); Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); Assert.Equal(expectedTokenType, (lexer.CurrentError as UnexpectedEndOfText)!.BuiltTokenType); - } - [Trait("Category", "Operators")] - [Theory] - [InlineData("`", "`", 0u)] - [InlineData("🐲", "🐲", 0u)] - [InlineData("\a", "\a", 0u)] - [InlineData("?", "?", 0u)] - [InlineData("&", "&", 0u)] - [InlineData("|", "|", 0u)] - public void UnknownTokensShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset) - { - var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); - - Assert.Equal(TokenType.Unknown, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - Assert.Equal(typeof(UnknownToken), lexer.CurrentError?.GetType()); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); - Assert.Equal(input, (lexer.CurrentError as UnknownToken)!.Content); - } - - [Trait("Category", "Numbers")] - [Theory] - [InlineData("12.e", 12.0, 4u)] - [InlineData("1234.5678e+", 1234.5678, 11u)] - [InlineData("0.5e--", 0.5, 5u)] - [InlineData("789.ee", 789.0, 5u)] - public void MissingExponentShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset) - { - var scannerMock = new ScannerMock(input); - var lexer = new 
Lexer(scannerMock); - - Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type); - Assert.Equal(expectedContent, lexer.CurrentToken.Content); - Assert.Equal(typeof(MissingExponent), lexer.CurrentError?.GetType()); - Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position); + Assert.False(errorHandlerMock.HadWarnings); } [Theory] @@ -416,15 +97,20 @@ public void MissingExponentShouldBeDetectedProperly(string input, object expecte [InlineData("500+", TokenType.OperatorPlus, "+")] [InlineData("112..", TokenType.OperatorDot, ".")] [InlineData("...", TokenType.OperatorDot, ".")] - public void TokenInSequenceShouldHaveNoImpactOnItsSuccessor(string input, TokenType expectedTokenType, object expectedContent) + public void TokenInSequenceShouldHaveNoImpactOnItsSuccessor(string input, TokenType expectedTokenType, + object expectedContent) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); lexer.Advance(); Assert.Equal(expectedTokenType, lexer.CurrentToken.Type); Assert.Equal(expectedContent, lexer.CurrentToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); } [Theory] @@ -439,15 +125,20 @@ public void TokenInSequenceShouldHaveNoImpactOnItsSuccessor(string input, TokenT [InlineData("500+", TokenType.LiteralInteger, 500ul)] [InlineData("112..", TokenType.LiteralFloat, 112.0)] [InlineData("...", TokenType.OperatorDotDot, "..")] - public void SupersededTokenShouldBeReturnedByAdvanceMethodCorrectly(string input, TokenType expectedTokenType, object expectedContent) + public void SupersededTokenShouldBeReturnedByAdvanceMethodCorrectly(string input, TokenType expectedTokenType, + object expectedContent) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); var supersededToken = lexer.Advance(); Assert.Equal(expectedTokenType, supersededToken.Type); Assert.Equal(expectedContent, supersededToken.Content); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); } [Theory] @@ -455,7 +146,8 @@ public void SupersededTokenShouldBeReturnedByAdvanceMethodCorrectly(string input public void PositionShouldBeCalculatedCorrectly(string input, uint tokenIndex, Token expectedToken) { var scannerMock = new ScannerMock(input); - var lexer = new Lexer(scannerMock); + var errorHandlerMock = new LexerErrorHandlerMock(); + ILexer lexer = new Lexer(scannerMock, errorHandlerMock); for (var i = 0u; i < tokenIndex; i++) lexer.Advance(); @@ -463,6 +155,9 @@ public void PositionShouldBeCalculatedCorrectly(string input, uint tokenIndex, T Assert.Equal(expectedToken.Type, lexer.CurrentToken.Type); Assert.Equal(expectedToken.StartPosition, lexer.CurrentToken.StartPosition); Assert.Equal(expectedToken.EndPosition, lexer.CurrentToken.EndPosition); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); } public static IEnumerable TestSequenceEnumerable() diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Binary.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Binary.cs new file mode 100644 index 0000000..d279fb8 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Binary.cs @@ -0,0 +1,311 @@ +using System.Linq; +using FluentAssertions; +using 
Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Binary expressions")] + [Theory] + [InlineData(TokenType.OperatorDot, Operator.NamespaceAccess)] + [InlineData(TokenType.OperatorCaret, Operator.Exponentiation)] + [InlineData(TokenType.OperatorPlus, Operator.Addition)] + [InlineData(TokenType.OperatorMinus, Operator.Subtraction)] + [InlineData(TokenType.OperatorAsterisk, Operator.Multiplication)] + [InlineData(TokenType.OperatorSlash, Operator.Division)] + [InlineData(TokenType.OperatorPercent, Operator.Remainder)] + [InlineData(TokenType.OperatorDotDot, Operator.Concatenation)] + [InlineData(TokenType.OperatorLess, Operator.LessThanComparison)] + [InlineData(TokenType.OperatorLessEquals, Operator.LessOrEqualComparison)] + [InlineData(TokenType.OperatorGreater, Operator.GreaterThanComparison)] + [InlineData(TokenType.OperatorGreaterEquals, Operator.GreaterOrEqualComparison)] + [InlineData(TokenType.OperatorEqualsEquals, Operator.EqualComparison)] + [InlineData(TokenType.OperatorBangEquals, Operator.NotEqualComparison)] + [InlineData(TokenType.OperatorAndAnd, Operator.Conjunction)] + [InlineData(TokenType.OperatorOrOr, Operator.Disjunction)] + [InlineData(TokenType.OperatorQueryQuery, Operator.NullCoalescing)] + [InlineData(TokenType.OperatorQueryGreater, Operator.NullSafePipe)] + [InlineData(TokenType.OperatorEquals, Operator.Assignment)] + [InlineData(TokenType.OperatorPlusEquals, Operator.AdditionAssignment)] + [InlineData(TokenType.OperatorMinusEquals, Operator.SubtractionAssignment)] + [InlineData(TokenType.OperatorAsteriskEquals, Operator.MultiplicationAssignment)] + [InlineData(TokenType.OperatorSlashEquals, Operator.DivisionAssignment)] + [InlineData(TokenType.OperatorPercentEquals, Operator.RemainderAssignment)] + public void BinaryExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) + { + const string leftIdentifierName = "a"; + var leftToken = new Token(TokenType.Identifier, leftIdentifierName); + var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); + + var opToken = Helpers.GetDefaultToken(operatorTokenType); + + const string rightIdentifierName = "b"; + var rightToken = new Token(TokenType.Identifier, rightIdentifierName); + var expectedRightExpression = new IdentifierExpression(rightIdentifierName); + + var lexerMock = new LexerMock(leftToken, opToken, rightToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var expression = expressionStatement.Expression.As(); + expression.Should().NotBeNull(); + expression!.Left.Should().BeEquivalentTo(expectedLeftExpression, Helpers.ProvideOptions); + expression.Operator.Should().Be(expectedOperator); + expression.Right.Should().BeEquivalentTo(expectedRightExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Binary expressions")] + [Theory] + [InlineData(new[] { TokenType.KeywordIs }, TokenType.KeywordInt, Operator.EqualTypeCheck, DataType.Integer)] + [InlineData(new[] { 
TokenType.KeywordIs, TokenType.KeywordNot }, TokenType.KeywordNull, Operator.NotEqualTypeCheck, DataType.Null)] + public void TypeCheckingBinaryExpressionsShouldBeParsedCorrectly(TokenType[] operatorTokenTypes, TokenType typeTokenType, Operator expectedOperator, DataType expectedType) + { + const string leftIdentifierName = "a"; + var leftToken = new Token(TokenType.Identifier, leftIdentifierName); + var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); + + var opTokens = operatorTokenTypes.Select(Helpers.GetDefaultToken).ToArray(); + + var rightToken = Helpers.GetDefaultToken(typeTokenType); + var expectedRightExpression = new TypeExpression(expectedType); + + var lexerMock = new LexerMock(opTokens.Prepend(leftToken).Append(rightToken).AppendSemicolon()); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var expression = expressionStatement.Expression.As(); + expression.Should().NotBeNull(); + expression!.Left.Should().BeEquivalentTo(expectedLeftExpression, Helpers.ProvideOptions); + expression.Operator.Should().Be(expectedOperator); + expression.Right.Should().BeEquivalentTo(expectedRightExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Binary expressions")] + [Trait("Category", "Pattern matching expressions")] + [Theory] + [InlineData(TokenType.KeywordOr, Operator.PatternMatchingDisjunction)] + [InlineData(TokenType.KeywordAnd, Operator.PatternMatchingConjunction)] + public void BinaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) + { + const string leftIdentifierName = "b"; + const string rightIdentifierName = "c"; + + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace), + new Token(TokenType.Identifier, leftIdentifierName), + Helpers.GetDefaultToken(operatorTokenType), + new Token(TokenType.Identifier, rightIdentifierName), + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); + var expectedRightExpression = new IdentifierExpression(rightIdentifierName); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().NotBeNull(); + patternMatchingExpression.Branches.Should().HaveCount(1); + + var binaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); + binaryExpression.Should().NotBeNull(); + binaryExpression!.Left.Should().BeEquivalentTo(expectedLeftExpression, 
Helpers.ProvideOptions); + binaryExpression.Operator.Should().Be(expectedOperator); + binaryExpression.Right.Should().BeEquivalentTo(expectedRightExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Binary expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(TokenType.OperatorDot)] + [InlineData(TokenType.OperatorCaret)] + [InlineData(TokenType.OperatorPlus)] + [InlineData(TokenType.OperatorMinus)] + [InlineData(TokenType.OperatorAsterisk)] + [InlineData(TokenType.OperatorSlash)] + [InlineData(TokenType.OperatorPercent)] + [InlineData(TokenType.OperatorDotDot)] + [InlineData(TokenType.OperatorLess)] + [InlineData(TokenType.OperatorLessEquals)] + [InlineData(TokenType.OperatorGreater)] + [InlineData(TokenType.OperatorGreaterEquals)] + [InlineData(TokenType.OperatorEqualsEquals)] + [InlineData(TokenType.OperatorBangEquals)] + [InlineData(TokenType.OperatorAndAnd)] + [InlineData(TokenType.OperatorOrOr)] + [InlineData(TokenType.OperatorQueryQuery)] + [InlineData(TokenType.OperatorQueryGreater)] + [InlineData(TokenType.OperatorEquals)] + [InlineData(TokenType.OperatorPlusEquals)] + [InlineData(TokenType.OperatorMinusEquals)] + [InlineData(TokenType.OperatorAsteriskEquals)] + [InlineData(TokenType.OperatorSlashEquals)] + [InlineData(TokenType.OperatorPercentEquals)] + public void MissingRightSideOfBinaryExpressionsShouldBeDetectedProperly(TokenType operatorTokenType) + { + const string leftIdentifierName = "a"; + + var tokenSequence = new[] + { + new Token(TokenType.Identifier, leftIdentifierName), + Helpers.GetDefaultToken(operatorTokenType), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(2, 1, 2), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Binary expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(new[] { TokenType.KeywordIs })] + [InlineData(new[] { TokenType.KeywordIs, TokenType.KeywordNot })] + public void MissingTypeInTypeCheckingBinaryExpressionsShouldBeDetectedProperly(TokenType[] operatorTokenTypes) + { + const string leftIdentifierName = "a"; + + var tokenSequence = operatorTokenTypes.Select(Helpers.GetDefaultToken) + .Prepend(new Token(TokenType.Identifier, leftIdentifierName)) + .AppendSemicolon(); + + var errorPosition = (uint)operatorTokenTypes.Length + 1; + var expectedError = new UnexpectedToken(new Position(errorPosition, 1, errorPosition), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should() + .BeEquivalentTo(expectedError, o => o.Excluding(i => i.Name == "ExpectedType")); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordInt); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordFloat); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordString); + 
errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordBool); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordFunction); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordNull); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Binary expressions")] + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(TokenType.KeywordOr)] + [InlineData(TokenType.KeywordAnd)] + public void MissingRightSideOfBinaryPatternMatchingExpressionsShouldBeDetectedProperly(TokenType operatorTokenType) + { + const string leftIdentifierName = "b"; + + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace), + new Token(TokenType.Identifier, leftIdentifierName), + Helpers.GetDefaultToken(operatorTokenType), + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(7, 1, 7), TokenType.Colon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Namespace import statements")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(NamespaceAccessExpressionNonIdentifiersTestData))] + public void NonIdentifiersInNamespaceAccessExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.IsTerminated.Should().BeTrue(); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Block.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Block.cs new file mode 100644 index 0000000..df95a90 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Block.cs @@ -0,0 +1,86 @@ +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Block expressions")] + [Theory] + [ClassData(typeof(BlockExpressionTestData))] + public void BlockExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Statement[] expectedStatementList, Expression? 
expectedResultExpression) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var blockExpression = expressionStatement.Expression.As(); + blockExpression.Should().NotBeNull(); + blockExpression!.Statements.ToArray().Should().BeEquivalentTo(expectedStatementList, Helpers.ProvideOptions); + blockExpression.ResultExpression.Should().BeEquivalentTo(expectedResultExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Block expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(BlockExpressionMissingClosingBraceTestData))] + public void MissingClosingBraceInBlockExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Statement[] expectedStatementList, Expression? expectedResultExpression, ParserError expectedError, bool shouldStatementBeTerminated) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(shouldStatementBeTerminated); + + var blockExpression = expressionStatement.Expression.As(); + blockExpression.Should().NotBeNull(); + blockExpression!.Statements.ToArray().Should().BeEquivalentTo(expectedStatementList, Helpers.ProvideOptions); + blockExpression.ResultExpression.Should().BeEquivalentTo(expectedResultExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Block expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(BlockExpressionMissingSemicolonTestData))] + public void MissingSemicolonInBlockExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, params ParserError[] expectedErrors) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var blockExpression = expressionStatement.Expression.As(); + blockExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + for (var i = 0; i < expectedErrors.Length; i++) + errorHandlerMock.HandledErrors[i].Should().BeEquivalentTo(expectedErrors[i]); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Conditional.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Conditional.cs new file mode 100644 index 0000000..b4f94c5 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Conditional.cs @@ -0,0 +1,55 @@ +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial 
class ExpressionParsingTest +{ + [Trait("Category", "Conditional expressions")] + [Theory] + [ClassData(typeof(ConditionalExpressionTestData))] + public void ConditionalExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, ConditionalElement expectedIfPart, ConditionalElement[] expectedElifParts, Expression? expectedElsePart) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var conditionalExpression = expressionStatement.Expression.As(); + conditionalExpression.Should().NotBeNull(); + conditionalExpression.IfPart.Should().BeEquivalentTo(expectedIfPart, Helpers.ProvideOptions); + conditionalExpression.ElifParts.ToArray().Should().BeEquivalentTo(expectedElifParts, Helpers.ProvideOptions); + conditionalExpression.ElsePart.Should().BeEquivalentTo(expectedElsePart, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Conditional expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(ConditionalBranchesMissingConsequentTestData))] + public void MissingConsequentOfConditionalBranchesShouldBeDetectedProperly(Token[] tokenSequence, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.ForLoop.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.ForLoop.cs new file mode 100644 index 0000000..681e82c --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.ForLoop.cs @@ -0,0 +1,109 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "For loop expressions")] + [Theory] + [ClassData(typeof(ForLoopExpressionTestData))] + public void ForLoopExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, string? 
expectedCounterName, ForLoopRange expectedRange, Expression expectedBody) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var forLoopExpression = expressionStatement.Expression.As(); + forLoopExpression.Should().NotBeNull(); + forLoopExpression.CounterName.Should().Be(expectedCounterName); + forLoopExpression.Range.Should().BeEquivalentTo(expectedRange, Helpers.ProvideOptions); + forLoopExpression.Body.Should().BeEquivalentTo(expectedBody, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "For loop expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(ForLoopSpecificationMissingParenthesesTestData))] + public void MissingParenthesesInForLoopSpecificationsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, params ParserError[] expectedErrors) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var forLoopExpression = expressionStatement.Expression.As(); + forLoopExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + for (var i = 0; i < expectedErrors.Length; i++) + errorHandlerMock.HandledErrors[i].Should().BeEquivalentTo(expectedErrors[i]); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "For loop expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(ForLoopSpecificationMissingPartsTestData))] + public void MissingPartsOfForLoopSpecificationsShouldBeDetectedProperly(Token[] tokenSequence, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "For loop expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingBodyOfForLoopExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordFor), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(4, 1, 4), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionCall.cs 
b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionCall.cs new file mode 100644 index 0000000..4b76a5a --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionCall.cs @@ -0,0 +1,95 @@ +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Function call expressions")] + [Theory] + [ClassData(typeof(FunctionCallExpressionTestData))] + public void FunctionCallExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedCalledExpression, Expression[] expectedArguments) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var functionCallExpression = expressionStatement.Expression.As(); + functionCallExpression.Should().NotBeNull(); + functionCallExpression.Expression.Should().BeEquivalentTo(expectedCalledExpression, Helpers.ProvideOptions); + functionCallExpression.Arguments.ToArray().Should().BeEquivalentTo(expectedArguments, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Function call expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(FunctionCallExpressionMissingClosingParenthesisTestData))] + public void MissingClosingParenthesisInFunctionCallExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var functionCallExpression = expressionStatement.Expression.As(); + functionCallExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Function call expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MissingArgumentInFunctionCallExpressionsShouldBeDetectedProperly(bool missingAfterComma) + { + var commaAndArgument = new Token[2]; + commaAndArgument[missingAfterComma ? 1 : 0] = Helpers.GetDefaultToken(TokenType.Comma); + commaAndArgument[missingAfterComma ? 0 : 1] = new Token(TokenType.Identifier, "b"); + var tokenSequence = new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + }.Concat(commaAndArgument) + .Append(Helpers.GetDefaultToken(TokenType.RightParenthesis)) + .AppendSemicolon(); + + var errorPosition = 2u + (missingAfterComma ? 2u : 0u); + var expectedError = new ExpectedExpression(new Position(errorPosition, 1, errorPosition), + missingAfterComma ? 
TokenType.RightParenthesis : TokenType.Comma); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionDefinition.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionDefinition.cs new file mode 100644 index 0000000..c25260f --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.FunctionDefinition.cs @@ -0,0 +1,145 @@ +using System; +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Function definition expressions")] + [Theory] + [ClassData(typeof(FunctionDefinitionExpressionTestData))] + public void FunctionDefinitionExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, FunctionParameter[] expectedParameters, BlockExpression expectedBody) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var functionDefinitionExpression = expressionStatement.Expression.As(); + functionDefinitionExpression.Should().NotBeNull(); + functionDefinitionExpression.Parameters.ToArray().Should().BeEquivalentTo(expectedParameters, Helpers.ProvideOptions); + functionDefinitionExpression.Body.Should().BeEquivalentTo(expectedBody, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Function definition expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(FunctionDefinitionExpressionMissingParenthesisTestData))] + public void MissingParenthesesInFunctionDefinitionParameterListShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, params ParserError[] expectedErrors) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var functionDefinitionExpression = expressionStatement.Expression.As(); + functionDefinitionExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + for (var i = 0; i < expectedErrors.Length; i++) + errorHandlerMock.HandledErrors[i].Should().BeEquivalentTo(expectedErrors[i]); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Function definition expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingBodyOfFunctionDefinitionParameterListShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordFuncti), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + 
Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedBlockExpression(new Position(3, 1, 3), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Function definition expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(false, false, false)] + [InlineData(false, false, true)] + [InlineData(false, true, false)] + [InlineData(false, true, true)] + [InlineData(true, false, false)] + [InlineData(true, false, true)] + [InlineData(true, true, false)] + [InlineData(true, true, true)] + public void MissingParameterInFunctionDefinitionExpressionsShouldBeDetectedProperly(bool missingAfterComma, bool missingIsConst, bool presentIsConst) + { + var missingParameter = missingIsConst ? new[] { Helpers.GetDefaultToken(TokenType.KeywordConst) } : Array.Empty(); + var presentParameter = presentIsConst + ? new[] { Helpers.GetDefaultToken(TokenType.KeywordConst), new Token(TokenType.Identifier, "a") } + : new[] { new Token(TokenType.Identifier, "a") }; + var firstParameter = missingAfterComma ? presentParameter : missingParameter; + var secondParameter = missingAfterComma ? missingParameter : presentParameter; + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordFuncti), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + }.Concat(firstParameter) + .Append(Helpers.GetDefaultToken(TokenType.Comma)) + .Concat(secondParameter) + .Concat(new[] + { + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace), + Helpers.GetDefaultToken(TokenType.RightBrace) + }) + .AppendSemicolon(); + + var errorPosition = 2u; + if (missingAfterComma) + errorPosition += 2u + (missingIsConst ? 1u : 0u) + (presentIsConst ? 1u : 0u); + else + errorPosition += missingIsConst ? 1u : 0u; + var expectedError = new ExpectedExpression(new Position(errorPosition, 1, errorPosition), + missingAfterComma ? 
TokenType.RightParenthesis : TokenType.Comma); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Grouping.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Grouping.cs new file mode 100644 index 0000000..a6f71f3 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Grouping.cs @@ -0,0 +1,110 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Grouping expressions")] + [Trait("Category", "Nesting")] + [Fact] + public void GroupingExpressionsShouldBeParsedCorrectly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.LiteralInteger, 5ul), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedExpression = new GroupingExpression(new GroupingExpression(new BinaryExpression( + new GroupingExpression(new IdentifierExpression("a")), + Operator.Addition, + new LiteralExpression(DataType.Integer, 5ul)))); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Grouping expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingClosingParenthesisInGroupingExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedExpression = new GroupingExpression(new IdentifierExpression("a")); + + var expectedError = new UnexpectedToken(new Position(2, 1, 2), TokenType.Semicolon, TokenType.RightParenthesis); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + 
[Trait("Category", "Grouping expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingExpressionInGroupingExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(1, 1, 1), TokenType.RightParenthesis); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Identifier.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Identifier.cs new file mode 100644 index 0000000..8cbd7b4 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Identifier.cs @@ -0,0 +1,34 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Identifier expressions")] + [Fact] + public void IdentifierExpressionsShouldBeParsedCorrectly() + { + const string identifierName = "ident"; + var identifierToken = new Token(TokenType.Identifier, identifierName); + + var lexerMock = new LexerMock(identifierToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var expression = expressionStatement.Expression.As(); + expression.Should().NotBeNull(); + expression!.Name.Should().Be(identifierName); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Literal.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Literal.cs new file mode 100644 index 0000000..62b2410 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Literal.cs @@ -0,0 +1,40 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Literal expressions")] + [Theory] + [InlineData(TokenType.LiteralInteger, 1234ul, DataType.Integer, 1234ul)] + [InlineData(TokenType.LiteralFloat, 3.14, DataType.Float, 3.14)] + [InlineData(TokenType.LiteralString, "abcd", DataType.String, "abcd")] + [InlineData(TokenType.KeywordTrue, "true", DataType.Bool, true)] + [InlineData(TokenType.KeywordFalse, "false", DataType.Bool, false)] + [InlineData(TokenType.KeywordNull, null, DataType.Null, null)] + public void LiteralExpressionsShouldBeParsedCorrectly(TokenType literalTokenType, object? literalTokenContent, DataType literalType, object? 
literalValue) + { + var literalToken = new Token(literalTokenType, literalTokenContent); + + var lexerMock = new LexerMock(literalToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var expression = expressionStatement.Expression.As(); + expression.Should().NotBeNull(); + expression!.Type.Should().Be(literalType); + expression.Value.Should().Be(literalValue); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.PatternMatching.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.PatternMatching.cs new file mode 100644 index 0000000..e3d6150 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.PatternMatching.cs @@ -0,0 +1,151 @@ +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Pattern matching expressions")] + [Theory] + [ClassData(typeof(PatternMatchingExpressionTestData))] + public void PatternMatchingExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedArgument, PatternMatchingBranch[] expectedBranches, Expression? expectedDefault) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().NotBeNull(); + patternMatchingExpression.Argument.Should().BeEquivalentTo(expectedArgument, Helpers.ProvideOptions); + patternMatchingExpression.Branches.ToArray().Should().BeEquivalentTo(expectedBranches, Helpers.ProvideOptions); + patternMatchingExpression.Default.Should().BeEquivalentTo(expectedDefault, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(PatternMatchingExpressionMissingParenthesesTestData))] + public void MissingParenthesesInPatternMatchingExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, params ParserError[] expectedErrors) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + for (var i = 0; i < expectedErrors.Length; i++) + 
errorHandlerMock.HandledErrors[i].Should().BeEquivalentTo(expectedErrors[i]); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(PatternMatchingSpecificationMissingColonOrSemicolonTestData))] + public void MissingColonOrSemicolonInPatternMatchingSpecificationsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(PatternMatchingBranchesMissingConsequent))] + public void MissingConsequentOfPatternMatchingBranchesShouldBeDetectedProperly(Token[] tokenSequence, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Pattern expressions")] + [Trait("Category", "Grouping expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingClosingParenthesisInGroupingPatternExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedExpression = new PatternMatchingExpression(new IdentifierExpression("a"), + new List { new(new GroupingExpression(new IdentifierExpression("b")), + new IdentifierExpression("c")) }); + + var expectedError = new UnexpectedToken(new Position(7, 1, 7), TokenType.Colon, TokenType.RightParenthesis); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git 
a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Type.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Type.cs new file mode 100644 index 0000000..a7f45ca --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Type.cs @@ -0,0 +1,48 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Type expressions")] + [Theory] + [InlineData(TokenType.KeywordInt, DataType.Integer)] + [InlineData(TokenType.KeywordFloat, DataType.Float)] + [InlineData(TokenType.KeywordString, DataType.String)] + [InlineData(TokenType.KeywordBool, DataType.Bool)] + [InlineData(TokenType.KeywordFunction, DataType.Function)] + [InlineData(TokenType.KeywordNull, DataType.Null)] + public void TypeExpressionsShouldBeParsedCorrectly(TokenType typeTokenType, DataType type) + { + var tokenSequence = new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(typeTokenType), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var binaryExpression = expressionStatement.Expression.As(); + binaryExpression.Should().NotBeNull(); + + var typeExpression = binaryExpression.Right.As(); + typeExpression.Should().NotBeNull(); + typeExpression!.Type.Should().Be(type); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.TypeCast.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.TypeCast.cs new file mode 100644 index 0000000..500c30c --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.TypeCast.cs @@ -0,0 +1,44 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + [Trait("Category", "Type cast expressions")] + [Theory] + [InlineData(TokenType.KeywordInt, DataType.Integer)] + [InlineData(TokenType.KeywordFloat, DataType.Float)] + [InlineData(TokenType.KeywordString, DataType.String)] + [InlineData(TokenType.KeywordBool, DataType.Bool)] + public void TypeCastExpressionsShouldBeParsedCorrectly(TokenType literalTokenType, DataType expectedCastType) + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(literalTokenType), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var expression = expressionStatement.Expression.As(); + expression.Should().NotBeNull(); + expression.Type.Should().Be(expectedCastType); + + Assert.False(errorHandlerMock.HadErrors); + 
        Assert.False(errorHandlerMock.HadWarnings);
+    }
+}
diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Unary.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Unary.cs
new file mode 100644
index 0000000..838c152
--- /dev/null
+++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.Unary.cs
@@ -0,0 +1,314 @@
+using System.Linq;
+using FluentAssertions;
+using Toffee.LexicalAnalysis;
+using Toffee.Scanning;
+using Toffee.SyntacticAnalysis;
+using Xunit;
+
+namespace Toffee.Tests.SyntacticAnalysis;
+
+public partial class ExpressionParsingTest
+{
+    [Trait("Category", "Unary expressions")]
+    [Theory]
+    [InlineData(TokenType.OperatorPlus, Operator.NumberPromotion)]
+    [InlineData(TokenType.OperatorMinus, Operator.ArithmeticNegation)]
+    [InlineData(TokenType.OperatorBang, Operator.LogicalNegation)]
+    public void UnaryExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator)
+    {
+        var opToken = Helpers.GetDefaultToken(operatorTokenType);
+
+        const string identifierName = "a";
+        var token = new Token(TokenType.Identifier, identifierName);
+        var expectedExpression = new IdentifierExpression(identifierName);
+
+        var lexerMock = new LexerMock(opToken, token, Helpers.GetDefaultToken(TokenType.Semicolon));
+        var errorHandlerMock = new ParserErrorHandlerMock();
+        IParser parser = new Parser(lexerMock, errorHandlerMock);
+
+        parser.Advance();
+
+        var expressionStatement = parser.CurrentStatement.As<ExpressionStatement>();
+        expressionStatement.Should().NotBeNull();
+        expressionStatement!.IsTerminated.Should().Be(true);
+
+        var expression = expressionStatement.Expression.As<UnaryExpression>();
+        expression.Should().NotBeNull();
+        expression!.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions);
+        expression.Operator.Should().Be(expectedOperator);
+
+        Assert.False(errorHandlerMock.HadErrors);
+        Assert.False(errorHandlerMock.HadWarnings);
+    }
+
+    [Trait("Category", "Unary expressions")]
+    [Trait("Category", "Pattern matching expressions")]
+    [Theory]
+    [InlineData(TokenType.OperatorLess, Operator.PatternMatchingLessThanComparison)]
+    [InlineData(TokenType.OperatorLessEquals, Operator.PatternMatchingLessOrEqualComparison)]
+    [InlineData(TokenType.OperatorGreater, Operator.PatternMatchingGreaterThanComparison)]
+    [InlineData(TokenType.OperatorGreaterEquals, Operator.PatternMatchingGreaterOrEqualComparison)]
+    [InlineData(TokenType.OperatorEqualsEquals, Operator.PatternMatchingEqualComparison)]
+    [InlineData(TokenType.OperatorBangEquals, Operator.PatternMatchingNotEqualComparison)]
+    public void UnaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator)
+    {
+        var tokenSequence = new[]
+        {
+            Helpers.GetDefaultToken(TokenType.KeywordMatch),
+            Helpers.GetDefaultToken(TokenType.LeftParenthesis),
+            new Token(TokenType.Identifier, "a"),
+            Helpers.GetDefaultToken(TokenType.RightParenthesis),
+            Helpers.GetDefaultToken(TokenType.LeftBrace),
+            Helpers.GetDefaultToken(operatorTokenType),
+            new Token(TokenType.LiteralInteger, 5ul),
+            Helpers.GetDefaultToken(TokenType.Colon),
+            new Token(TokenType.Identifier, "d"),
+            Helpers.GetDefaultToken(TokenType.Semicolon),
+            Helpers.GetDefaultToken(TokenType.RightBrace),
+            Helpers.GetDefaultToken(TokenType.Semicolon)
+        };
+
+        var expectedExpression = new LiteralExpression(DataType.Integer, 5ul);
+
+        var lexerMock = new LexerMock(tokenSequence);
+        var errorHandlerMock = new ParserErrorHandlerMock();
+        IParser parser = new Parser(lexerMock, errorHandlerMock);
+
+        parser.Advance();
+
var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().NotBeNull(); + patternMatchingExpression.Branches.Should().HaveCount(1); + + var unaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); + unaryExpression.Should().NotBeNull(); + unaryExpression.Operator.Should().Be(expectedOperator); + unaryExpression.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Unary expressions")] + [Trait("Category", "Pattern matching expressions")] + [Theory] + [InlineData(new[] { TokenType.KeywordIs }, TokenType.KeywordInt, Operator.PatternMatchingEqualTypeCheck, DataType.Integer)] + [InlineData(new[] { TokenType.KeywordIs, TokenType.KeywordNot }, TokenType.KeywordNull, Operator.PatternMatchingNotEqualTypeCheck, DataType.Null)] + public void TypeCheckingUnaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType[] operatorTokenTypes, TokenType typeTokenType, Operator expectedOperator, DataType expectedType) + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace) + }.Concat(operatorTokenTypes.Select(Helpers.GetDefaultToken)).Concat(new[] + { + Helpers.GetDefaultToken(typeTokenType), + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }).ToArray(); + + var expectedExpression = new TypeExpression(expectedType); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var patternMatchingExpression = expressionStatement.Expression.As(); + patternMatchingExpression.Should().NotBeNull(); + patternMatchingExpression.Branches.Should().HaveCount(1); + + var unaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); + unaryExpression.Should().NotBeNull(); + unaryExpression.Operator.Should().Be(expectedOperator); + unaryExpression.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Unary expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(TokenType.OperatorPlus)] + [InlineData(TokenType.OperatorMinus)] + [InlineData(TokenType.OperatorBang)] + public void MissingExpressionsInUnaryExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType) + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(operatorTokenType), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(1, 1, 1), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = 
new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Unary expressions")] + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(TokenType.OperatorLess)] + [InlineData(TokenType.OperatorLessEquals)] + [InlineData(TokenType.OperatorGreater)] + [InlineData(TokenType.OperatorGreaterEquals)] + [InlineData(TokenType.OperatorEqualsEquals)] + [InlineData(TokenType.OperatorBangEquals)] + public void MissingExpressionsInUnaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType) + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace), + Helpers.GetDefaultToken(operatorTokenType), + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(6, 1, 6), TokenType.Colon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Negative")] + [Trait("Category", "Unary expressions")] + [Trait("Category", "Pattern matching expressions")] + [Trait("Category", "Type expressions")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(new[] { TokenType.KeywordIs })] + [InlineData(new[] { TokenType.KeywordIs, TokenType.KeywordNot })] + public void MissingTypeInTypeCheckingUnaryPatternMatchingExpressionsShouldBeDetectedProperly(TokenType[] operatorTokenTypes) + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordMatch), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftBrace) + }.Concat(operatorTokenTypes.Select(Helpers.GetDefaultToken)).Concat(new[] + { + Helpers.GetDefaultToken(TokenType.Colon), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon), + Helpers.GetDefaultToken(TokenType.RightBrace), + Helpers.GetDefaultToken(TokenType.Semicolon) + }).ToArray(); + + var errorPosition = (uint)operatorTokenTypes.Length + 5; + var expectedError = new UnexpectedToken(new Position(errorPosition, 1, errorPosition), TokenType.Colon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should() + .BeEquivalentTo(expectedError, o => o.Excluding(i => i.Name == "ExpectedType")); + errorHandlerMock.HandledErrors[0].As().ExpectedType.Should().Contain(TokenType.KeywordInt); + 
        errorHandlerMock.HandledErrors[0].As<UnexpectedToken>().ExpectedType.Should().Contain(TokenType.KeywordFloat);
+        errorHandlerMock.HandledErrors[0].As<UnexpectedToken>().ExpectedType.Should().Contain(TokenType.KeywordString);
+        errorHandlerMock.HandledErrors[0].As<UnexpectedToken>().ExpectedType.Should().Contain(TokenType.KeywordBool);
+        errorHandlerMock.HandledErrors[0].As<UnexpectedToken>().ExpectedType.Should().Contain(TokenType.KeywordFunction);
+        errorHandlerMock.HandledErrors[0].As<UnexpectedToken>().ExpectedType.Should().Contain(TokenType.KeywordNull);
+
+        Assert.False(errorHandlerMock.HadWarnings);
+    }
+
+    [Trait("Category", "Unary expressions")]
+    [Trait("Category", "Literal expressions")]
+    [Trait("Category", "Negative")]
+    [Theory]
+    [InlineData(0ul, false, false)]
+    [InlineData(0ul, true, false)]
+    [InlineData(1234ul, false, false)]
+    [InlineData(1234ul, true, false)]
+    [InlineData(9223372036854775807ul, false, false)]
+    [InlineData(9223372036854775807ul, true, false)]
+    [InlineData(9223372036854775808ul, false, true)]
+    [InlineData(9223372036854775808ul, true, false)]
+    [InlineData(9223372036854775809ul, false, true)]
+    [InlineData(9223372036854775809ul, true, true)]
+    [InlineData(18446744073709551615ul, false, true)]
+    [InlineData(18446744073709551615ul, true, true)]
+    public void OutOfRangeIntegralUnaryExpressionsShouldBeDetectedProperly(ulong value, bool hasNegativePrefix, bool shouldEmitError)
+    {
+        var literalToken = new Token(TokenType.LiteralInteger, value);
+        var tokenSequence = (hasNegativePrefix
+            ? new[] { Helpers.GetDefaultToken(TokenType.OperatorMinus), literalToken }
+            : new[] { literalToken })
+            .AppendSemicolon();
+
+        Expression expectedExpression = hasNegativePrefix
+            ? new UnaryExpression(Operator.ArithmeticNegation, new LiteralExpression(DataType.Integer, value))
+            : new LiteralExpression(DataType.Integer, value);
+
+        var expectedError = new IntegerOutOfRange(new Position(0, 1, 0), value, hasNegativePrefix);
+
+        var lexerMock = new LexerMock(tokenSequence);
+        var errorHandlerMock = new ParserErrorHandlerMock();
+        IParser parser = new Parser(lexerMock, errorHandlerMock);
+
+        parser.Advance();
+
+        var expressionStatement = parser.CurrentStatement.As<ExpressionStatement>();
+        expressionStatement.Should().NotBeNull();
+        expressionStatement!.IsTerminated.Should().Be(true);
+
+        expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions);
+
+        if (shouldEmitError)
+            errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError);
+        else
+            Assert.False(errorHandlerMock.HadErrors);
+
+        Assert.False(errorHandlerMock.HadWarnings);
+    }
+}
diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.WhileLoop.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.WhileLoop.cs
new file mode 100644
index 0000000..b01f282
--- /dev/null
+++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.WhileLoop.cs
@@ -0,0 +1,64 @@
+using FluentAssertions;
+using Toffee.LexicalAnalysis;
+using Toffee.Scanning;
+using Toffee.SyntacticAnalysis;
+using Toffee.Tests.SyntacticAnalysis.Generators;
+using Xunit;
+
+namespace Toffee.Tests.SyntacticAnalysis;
+
+public partial class ExpressionParsingTest
+{
+    [Trait("Category", "While loop expressions")]
+    [Theory]
+    [ClassData(typeof(WhileLoopExpressionTestData))]
+    public void WhileLoopExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedCondition, Expression expectedBody)
+    {
+        var lexerMock = new LexerMock(tokenSequence);
+        var errorHandlerMock = new ParserErrorHandlerMock();
+        IParser parser = new Parser(lexerMock, errorHandlerMock);
+
+        parser.Advance();
+
var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var whileLoopExpression = expressionStatement.Expression.As(); + whileLoopExpression.Should().NotBeNull(); + whileLoopExpression.Condition.Should().BeEquivalentTo(expectedCondition, Helpers.ProvideOptions); + whileLoopExpression.Body.Should().BeEquivalentTo(expectedBody, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "While loop expressions")] + [Trait("Category", "Negative")] + [Fact] + public void MissingBodyOfWhileLoopExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordWhile), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(4, 1, 4), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.cs b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.cs new file mode 100644 index 0000000..1eff759 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ExpressionParsingTests.cs @@ -0,0 +1,166 @@ +using System.Collections.Generic; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class ExpressionParsingTest +{ + // TODO: negative tests + + [Trait("Category", "Nesting")] + [Fact] + public void NestedExpressionsShouldBeHandledCorrectly() + { + var tokenSequence = new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorEquals), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorEquals), + new Token(TokenType.LiteralInteger, 5ul), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + Helpers.GetDefaultToken(TokenType.OperatorMinus), + Helpers.GetDefaultToken(TokenType.OperatorBang), + Helpers.GetDefaultToken(TokenType.KeywordFalse), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedTree = new BinaryExpression( + new IdentifierExpression("a"), + Operator.Assignment, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Assignment, + new BinaryExpression( + new LiteralExpression(DataType.Integer, 5ul), + Operator.Addition, + new UnaryExpression( + Operator.ArithmeticNegation, + new UnaryExpression( + Operator.LogicalNegation, + new LiteralExpression(DataType.Bool, false)))))); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedTree, 
Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Associativity")] + [Trait("Category", "Nesting")] + [Theory] + [ClassData(typeof(OperatorsAssociativityTestData))] + public void ExpressionsShouldBeParsedWithRespectToOperatorsAssociativity(Token[] tokenSequence, Expression expectedExpression) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Priority")] + [Trait("Category", "Nesting")] + [Theory] + [ClassData(typeof(OperatorsPriorityTestData))] + public void ExpressionsShouldBeParsedWithRespectToOperatorsPriority(Token[] tokenSequence, Expression expectedExpression, bool shouldIgnoreErrors = false) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + if (!shouldIgnoreErrors) + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Parenthesized expressions")] + [Trait("Category", "Negative")] + [Trait("Category", "Conditional expressions")] + [Theory] + [ClassData(typeof(ParenthesizedExpressionMissingParenthesesTestData))] + public void MissingParenthesesInParenthesizedExpressionsShouldBeDetectedProperly(Token[] tokenSequence, Expression expectedExpression, params ParserError[] expectedErrors) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + + var conditionalExpression = expressionStatement.Expression.As(); + conditionalExpression.Should().BeEquivalentTo(expectedExpression, Helpers.ProvideOptions); + + for (var i = 0; i < expectedErrors.Length; i++) + errorHandlerMock.HandledErrors[i].Should().BeEquivalentTo(expectedErrors[i]); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Parenthesized expressions")] + [Trait("Category", "Negative")] + [Trait("Category", "Conditional expressions")] + [Fact] + public void MissingExpressionInParenthesizedExpressionsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordIf), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + new(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }; + + var expectedError = new ExpectedExpression(new Position(2, 1, 2), TokenType.RightParenthesis); + + var lexerMock = new LexerMock(tokenSequence); + var 
errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + // TODO: test if position of a token equals position of its first lexeme +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingClosingBraceTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingClosingBraceTestData.cs new file mode 100644 index 0000000..3946700 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingClosingBraceTestData.cs @@ -0,0 +1,160 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class BlockExpressionMissingClosingBraceTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var leftBraceToken = Helpers.GetDefaultToken(TokenType.LeftBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // empty + yield return new object[] + { + new[] + { + leftBraceToken, + semicolonToken + }, + Array.Empty(), + (null as Expression)!, + new UnexpectedToken(new Position(1, 1, 1), TokenType.Semicolon, TokenType.RightBrace), + true // with no statements there is no semicolon skipping + }; + // basic with result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a") + }, + Array.Empty(), + new IdentifierExpression("a"), + new UnexpectedToken(new Position(2, 1, 2), TokenType.Semicolon, TokenType.RightBrace), + false + }; + // basic with regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + } + }, + (null as Expression)!, + new UnexpectedToken(new Position(3, 1, 3), TokenType.Semicolon, TokenType.RightBrace), + false + }; + // basic with regular and more semicolons + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + } + }, + (null as Expression)!, + new UnexpectedToken(new Position(4, 1, 4), TokenType.Semicolon, TokenType.RightBrace), + false // semicolons are skipped as a part of statements block + }; + // with regular and result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + new(TokenType.Identifier, "b") + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + } + }, + new IdentifierExpression("b"), + new UnexpectedToken(new Position(4, 1, 4), TokenType.Semicolon, TokenType.RightBrace), + false + }; + // double regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + }, + new ExpressionStatement(new IdentifierExpression("b")) + { + IsTerminated = true + } + }, + (null as Expression)!, + new UnexpectedToken(new Position(5, 1, 5), 
TokenType.Semicolon, TokenType.RightBrace), + false + }; + // double regular and more semicolons + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + new(TokenType.Identifier, "b"), + semicolonToken, + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + }, + new ExpressionStatement(new IdentifierExpression("b")) + { + IsTerminated = true + } + }, + (null as Expression)!, + new UnexpectedToken(new Position(6, 1, 6), TokenType.Semicolon, TokenType.RightBrace), + false // semicolons are skipped as a part of statements block + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingSemicolonTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingSemicolonTestData.cs new file mode 100644 index 0000000..488980c --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionMissingSemicolonTestData.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class BlockExpressionMissingSemicolonTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var leftBraceToken = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBraceToken = Helpers.GetDefaultToken(TokenType.RightBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // missing semicolon and result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + rightBraceToken, + semicolonToken + }, + new BlockExpression(new List { new ExpressionStatement(new IdentifierExpression("a")) }, + new IdentifierExpression("b")), + new ExpectedSemicolon(new Position(2, 1, 2), TokenType.Identifier) + }; + // missing semicolon and regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken, + rightBraceToken, + semicolonToken + }, + new BlockExpression(new List + { + new ExpressionStatement(new IdentifierExpression("a")), + new ExpressionStatement(new IdentifierExpression("b")) { IsTerminated = true } + }), + new ExpectedSemicolon(new Position(2, 1, 2), TokenType.Identifier) + }; + // missing semicolon and regular and result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken, + new(TokenType.Identifier, "c"), + rightBraceToken, + semicolonToken + }, + new BlockExpression(new List + { + new ExpressionStatement(new IdentifierExpression("a")), + new ExpressionStatement(new IdentifierExpression("b")) { IsTerminated = true } + }, + new IdentifierExpression("c")), + new ExpectedSemicolon(new Position(2, 1, 2), TokenType.Identifier) + }; + // double missing semicolon and double regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + new(TokenType.Identifier, "c"), + semicolonToken, + rightBraceToken, + semicolonToken + }, + new BlockExpression(new List + { + new ExpressionStatement(new IdentifierExpression("a")), + new ExpressionStatement(new IdentifierExpression("b")), + new ExpressionStatement(new IdentifierExpression("c")) 
{ IsTerminated = true } + }), + new ExpectedSemicolon(new Position(2, 1, 2), TokenType.Identifier), + new ExpectedSemicolon(new Position(3, 1, 3), TokenType.Identifier) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionTestData.cs new file mode 100644 index 0000000..66750ea --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/BlockExpressionTestData.cs @@ -0,0 +1,111 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class BlockExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var leftBraceToken = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBraceToken = Helpers.GetDefaultToken(TokenType.RightBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // empty + yield return new object[] + { + new[] + { + leftBraceToken, + rightBraceToken, + semicolonToken + }, + Array.Empty(), + (null as Expression)! + }; + // basic with result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + rightBraceToken, + semicolonToken + }, + Array.Empty(), + new IdentifierExpression("a") + }; + // basic with regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + rightBraceToken, + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + } + }, + (null as Expression)! + }; + // with regular and result + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + new(TokenType.Identifier, "b"), + rightBraceToken, + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + } + }, + new IdentifierExpression("b") + }; + // double regular + yield return new object[] + { + new[] + { + leftBraceToken, + new(TokenType.Identifier, "a"), + semicolonToken, + new(TokenType.Identifier, "b"), + semicolonToken, + rightBraceToken, + semicolonToken + }, + new Statement[] + { + new ExpressionStatement(new IdentifierExpression("a")) + { + IsTerminated = true + }, + new ExpressionStatement(new IdentifierExpression("b")) + { + IsTerminated = true + } + }, + (null as Expression)! 
+ }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalBranchesMissingConsequentTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalBranchesMissingConsequentTestData.cs new file mode 100644 index 0000000..aff07ce --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalBranchesMissingConsequentTestData.cs @@ -0,0 +1,69 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ConditionalBranchesMissingConsequentTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var ifToken = Helpers.GetDefaultToken(TokenType.KeywordIf); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var elifToken = Helpers.GetDefaultToken(TokenType.KeywordElif); + var elseToken = Helpers.GetDefaultToken(TokenType.KeywordElse); + // if branch + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + semicolonToken + }, + new ExpectedExpression(new Position(4, 1, 4), TokenType.Semicolon) + }; + // else branch + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elseToken, + semicolonToken + }, + new ExpectedExpression(new Position(6, 1, 6), TokenType.Semicolon) + }; + // with elif + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elifToken, + leftParenthesisToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + semicolonToken + }, + new ExpectedExpression(new Position(9, 1, 9), TokenType.Semicolon) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalExpressionTestData.cs new file mode 100644 index 0000000..e1269f8 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ConditionalExpressionTestData.cs @@ -0,0 +1,136 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ConditionalExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var ifToken = Helpers.GetDefaultToken(TokenType.KeywordIf); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var elifToken = Helpers.GetDefaultToken(TokenType.KeywordElif); + var elseToken = Helpers.GetDefaultToken(TokenType.KeywordElse); + // basic + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), + 
Array.Empty(), + (null as Expression)! + }; + // with else + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elseToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), + Array.Empty(), + new IdentifierExpression("c") + }; + // with elif + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elifToken, + leftParenthesisToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + new(TokenType.Identifier, "d"), + semicolonToken + }, + new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), + new[] + { + new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")) + }, + (null as Expression)! + }; + // with more than one elif + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elifToken, + leftParenthesisToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + new(TokenType.Identifier, "d"), + elifToken, + leftParenthesisToken, + new(TokenType.Identifier, "e"), + rightParenthesisToken, + new(TokenType.Identifier, "f"), + semicolonToken + }, + new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), + new[] + { + new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")), + new ConditionalElement(new IdentifierExpression("e"), new IdentifierExpression("f")) + }, + (null as Expression)! 
+ }; + // with elif and else + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + elifToken, + leftParenthesisToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + new(TokenType.Identifier, "d"), + elseToken, + new(TokenType.Identifier, "e"), + semicolonToken + }, + new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), + new[] + { + new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")) + }, + new IdentifierExpression("e") + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ExpressionStatementTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ExpressionStatementTestData.cs new file mode 100644 index 0000000..6c86334 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ExpressionStatementTestData.cs @@ -0,0 +1,115 @@ +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ExpressionStatementTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + // block + yield return new object[] + { + (Token[])new BlockExpressionTestData().First()[0], + typeof(BlockExpression) + }; + // if + yield return new object[] + { + (Token[])new ConditionalExpressionTestData().First()[0], + typeof(ConditionalExpression) + }; + // for + yield return new object[] + { + (Token[])new ForLoopExpressionTestData().First()[0], + typeof(ForLoopExpression) + }; + // while + yield return new object[] + { + (Token[])new WhileLoopExpressionTestData().First()[0], + typeof(WhileLoopExpression) + }; + // functi + yield return new object[] + { + (Token[])new FunctionDefinitionExpressionTestData().First()[0], + typeof(FunctionDefinitionExpression) + }; + // match + yield return new object[] + { + (Token[])new PatternMatchingExpressionTestData().First()[0], + typeof(PatternMatchingExpression) + }; + // binary + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + typeof(BinaryExpression) + }; + // unary + yield return new object[] + { + new[] + { + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + typeof(UnaryExpression) + }; + // call + yield return new object[] + { + (Token[])new FunctionCallExpressionTestData().First()[0], + typeof(FunctionCallExpression) + }; + // identifier + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + typeof(IdentifierExpression) + }; + // literal + yield return new object[] + { + new[] + { + new Token(TokenType.LiteralInteger, 18ul), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + typeof(LiteralExpression) + }; + // type cast + yield return new object[] + { + new[] + { + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + typeof(TypeCastExpression) + }; + } + + 
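+ // Sketch with hypothetical names: this generator is intended to be consumed by an xUnit
+ // theory through [ClassData], in the same way the other generators in this namespace are
+ // wired up elsewhere in the test suite, e.g.:
+ //
+ //     [Theory]
+ //     [ClassData(typeof(ExpressionStatementTestData))]
+ //     public void ExpressionStatementsShouldBeParsedIntoExpectedExpressionTypes(
+ //         Token[] tokenSequence, Type expectedExpressionType)
+ //     { /* parse tokenSequence, then assert the statement's Expression is of expectedExpressionType */ }
+ //
+ // The two parameters mirror the two elements of each yielded object[] above.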
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopExpressionTestData.cs new file mode 100644 index 0000000..bfdc656 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopExpressionTestData.cs @@ -0,0 +1,135 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ForLoopExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var forToken = Helpers.GetDefaultToken(TokenType.KeywordFor); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var colonToken = Helpers.GetDefaultToken(TokenType.Colon); + var comma = Helpers.GetDefaultToken(TokenType.Comma); + // basic + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + null!, + new ForLoopRange(new IdentifierExpression("a")), + new IdentifierExpression("b") + }; + // with start:stop range + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + colonToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + null!, + new ForLoopRange(Start: new IdentifierExpression("a"), PastTheEnd: new IdentifierExpression("b")), + new IdentifierExpression("c") + }; + // with start:stop:step range + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + colonToken, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + new(TokenType.Identifier, "d"), + semicolonToken + }, + null!, + new ForLoopRange(Start: new IdentifierExpression("a"), PastTheEnd: new IdentifierExpression("b"), Step: new IdentifierExpression("c")), + new IdentifierExpression("d") + }; + // with counter + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + comma, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + "a", + new ForLoopRange(new IdentifierExpression("b")), + new IdentifierExpression("c") + }; + // with counter and start:stop range + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + comma, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + new(TokenType.Identifier, "d"), + semicolonToken + }, + "a", + new ForLoopRange(Start: new IdentifierExpression("b"), PastTheEnd: new IdentifierExpression("c")), + new IdentifierExpression("d") + }; + // with counter and start:stop:step range + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + comma, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + colonToken, + new(TokenType.Identifier, "d"), + rightParenthesisToken, + new(TokenType.Identifier, "e"), + semicolonToken + }, + "a", + new ForLoopRange(Start: new 
IdentifierExpression("b"), PastTheEnd: new IdentifierExpression("c"), Step: new IdentifierExpression("d")), + new IdentifierExpression("e") + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingParenthesesTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingParenthesesTestData.cs new file mode 100644 index 0000000..c7c0bee --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingParenthesesTestData.cs @@ -0,0 +1,62 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ForLoopSpecificationMissingParenthesesTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var forToken = Helpers.GetDefaultToken(TokenType.KeywordFor); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // left parenthesis + yield return new object[] + { + new[] + { + forToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ForLoopExpression(new ForLoopRange(new IdentifierExpression("a")), new IdentifierExpression("b")), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis) + }; + // right parenthesis + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ForLoopExpression(new ForLoopRange(new IdentifierExpression("a")), new IdentifierExpression("b")), + new UnexpectedToken(new Position(3, 1, 3), TokenType.Identifier, TokenType.LeftParenthesis) + }; + // both parentheses + yield return new object[] + { + new[] + { + forToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ForLoopExpression(new ForLoopRange(new IdentifierExpression("a")), new IdentifierExpression("b")), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis), + new UnexpectedToken(new Position(2, 1, 2), TokenType.Identifier, TokenType.LeftParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingPartsTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingPartsTestData.cs new file mode 100644 index 0000000..b19ac1b --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ForLoopSpecificationMissingPartsTestData.cs @@ -0,0 +1,133 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ForLoopSpecificationMissingPartsTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var forToken = Helpers.GetDefaultToken(TokenType.KeywordFor); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var colonToken = 
Helpers.GetDefaultToken(TokenType.Colon); + var comma = Helpers.GetDefaultToken(TokenType.Comma); + // missing range + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + rightParenthesisToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new ExpectedExpression(new Position(2, 1, 2), TokenType.RightParenthesis) + }; + // missing counter + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + comma, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ExpectedExpression(new Position(2, 1, 2), TokenType.Comma) + }; + // start:stop, missing range start + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + colonToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ExpectedExpression(new Position(2, 1, 2), TokenType.Colon) + }; + // start:stop, missing range stop + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + comma, + new(TokenType.Identifier, "b"), + colonToken, + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ExpectedExpression(new Position(6, 1, 6), TokenType.RightParenthesis) + }; + // start:stop:step, missing range start + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + colonToken, + new(TokenType.Identifier, "a"), + colonToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ExpectedExpression(new Position(2, 1, 2), TokenType.Colon) + }; + // start:stop:step, missing range stop + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + colonToken, + colonToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ExpectedExpression(new Position(4, 1, 4), TokenType.Colon) + }; + // start:stop:step, missing range step + yield return new object[] + { + new[] + { + forToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + colonToken, + new(TokenType.Identifier, "b"), + colonToken, + rightParenthesisToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new ExpectedExpression(new Position(6, 1, 6), TokenType.RightParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionMissingClosingParenthesisTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionMissingClosingParenthesisTestData.cs new file mode 100644 index 0000000..da01cec --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionMissingClosingParenthesisTestData.cs @@ -0,0 +1,45 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class FunctionCallExpressionMissingClosingParenthesisTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var commaToken = Helpers.GetDefaultToken(TokenType.Comma); 
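+ // Each case below yields: the token sequence (with the closing parenthesis omitted),
+ // the function-call expression the parser is still expected to build, and the
+ // UnexpectedToken error that should be reported in place of the missing parenthesis.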
+ // no arguments + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + leftParenthesisToken, + semicolonToken + }, + new FunctionCallExpression(new IdentifierExpression("a"), new List()), + new UnexpectedToken(new Position(2, 1, 2), TokenType.Semicolon, TokenType.RightParenthesis) + }; + // an argument + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + leftParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new FunctionCallExpression(new IdentifierExpression("a"), new List { new IdentifierExpression("b") }), + new UnexpectedToken(new Position(3, 1, 3), TokenType.Semicolon, TokenType.RightParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionTestData.cs new file mode 100644 index 0000000..aeeec37 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionCallExpressionTestData.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class FunctionCallExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var commaToken = Helpers.GetDefaultToken(TokenType.Comma); + // basic + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + leftParenthesisToken, + rightParenthesisToken, + semicolonToken + }, + new IdentifierExpression("a"), + Array.Empty() + }; + // with an argument + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + leftParenthesisToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + semicolonToken + }, + new IdentifierExpression("a"), + new[] + { + new IdentifierExpression("b") + } + }; + // with more than one argument + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + leftParenthesisToken, + new(TokenType.Identifier, "b"), + commaToken, + new(TokenType.Identifier, "c"), + rightParenthesisToken, + semicolonToken + }, + new IdentifierExpression("a"), + new[] + { + new IdentifierExpression("b"), + new IdentifierExpression("c") + } + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionMissingParenthesisTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionMissingParenthesisTestData.cs new file mode 100644 index 0000000..fa12cf6 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionMissingParenthesisTestData.cs @@ -0,0 +1,121 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class FunctionDefinitionExpressionMissingParenthesisTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var functiToken = Helpers.GetDefaultToken(TokenType.KeywordFuncti); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken 
= Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // basic, missing left parenthesis + yield return new object[] + { + new[] + { + functiToken, + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "a"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List(), + new BlockExpression(new List(), new IdentifierExpression("a"))), + new UnexpectedToken(new Position(1, 1, 1), TokenType.RightParenthesis, TokenType.LeftParenthesis) + }; + // basic, missing right parenthesis + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + leftBrace, + new(TokenType.Identifier, "a"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List(), + new BlockExpression(new List(), new IdentifierExpression("a"))), + new UnexpectedToken(new Position(2, 1, 2), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + // basic, missing both parentheses + yield return new object[] + { + new[] + { + functiToken, + leftBrace, + new(TokenType.Identifier, "a"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List(), + new BlockExpression(new List(), new IdentifierExpression("a"))), + new UnexpectedToken(new Position(1, 1, 1), TokenType.LeftBrace, TokenType.LeftParenthesis), + new UnexpectedToken(new Position(1, 1, 1), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + // with one parameter, missing left parenthesis + yield return new object[] + { + new[] + { + functiToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List { new("a") }, + new BlockExpression(new List(), new IdentifierExpression("b"))), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis) + }; + // with one parameter, missing right parenthesis + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List { new("a") }, + new BlockExpression(new List(), new IdentifierExpression("b"))), + new UnexpectedToken(new Position(3, 1, 3), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + // with one parameter, missing both parenthesis + yield return new object[] + { + new[] + { + functiToken, + new(TokenType.Identifier, "a"), + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new FunctionDefinitionExpression(new List { new("a") }, + new BlockExpression(new List(), new IdentifierExpression("b"))), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis), + new UnexpectedToken(new Position(2, 1, 2), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionTestData.cs new file mode 100644 index 0000000..a59251b --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/FunctionDefinitionExpressionTestData.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections; +using 
System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class FunctionDefinitionExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var functiToken = Helpers.GetDefaultToken(TokenType.KeywordFuncti); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var constToken = Helpers.GetDefaultToken(TokenType.KeywordConst); + var bangToken = Helpers.GetDefaultToken(TokenType.OperatorBang); + var commaToken = Helpers.GetDefaultToken(TokenType.Comma); + // basic + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "a"), + rightBrace, + semicolonToken + }, + Array.Empty(), + new BlockExpression(new List(), new IdentifierExpression("a")) + }; + // with one parameter + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new[] + { + new FunctionParameter("a") + }, + new BlockExpression(new List(), new IdentifierExpression("b")) + }; + // with one const parameter + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + constToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new[] + { + new FunctionParameter(IsConst: true, Name: "a") + }, + new BlockExpression(new List(), new IdentifierExpression("b")) + }; + // with one required parameter + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + bangToken, + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new[] + { + new FunctionParameter("a", IsNullAllowed: false) + }, + new BlockExpression(new List(), new IdentifierExpression("b")) + }; + // with more than one parameter + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + commaToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "c"), + rightBrace, + semicolonToken + }, + new[] + { + new FunctionParameter("a"), + new FunctionParameter("b") + }, + new BlockExpression(new List(), new IdentifierExpression("c")) + }; + // with more than one parameter (including one const and non-nullable) + yield return new object[] + { + new[] + { + functiToken, + leftParenthesisToken, + constToken, + new(TokenType.Identifier, "a"), + bangToken, + commaToken, + new(TokenType.Identifier, "b"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "c"), + rightBrace, + semicolonToken + }, + new[] + { + new FunctionParameter(IsConst: true, Name: "a", IsNullAllowed: false), + new FunctionParameter("b") + }, + new BlockExpression(new List(), new IdentifierExpression("c")) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceAccessExpressionNonIdentifiersTestData.cs 
b/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceAccessExpressionNonIdentifiersTestData.cs new file mode 100644 index 0000000..2a0967f --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceAccessExpressionNonIdentifiersTestData.cs @@ -0,0 +1,69 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class NamespaceAccessExpressionNonIdentifiersTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var dotToken = Helpers.GetDefaultToken(TokenType.OperatorDot); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + // after dot + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + dotToken, + new(TokenType.LiteralString, "b"), + semicolonToken + }, + new BinaryExpression(new IdentifierExpression("a"), Operator.NamespaceAccess, + new LiteralExpression(DataType.String, "b")), + new ExpectedIdentifier(new Position(2, 1, 2), typeof(LiteralExpression)) + }; + // before dot + yield return new object[] + { + new[] + { + new(TokenType.LiteralFloat, 3.14), + dotToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new BinaryExpression(new LiteralExpression(DataType.Float, 3.14), Operator.NamespaceAccess, + new IdentifierExpression("a")), + new ExpectedIdentifier(new Position(0, 1, 0), typeof(LiteralExpression)) + }; + // in the middle + yield return new object[] + { + new[] + { + new(TokenType.Identifier, "a"), + dotToken, + new(TokenType.Identifier, "b"), + leftParenthesisToken, + rightParenthesisToken, + dotToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new BinaryExpression(new FunctionCallExpression(new BinaryExpression(new IdentifierExpression("a"), + Operator.NamespaceAccess, + new IdentifierExpression("b")), new List()), + Operator.NamespaceAccess, + new IdentifierExpression("c")), + new ExpectedIdentifier(new Position(0, 1, 0), typeof(FunctionCallExpression)) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceImportStatementNonIdentifiersTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceImportStatementNonIdentifiersTestData.cs new file mode 100644 index 0000000..648b9a3 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/NamespaceImportStatementNonIdentifiersTestData.cs @@ -0,0 +1,75 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class NamespaceImportStatementNonIdentifiersTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var pullToken = Helpers.GetDefaultToken(TokenType.KeywordPull); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var dotToken = Helpers.GetDefaultToken(TokenType.OperatorDot); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + // basic + yield return new object[] + { + new[] + { + pullToken, + new(TokenType.LiteralInteger, 1234ul), + semicolonToken + }, + new UnexpectedToken(new Position(1, 1, 1), 
TokenType.LiteralInteger, TokenType.Identifier) + }; + // after dot + yield return new object[] + { + new[] + { + pullToken, + new(TokenType.Identifier, "a"), + dotToken, + new(TokenType.LiteralString, "b"), + semicolonToken + }, + new UnexpectedToken(new Position(3, 1, 3), TokenType.LiteralString, TokenType.Identifier) + }; + // before dot + yield return new object[] + { + new[] + { + pullToken, + new(TokenType.LiteralFloat, 3.14), + dotToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new UnexpectedToken(new Position(1, 1, 1), TokenType.LiteralFloat, TokenType.Identifier) + }; + // in the middle + yield return new object[] + { + new[] + { + pullToken, + new(TokenType.Identifier, "a"), + dotToken, + new(TokenType.Identifier, "b"), + leftParenthesisToken, + rightParenthesisToken, + dotToken, + new(TokenType.Identifier, "c"), + semicolonToken + }, + new UnexpectedToken(new Position(4, 1, 4), TokenType.LeftParenthesis, TokenType.Identifier) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsAssociativityTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsAssociativityTestData.cs new file mode 100644 index 0000000..d4a4584 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsAssociativityTestData.cs @@ -0,0 +1,174 @@ +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class OperatorsAssociativityTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + static object[] GenerateLeftBinary(TokenType tokenType, Operator @operator) => + new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(tokenType), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(tokenType), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + @operator, + new IdentifierExpression("b")), + @operator, + new IdentifierExpression("c")) + }; + static object[] GenerateRightBinary(TokenType tokenType, Operator @operator) => + new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(tokenType), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(tokenType), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + @operator, + new BinaryExpression( + new IdentifierExpression("b"), + @operator, + new IdentifierExpression("c"))) + }; + static object[] GenerateRightUnary(TokenType tokenType, Operator @operator) => + new object[] + { + new[] + { + Helpers.GetDefaultToken(tokenType), + Helpers.GetDefaultToken(tokenType), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new UnaryExpression( + @operator, + new UnaryExpression( + @operator, + new IdentifierExpression("a"))) + }; + static object[] GenerateTypeCheck(bool isNegated = false) => + new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.Semicolon) + }.SelectMany(x => + x.Type == 
TokenType.KeywordIs + ? isNegated ? new[] { x, Helpers.GetDefaultToken(TokenType.KeywordNot) } : new[] { x } + : new[] { x }).ToArray(), + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + isNegated ? Operator.NotEqualTypeCheck : Operator.EqualTypeCheck, + new TypeExpression(DataType.Integer)), + isNegated ? Operator.NotEqualTypeCheck : Operator.EqualTypeCheck, + new TypeExpression(DataType.Integer)) + }; + + // . + yield return GenerateLeftBinary(TokenType.OperatorDot, Operator.NamespaceAccess); + // () + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new FunctionCallExpression( + new FunctionCallExpression( + new IdentifierExpression("a"), + new List { new IdentifierExpression("b") }), + new List { new IdentifierExpression("c") }) + }; + // ^ + yield return GenerateRightBinary(TokenType.OperatorCaret, Operator.Exponentiation); + // unary + + yield return GenerateRightUnary(TokenType.OperatorPlus, Operator.NumberPromotion); + // unary - + yield return GenerateRightUnary(TokenType.OperatorMinus, Operator.ArithmeticNegation); + // unary ! + yield return GenerateRightUnary(TokenType.OperatorBang, Operator.LogicalNegation); + // * + yield return GenerateLeftBinary(TokenType.OperatorAsterisk, Operator.Multiplication); + // / + yield return GenerateLeftBinary(TokenType.OperatorSlash, Operator.Division); + // % + yield return GenerateLeftBinary(TokenType.OperatorPercent, Operator.Remainder); + // binary + + yield return GenerateLeftBinary(TokenType.OperatorPlus, Operator.Addition); + // binary - + yield return GenerateLeftBinary(TokenType.OperatorMinus, Operator.Subtraction); + // .. + yield return GenerateLeftBinary(TokenType.OperatorDotDot, Operator.Concatenation); + // < + yield return GenerateLeftBinary(TokenType.OperatorLess, Operator.LessThanComparison); + // <= + yield return GenerateLeftBinary(TokenType.OperatorLessEquals, Operator.LessOrEqualComparison); + // > + yield return GenerateLeftBinary(TokenType.OperatorGreater, Operator.GreaterThanComparison); + // >= + yield return GenerateLeftBinary(TokenType.OperatorGreaterEquals, Operator.GreaterOrEqualComparison); + // == + yield return GenerateLeftBinary(TokenType.OperatorEqualsEquals, Operator.EqualComparison); + // != + yield return GenerateLeftBinary(TokenType.OperatorBangEquals, Operator.NotEqualComparison); + // is + yield return GenerateTypeCheck(); + // is not + yield return GenerateTypeCheck(true); + // && + yield return GenerateLeftBinary(TokenType.OperatorAndAnd, Operator.Conjunction); + // || + yield return GenerateLeftBinary(TokenType.OperatorOrOr, Operator.Disjunction); + // ?> + yield return GenerateLeftBinary(TokenType.OperatorQueryGreater, Operator.NullSafePipe); + // ?? 
+ yield return GenerateLeftBinary(TokenType.OperatorQueryQuery, Operator.NullCoalescing); + // = + yield return GenerateRightBinary(TokenType.OperatorEquals, Operator.Assignment); + // += + yield return GenerateRightBinary(TokenType.OperatorPlusEquals, Operator.AdditionAssignment); + // -= + yield return GenerateRightBinary(TokenType.OperatorMinusEquals, Operator.SubtractionAssignment); + // *= + yield return GenerateRightBinary(TokenType.OperatorAsteriskEquals, Operator.MultiplicationAssignment); + // /= + yield return GenerateRightBinary(TokenType.OperatorSlashEquals, Operator.DivisionAssignment); + // %= + yield return GenerateRightBinary(TokenType.OperatorPercentEquals, Operator.RemainderAssignment); + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsPriorityTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsPriorityTestData.cs new file mode 100644 index 0000000..a07f026 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/OperatorsPriorityTestData.cs @@ -0,0 +1,533 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class OperatorsPriorityTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + // same priority () . + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorDot), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.LeftParenthesis), + Helpers.GetDefaultToken(TokenType.RightParenthesis), + Helpers.GetDefaultToken(TokenType.OperatorDot), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new FunctionCallExpression(new BinaryExpression( + new IdentifierExpression("a"), + Operator.NamespaceAccess, + new IdentifierExpression("b")), new List()), + Operator.NamespaceAccess, + new IdentifierExpression("c")), + true // ignore errors (function call cannot be a part of namespace access) + }; + // . higher than ^ + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorCaret), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorDot), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Exponentiation, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.NamespaceAccess, + new IdentifierExpression("c"))) + }; + // ^ higher than unary + + yield return new object[] + { + new[] + { + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorCaret), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new UnaryExpression( + Operator.NumberPromotion, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Exponentiation, + new IdentifierExpression("b"))) + }; + // same priority unary + unary - unary ! 
+ yield return new object[] + { + new[] + { + Helpers.GetDefaultToken(TokenType.OperatorPlus), + Helpers.GetDefaultToken(TokenType.OperatorMinus), + Helpers.GetDefaultToken(TokenType.OperatorBang), + Helpers.GetDefaultToken(TokenType.OperatorMinus), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new UnaryExpression( + Operator.NumberPromotion, + new UnaryExpression( + Operator.ArithmeticNegation, + new UnaryExpression( + Operator.LogicalNegation, + new UnaryExpression( + Operator.ArithmeticNegation, + new UnaryExpression( + Operator.NumberPromotion, + new IdentifierExpression("a")))))) + }; + // unary + higher than * + yield return new object[] + { + new[] + { + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorAsterisk), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new UnaryExpression( + Operator.NumberPromotion, + new IdentifierExpression("a")), + Operator.Multiplication, + new IdentifierExpression("b")) + }; + // same priority * / % + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorAsterisk), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorSlash), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.OperatorPercent), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.OperatorSlash), + new Token(TokenType.Identifier, "e"), + Helpers.GetDefaultToken(TokenType.OperatorAsterisk), + new Token(TokenType.Identifier, "f"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Multiplication, + new IdentifierExpression("b")), + Operator.Division, + new IdentifierExpression("c")), + Operator.Remainder, + new IdentifierExpression("d")), + Operator.Division, + new IdentifierExpression("e")), + Operator.Multiplication, + new IdentifierExpression("f")) + }; + // * higher than binary + + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorAsterisk), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Addition, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Multiplication, + new IdentifierExpression("c"))) + }; + // same priority binary + binary - + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorMinus), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Addition, + new IdentifierExpression("b")), + Operator.Subtraction, + new IdentifierExpression("c")), + Operator.Addition, + new IdentifierExpression("d")) + }; + // binary + higher than 
.. + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorDotDot), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorPlus), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Concatenation, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Addition, + new IdentifierExpression("c"))) + }; + // .. higher than < + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorLess), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorDotDot), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.LessThanComparison, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Concatenation, + new IdentifierExpression("c"))) + }; + // same priority < <= > >= == != + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorLess), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorLessEquals), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.OperatorGreater), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.OperatorGreaterEquals), + new Token(TokenType.Identifier, "e"), + Helpers.GetDefaultToken(TokenType.OperatorEqualsEquals), + new Token(TokenType.Identifier, "f"), + Helpers.GetDefaultToken(TokenType.OperatorBangEquals), + new Token(TokenType.Identifier, "g"), + Helpers.GetDefaultToken(TokenType.OperatorEqualsEquals), + new Token(TokenType.Identifier, "h"), + Helpers.GetDefaultToken(TokenType.OperatorGreaterEquals), + new Token(TokenType.Identifier, "i"), + Helpers.GetDefaultToken(TokenType.OperatorGreater), + new Token(TokenType.Identifier, "j"), + Helpers.GetDefaultToken(TokenType.OperatorLessEquals), + new Token(TokenType.Identifier, "k"), + Helpers.GetDefaultToken(TokenType.OperatorLess), + new Token(TokenType.Identifier, "l"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + Operator.LessThanComparison, + new IdentifierExpression("b")), + Operator.LessOrEqualComparison, + new IdentifierExpression("c")), + Operator.GreaterThanComparison, + new IdentifierExpression("d")), + Operator.GreaterOrEqualComparison, + new IdentifierExpression("e")), + Operator.EqualComparison, + new IdentifierExpression("f")), + Operator.NotEqualComparison, + new IdentifierExpression("g")), + Operator.EqualComparison, + new IdentifierExpression("h")), + Operator.GreaterOrEqualComparison, + new IdentifierExpression("i")), + Operator.GreaterThanComparison, + new IdentifierExpression("j")), + Operator.LessOrEqualComparison, + new IdentifierExpression("k")), + Operator.LessThanComparison, + new IdentifierExpression("l")) + }; + // < higher than is + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorLess), + new Token(TokenType.Identifier, "b"), + 
Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + Operator.LessThanComparison, + new IdentifierExpression("b") + ), + Operator.EqualTypeCheck, + new TypeExpression(DataType.Integer)) + }; + // < higher than is not + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorLess), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordNot), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new BinaryExpression( + new IdentifierExpression("a"), + Operator.LessThanComparison, + new IdentifierExpression("b") + ), + Operator.NotEqualTypeCheck, + new TypeExpression(DataType.Integer)) + }; + // is higher than && + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorAndAnd), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Conjunction, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.EqualTypeCheck, + new TypeExpression(DataType.Integer))) + }; + // is not higher than && + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorAndAnd), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.KeywordIs), + Helpers.GetDefaultToken(TokenType.KeywordNot), + Helpers.GetDefaultToken(TokenType.KeywordInt), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Conjunction, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.NotEqualTypeCheck, + new TypeExpression(DataType.Integer))) + }; + // && higher than || + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorOrOr), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorAndAnd), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Disjunction, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Conjunction, + new IdentifierExpression("c"))) + }; + // || higher than ?> + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorQueryGreater), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorOrOr), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.NullSafePipe, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.Disjunction, + new IdentifierExpression("c"))) + }; + // ?> higher than ?? 
+ yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorQueryQuery), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorQueryGreater), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.NullCoalescing, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.NullSafePipe, + new IdentifierExpression("c"))) + }; + // ?? higher than = + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorEquals), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorQueryQuery), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Assignment, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.NullCoalescing, + new IdentifierExpression("c"))) + }; + // same priority = += -= *= /= %= + yield return new object[] + { + new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorEquals), + new Token(TokenType.Identifier, "b"), + Helpers.GetDefaultToken(TokenType.OperatorPlusEquals), + new Token(TokenType.Identifier, "c"), + Helpers.GetDefaultToken(TokenType.OperatorMinusEquals), + new Token(TokenType.Identifier, "d"), + Helpers.GetDefaultToken(TokenType.OperatorAsteriskEquals), + new Token(TokenType.Identifier, "e"), + Helpers.GetDefaultToken(TokenType.OperatorSlashEquals), + new Token(TokenType.Identifier, "f"), + Helpers.GetDefaultToken(TokenType.OperatorPercentEquals), + new Token(TokenType.Identifier, "g"), + Helpers.GetDefaultToken(TokenType.OperatorSlashEquals), + new Token(TokenType.Identifier, "h"), + Helpers.GetDefaultToken(TokenType.OperatorAsteriskEquals), + new Token(TokenType.Identifier, "i"), + Helpers.GetDefaultToken(TokenType.OperatorMinusEquals), + new Token(TokenType.Identifier, "j"), + Helpers.GetDefaultToken(TokenType.OperatorPlusEquals), + new Token(TokenType.Identifier, "k"), + Helpers.GetDefaultToken(TokenType.OperatorEquals), + new Token(TokenType.Identifier, "l"), + Helpers.GetDefaultToken(TokenType.Semicolon) + }, + new BinaryExpression( + new IdentifierExpression("a"), + Operator.Assignment, + new BinaryExpression( + new IdentifierExpression("b"), + Operator.AdditionAssignment, + new BinaryExpression( + new IdentifierExpression("c"), + Operator.SubtractionAssignment, + new BinaryExpression( + new IdentifierExpression("d"), + Operator.MultiplicationAssignment, + new BinaryExpression( + new IdentifierExpression("e"), + Operator.DivisionAssignment, + new BinaryExpression( + new IdentifierExpression("f"), + Operator.RemainderAssignment, + new BinaryExpression( + new IdentifierExpression("g"), + Operator.DivisionAssignment, + new BinaryExpression( + new IdentifierExpression("h"), + Operator.MultiplicationAssignment, + new BinaryExpression( + new IdentifierExpression("i"), + Operator.SubtractionAssignment, + new BinaryExpression( + new IdentifierExpression("j"), + Operator.AdditionAssignment, + new BinaryExpression( + new IdentifierExpression("k"), + Operator.Assignment, + new IdentifierExpression("l")))))))))))) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/ParenthesizedExpressionMissingParenthesesTestData.cs 
b/Toffee.Tests/SyntacticAnalysis/Generators/ParenthesizedExpressionMissingParenthesesTestData.cs new file mode 100644 index 0000000..27f423d --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/ParenthesizedExpressionMissingParenthesesTestData.cs @@ -0,0 +1,65 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class ParenthesizedExpressionMissingParenthesesTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var ifToken = Helpers.GetDefaultToken(TokenType.KeywordIf); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + // left parenthesis + yield return new object[] + { + new[] + { + ifToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ConditionalExpression(new ConditionalElement(new IdentifierExpression("a"), + new IdentifierExpression("b")), new List()), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis) + }; + // right parenthesis + yield return new object[] + { + new[] + { + ifToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ConditionalExpression(new ConditionalElement(new IdentifierExpression("a"), + new IdentifierExpression("b")), new List()), + new UnexpectedToken(new Position(3, 1, 3), TokenType.Identifier, TokenType.RightParenthesis) + }; + // both parentheses + yield return new object[] + { + new[] + { + ifToken, + new(TokenType.Identifier, "a"), + new(TokenType.Identifier, "b"), + semicolonToken + }, + new ConditionalExpression(new ConditionalElement(new IdentifierExpression("a"), + new IdentifierExpression("b")), new List()), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis), + new UnexpectedToken(new Position(2, 1, 2), TokenType.Identifier, TokenType.RightParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingBranchesMissingConsequent.cs b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingBranchesMissingConsequent.cs new file mode 100644 index 0000000..76e7364 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingBranchesMissingConsequent.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class PatternMatchingBranchesMissingConsequent : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var matchToken = Helpers.GetDefaultToken(TokenType.KeywordMatch); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var colonToken = Helpers.GetDefaultToken(TokenType.Colon); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var defaultToken = Helpers.GetDefaultToken(TokenType.KeywordDefault); 
+ // with non-default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + colonToken, + semicolonToken, + rightBrace, + semicolonToken + }, + new ExpectedExpression(new Position(7, 1, 7), TokenType.Semicolon) + }; + // with default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + defaultToken, + colonToken, + semicolonToken, + rightBrace, + semicolonToken + }, + new ExpectedExpression(new Position(7, 1, 7), TokenType.Semicolon) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionMissingParenthesesTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionMissingParenthesesTestData.cs new file mode 100644 index 0000000..0b2b735 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionMissingParenthesesTestData.cs @@ -0,0 +1,67 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class PatternMatchingExpressionMissingParenthesesTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var matchToken = Helpers.GetDefaultToken(TokenType.KeywordMatch); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // missing left parenthesis + yield return new object[] + { + new[] + { + matchToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), new List()), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis) + }; + // missing right parenthesis + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + leftBrace, + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), new List()), + new UnexpectedToken(new Position(3, 1, 3), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + // missing both parentheses + yield return new object[] + { + new[] + { + matchToken, + new(TokenType.Identifier, "a"), + leftBrace, + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), new List()), + new UnexpectedToken(new Position(1, 1, 1), TokenType.Identifier, TokenType.LeftParenthesis), + new UnexpectedToken(new Position(2, 1, 2), TokenType.LeftBrace, TokenType.RightParenthesis) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionTestData.cs new file mode 100644 index 0000000..2daab19 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingExpressionTestData.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections; +using 
System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class PatternMatchingExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var matchToken = Helpers.GetDefaultToken(TokenType.KeywordMatch); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var colonToken = Helpers.GetDefaultToken(TokenType.Colon); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var defaultToken = Helpers.GetDefaultToken(TokenType.KeywordDefault); + // basic + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + rightBrace, + semicolonToken + }, + new IdentifierExpression("a"), + Array.Empty(), + (null as Expression)! + }; + // with non-default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + semicolonToken, + rightBrace, + semicolonToken + }, + new IdentifierExpression("a"), + new[] + { + new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")) + }, + (null as Expression)! + }; + // with default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + defaultToken, + colonToken, + new(TokenType.Identifier, "b"), + semicolonToken, + rightBrace, + semicolonToken + }, + new IdentifierExpression("a"), + Array.Empty(), + new IdentifierExpression("b") + }; + // with more than one non-default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + semicolonToken, + new(TokenType.Identifier, "d"), + colonToken, + new(TokenType.Identifier, "e"), + semicolonToken, + rightBrace, + semicolonToken + }, + new IdentifierExpression("a"), + new[] + { + new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")), + new PatternMatchingBranch(new IdentifierExpression("d"), new IdentifierExpression("e")) + }, + (null as Expression)! 
+ }; + // with both non-default and default branch + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + semicolonToken, + defaultToken, + colonToken, + new(TokenType.Identifier, "d"), + semicolonToken, + rightBrace, + semicolonToken + }, + new IdentifierExpression("a"), + new[] + { + new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")) + }, + new IdentifierExpression("d") + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingSpecificationMissingColonOrSemicolonTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingSpecificationMissingColonOrSemicolonTestData.cs new file mode 100644 index 0000000..6732d80 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/PatternMatchingSpecificationMissingColonOrSemicolonTestData.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class PatternMatchingSpecificationMissingColonOrSemicolonTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var matchToken = Helpers.GetDefaultToken(TokenType.KeywordMatch); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var leftBrace = Helpers.GetDefaultToken(TokenType.LeftBrace); + var rightBrace = Helpers.GetDefaultToken(TokenType.RightBrace); + var colonToken = Helpers.GetDefaultToken(TokenType.Colon); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var defaultToken = Helpers.GetDefaultToken(TokenType.KeywordDefault); + // non-default branch, missing colon + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + new(TokenType.Identifier, "c"), + semicolonToken, + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), + new List { new(new IdentifierExpression("b"), new IdentifierExpression("c")) }), + new UnexpectedToken(new Position(6, 1, 6), TokenType.Identifier, TokenType.Colon) + }; + // non-default branch, missing semicolon + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + new(TokenType.Identifier, "b"), + colonToken, + new(TokenType.Identifier, "c"), + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), + new List { new(new IdentifierExpression("b"), new IdentifierExpression("c")) }), + new UnexpectedToken(new Position(8, 1, 8), TokenType.Identifier, TokenType.Semicolon) + }; + // default branch, missing colon + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + defaultToken, + new(TokenType.Identifier, "b"), + semicolonToken, + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), new List(), + new IdentifierExpression("b")), + new UnexpectedToken(new Position(6, 1, 6), 
TokenType.Identifier, TokenType.Colon) + }; + // default branch, missing semicolon + yield return new object[] + { + new[] + { + matchToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + leftBrace, + defaultToken, + colonToken, + new(TokenType.Identifier, "b"), + rightBrace, + semicolonToken + }, + new PatternMatchingExpression(new IdentifierExpression("a"), new List(), + new IdentifierExpression("b")), + new UnexpectedToken(new Position(8, 1, 8), TokenType.Identifier, TokenType.Semicolon) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementMissingVariableTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementMissingVariableTestData.cs new file mode 100644 index 0000000..2bde93c --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementMissingVariableTestData.cs @@ -0,0 +1,70 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class VariableInitializationListStatementMissingVariableTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var initToken = Helpers.GetDefaultToken(TokenType.KeywordInit); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var constToken = Helpers.GetDefaultToken(TokenType.KeywordConst); + var assignmentToken = Helpers.GetDefaultToken(TokenType.OperatorEquals); + var commaToken = Helpers.GetDefaultToken(TokenType.Comma); + // basic + yield return new object[] + { + new[] + { + initToken, + semicolonToken + }, + new UnexpectedToken(new Position(1, 1, 1), TokenType.Semicolon, TokenType.Identifier) + }; + // after basic + yield return new object[] + { + new[] + { + initToken, + new(TokenType.Identifier, "a"), + commaToken, + semicolonToken + }, + new UnexpectedToken(new Position(3, 1, 3), TokenType.Semicolon, TokenType.Identifier) + }; + // after const + yield return new object[] + { + new[] + { + initToken, + constToken, + new(TokenType.Identifier, "a"), + commaToken, + semicolonToken + }, + new UnexpectedToken(new Position(4, 1, 4), TokenType.Semicolon, TokenType.Identifier) + }; + // after initialization + yield return new object[] + { + new[] + { + initToken, + new(TokenType.Identifier, "a"), + assignmentToken, + new Token(TokenType.LiteralInteger, 5ul), + commaToken, + semicolonToken + }, + new UnexpectedToken(new Position(5, 1, 5), TokenType.Semicolon, TokenType.Identifier) + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementTestData.cs new file mode 100644 index 0000000..a209b39 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/VariableInitializationListStatementTestData.cs @@ -0,0 +1,119 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class VariableInitializationListStatementTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var initToken = Helpers.GetDefaultToken(TokenType.KeywordInit); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + var 
constToken = Helpers.GetDefaultToken(TokenType.KeywordConst); + var assignmentToken = Helpers.GetDefaultToken(TokenType.OperatorEquals); + var commaToken = Helpers.GetDefaultToken(TokenType.Comma); + // basic + yield return new object[] + { + new[] + { + initToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new VariableInitialization[] + { + new("a") + } + }; + // with const + yield return new object[] + { + new[] + { + initToken, + constToken, + new(TokenType.Identifier, "a"), + semicolonToken + }, + new VariableInitialization[] + { + new(IsConst: true, Name: "a") + } + }; + // with initialization + yield return new object[] + { + new[] + { + initToken, + new(TokenType.Identifier, "a"), + assignmentToken, + new Token(TokenType.LiteralInteger, 5ul), + semicolonToken + }, + new VariableInitialization[] + { + new("a", new LiteralExpression(DataType.Integer, 5ul)) + } + }; + // with const and initialization + yield return new object[] + { + new[] + { + initToken, + constToken, + new(TokenType.Identifier, "a"), + assignmentToken, + new Token(TokenType.LiteralInteger, 5ul), + semicolonToken + }, + new VariableInitialization[] + { + new(IsConst: true, Name: "a", InitialValue: new LiteralExpression(DataType.Integer, 5ul)) + } + }; + // more than one + yield return new object[] + { + new[] + { + initToken, + new(TokenType.Identifier, "a"), + commaToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new VariableInitialization[] + { + new("a"), + new("b") + } + }; + // more than one, with const and initialization + yield return new object[] + { + new[] + { + initToken, + constToken, + new(TokenType.Identifier, "a"), + assignmentToken, + new Token(TokenType.LiteralInteger, 5ul), + commaToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new VariableInitialization[] + { + new(IsConst: true, Name: "a", InitialValue: new LiteralExpression(DataType.Integer, 5ul)), + new("b") + } + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/Generators/WhileLoopExpressionTestData.cs b/Toffee.Tests/SyntacticAnalysis/Generators/WhileLoopExpressionTestData.cs new file mode 100644 index 0000000..17f0cea --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/Generators/WhileLoopExpressionTestData.cs @@ -0,0 +1,34 @@ +using System.Collections; +using System.Collections.Generic; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis.Generators; + +public class WhileLoopExpressionTestData : IEnumerable +{ + public IEnumerator GetEnumerator() + { + var whileToken = Helpers.GetDefaultToken(TokenType.KeywordWhile); + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + var semicolonToken = Helpers.GetDefaultToken(TokenType.Semicolon); + // basic + yield return new object[] + { + new[] + { + whileToken, + leftParenthesisToken, + new(TokenType.Identifier, "a"), + rightParenthesisToken, + new(TokenType.Identifier, "b"), + semicolonToken + }, + new IdentifierExpression("a"), + new IdentifierExpression("b") + }; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); +} diff --git a/Toffee.Tests/SyntacticAnalysis/ParserTests.cs b/Toffee.Tests/SyntacticAnalysis/Helpers.cs similarity index 85% rename from Toffee.Tests/SyntacticAnalysis/ParserTests.cs rename to Toffee.Tests/SyntacticAnalysis/Helpers.cs index dcb9363..a15f86d 100644 --- 
a/Toffee.Tests/SyntacticAnalysis/ParserTests.cs +++ b/Toffee.Tests/SyntacticAnalysis/Helpers.cs @@ -1,17 +1,22 @@ using System; +using System.Collections.Generic; +using System.Linq; using FluentAssertions.Equivalency; using Toffee.LexicalAnalysis; namespace Toffee.Tests.SyntacticAnalysis; -public partial class ParserTests +public static class Helpers { - private static EquivalencyAssertionOptions ProvideOptions(EquivalencyAssertionOptions options) => + public static EquivalencyAssertionOptions ProvideOptions(EquivalencyAssertionOptions options) => options.RespectingRuntimeTypes() .AllowingInfiniteRecursion() - .Excluding(info => info.Name == "Position"); + .Excluding(info => info.Name == "StartPosition" || info.Name == "EndPosition"); - private static Token GetDefaultToken(TokenType type) => new(type, MapTokenTypeToContent(type)); + public static Token GetDefaultToken(TokenType type) => new(type, MapTokenTypeToContent(type)); + + public static Token[] AppendSemicolon(this IEnumerable<Token> tokenSequence) => + tokenSequence.Append(GetDefaultToken(TokenType.Semicolon)).ToArray(); private static string MapTokenTypeToContent(TokenType type) { diff --git a/Toffee.Tests/SyntacticAnalysis/LexerMock.cs b/Toffee.Tests/SyntacticAnalysis/LexerMock.cs index 69cd8fa..60c073a 100644 --- a/Toffee.Tests/SyntacticAnalysis/LexerMock.cs +++ b/Toffee.Tests/SyntacticAnalysis/LexerMock.cs @@ -14,10 +14,9 @@ public class LexerMock : ILexer public int MaxLexemeLength { get => int.MaxValue; - set => throw new NotImplementedException(); + set => throw new NotSupportedException(); } - public bool HadError => false; public LexerError? CurrentError => null; public Token CurrentToken => (_currentOutputIndex < _outputBuffer.Length @@ -36,6 +35,4 @@ public Token Advance() _currentOutputIndex++; return supersededToken; } - - public void ResetError() => throw new NotImplementedException(); } diff --git a/Toffee.Tests/SyntacticAnalysis/ParserErrorHandlerMock.cs b/Toffee.Tests/SyntacticAnalysis/ParserErrorHandlerMock.cs new file mode 100644 index 0000000..c9608c8 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/ParserErrorHandlerMock.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using Toffee.ErrorHandling; +using Toffee.SyntacticAnalysis; + +namespace Toffee.Tests.SyntacticAnalysis; + +public class ParserErrorHandlerMock : IParserErrorHandler +{ + public List<ParserError> HandledErrors = new(); + public List<ParserWarning> HandledWarnings = new(); + + public bool HadErrors => HandledErrors.Count > 0; + public bool HadWarnings => HandledWarnings.Count > 0; + + public void Handle(ParserError parserError) => HandledErrors.Add(parserError); + public void Handle(ParserWarning parserWarning) => HandledWarnings.Add(parserWarning); +} diff --git a/Toffee.Tests/SyntacticAnalysis/ParserTests.Expressions.cs b/Toffee.Tests/SyntacticAnalysis/ParserTests.Expressions.cs deleted file mode 100644 index 396aa92..0000000 --- a/Toffee.Tests/SyntacticAnalysis/ParserTests.Expressions.cs +++ /dev/null @@ -1,1927 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using FluentAssertions; -using Toffee.LexicalAnalysis; -using Toffee.SyntacticAnalysis; -using Xunit; - -namespace Toffee.Tests.SyntacticAnalysis; - -public partial class ParserTests -{ - [Trait("Category", "Literal expressions")] - [Theory] - [InlineData(TokenType.LiteralInteger, 1234L, DataType.Integer, 1234L)] - [InlineData(TokenType.LiteralFloat, 3.14, DataType.Float, 3.14)] - [InlineData(TokenType.LiteralString, "abcd", DataType.String, "abcd")] - [InlineData(TokenType.KeywordTrue,
"true", DataType.Bool, true)] - [InlineData(TokenType.KeywordFalse, "false", DataType.Bool, false)] - [InlineData(TokenType.KeywordNull, null, DataType.Null, null)] - public void LiteralExpressionsShouldBeParsedCorrectly(TokenType literalTokenType, object? literalTokenContent, DataType literalType, object? literalValue) - { - var literalToken = new Token(literalTokenType, literalTokenContent); - - var lexerMock = new LexerMock(literalToken); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression!.Type.Should().Be(literalType); - expression.Value.Should().Be(literalValue); - } - - [Trait("Category", "Identifier expressions")] - [Fact] - public void IdentifierExpressionsShouldBeParsedCorrectly() - { - const string identifierName = "ident"; - var identifierToken = new Token(TokenType.Identifier, identifierName); - - var lexerMock = new LexerMock(identifierToken); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression!.Name.Should().Be(identifierName); - } - - [Trait("Category", "Type expressions")] - [Theory] - [InlineData(TokenType.KeywordInt, DataType.Integer)] - [InlineData(TokenType.KeywordFloat, DataType.Float)] - [InlineData(TokenType.KeywordString, DataType.String)] - [InlineData(TokenType.KeywordBool, DataType.Bool)] - [InlineData(TokenType.KeywordFunction, DataType.Function)] - [InlineData(TokenType.KeywordNull, DataType.Null)] - public void TypeExpressionsShouldBeParsedCorrectly(TokenType typeTokenType, DataType type) - { - var tokenSequence = new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(typeTokenType) - }; - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var binaryExpression = expressionStatement.Expression.As(); - binaryExpression.Should().NotBeNull(); - - var typeExpression = binaryExpression.Right.As(); - typeExpression.Should().NotBeNull(); - typeExpression!.Type.Should().Be(type); - } - - [Trait("Category", "Binary expressions")] - [Theory] - [InlineData(TokenType.OperatorDot, Operator.NamespaceAccess)] - [InlineData(TokenType.OperatorCaret, Operator.Exponentiation)] - [InlineData(TokenType.OperatorPlus, Operator.Addition)] - [InlineData(TokenType.OperatorMinus, Operator.Subtraction)] - [InlineData(TokenType.OperatorAsterisk, Operator.Multiplication)] - [InlineData(TokenType.OperatorSlash, Operator.Division)] - [InlineData(TokenType.OperatorPercent, Operator.Remainder)] - [InlineData(TokenType.OperatorDotDot, Operator.Concatenation)] - [InlineData(TokenType.OperatorLess, Operator.LessThanComparison)] - [InlineData(TokenType.OperatorLessEquals, Operator.LessOrEqualComparison)] - [InlineData(TokenType.OperatorGreater, Operator.GreaterThanComparison)] - [InlineData(TokenType.OperatorGreaterEquals, Operator.GreaterOrEqualComparison)] - [InlineData(TokenType.OperatorEqualsEquals, Operator.EqualComparison)] - 
[InlineData(TokenType.OperatorBangEquals, Operator.NotEqualComparison)] - [InlineData(TokenType.OperatorAndAnd, Operator.Conjunction)] - [InlineData(TokenType.OperatorOrOr, Operator.Disjunction)] - [InlineData(TokenType.OperatorQueryQuery, Operator.NullCoalescing)] - [InlineData(TokenType.OperatorQueryGreater, Operator.NullSafePipe)] - [InlineData(TokenType.OperatorEquals, Operator.Assignment)] - [InlineData(TokenType.OperatorPlusEquals, Operator.AdditionAssignment)] - [InlineData(TokenType.OperatorMinusEquals, Operator.SubtractionAssignment)] - [InlineData(TokenType.OperatorAsteriskEquals, Operator.MultiplicationAssignment)] - [InlineData(TokenType.OperatorSlashEquals, Operator.DivisionAssignment)] - [InlineData(TokenType.OperatorPercentEquals, Operator.RemainderAssignment)] - public void BinaryExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) - { - const string leftIdentifierName = "a"; - var leftToken = new Token(TokenType.Identifier, leftIdentifierName); - var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); - - var opToken = GetDefaultToken(operatorTokenType); - - const string rightIdentifierName = "b"; - var rightToken = new Token(TokenType.Identifier, rightIdentifierName); - var expectedRightExpression = new IdentifierExpression(rightIdentifierName); - - var lexerMock = new LexerMock(leftToken, opToken, rightToken); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression!.Left.Should().BeEquivalentTo(expectedLeftExpression, ProvideOptions); - expression.Operator.Should().Be(expectedOperator); - expression.Right.Should().BeEquivalentTo(expectedRightExpression, ProvideOptions); - } - - [Trait("Category", "Unary expressions")] - [Theory] - [InlineData(TokenType.OperatorPlus, Operator.NumberPromotion)] - [InlineData(TokenType.OperatorMinus, Operator.ArithmeticNegation)] - [InlineData(TokenType.OperatorBang, Operator.LogicalNegation)] - public void UnaryExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) - { - var opToken = GetDefaultToken(operatorTokenType); - - const string identifierName = "a"; - var token = new Token(TokenType.Identifier, identifierName); - var expectedExpression = new IdentifierExpression(identifierName); - - var lexerMock = new LexerMock(opToken, token); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression!.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - expression.Operator.Should().Be(expectedOperator); - } - - [Trait("Category", "Binary expressions")] - [Theory] - [InlineData(new[] { TokenType.KeywordIs }, TokenType.KeywordInt, Operator.EqualTypeCheck, DataType.Integer)] - [InlineData(new[] { TokenType.KeywordIs, TokenType.KeywordNot }, TokenType.KeywordNull, Operator.NotEqualTypeCheck, DataType.Null)] - public void TypeCheckingBinaryExpressionsShouldBeParsedCorrectly(TokenType[] operatorTokenTypes, TokenType typeTokenType, Operator expectedOperator, DataType expectedType) - { - const string leftIdentifierName = "a"; - var leftToken = 
new Token(TokenType.Identifier, leftIdentifierName); - var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); - - var opTokens = operatorTokenTypes.Select(GetDefaultToken).ToArray(); - - var rightToken = GetDefaultToken(typeTokenType); - var expectedRightExpression = new TypeExpression(expectedType); - - var lexerMock = new LexerMock(opTokens.Prepend(leftToken).Append(rightToken).ToArray()); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression!.Left.Should().BeEquivalentTo(expectedLeftExpression, ProvideOptions); - expression.Operator.Should().Be(expectedOperator); - expression.Right.Should().BeEquivalentTo(expectedRightExpression, ProvideOptions); - } - - [Trait("Category", "Block expressions")] - [Theory] - [MemberData(nameof(GenerateBlockExpressionTestData))] - public void BlockExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Statement[] expectedStatementList, Statement? expectedUnterminatedStatement) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var blockExpression = expressionStatement.Expression.As(); - blockExpression.Should().NotBeNull(); - blockExpression!.Statements.ToArray().Should().BeEquivalentTo(expectedStatementList, ProvideOptions); - blockExpression.UnterminatedStatement.Should().BeEquivalentTo(expectedUnterminatedStatement, ProvideOptions); - } - - [Trait("Category", "Conditional expressions")] - [Theory] - [MemberData(nameof(GenerateConditionalExpressionTestData))] - public void ConditionalExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, ConditionalElement expectedIfPart, ConditionalElement[] expectedElifParts, Expression? expectedElsePart) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var conditionalExpression = expressionStatement.Expression.As(); - conditionalExpression.Should().NotBeNull(); - conditionalExpression.IfPart.Should().BeEquivalentTo(expectedIfPart, ProvideOptions); - conditionalExpression.ElifParts.ToArray().Should().BeEquivalentTo(expectedElifParts, ProvideOptions); - conditionalExpression.ElsePart.Should().BeEquivalentTo(expectedElsePart, ProvideOptions); - } - - [Trait("Category", "For loop expressions")] - [Theory] - [MemberData(nameof(GenerateForLoopExpressionTestData))] - public void ForLoopExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, string? 
expectedCounterName, ForLoopRange expectedRange, Expression expectedBody) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var forLoopExpression = expressionStatement.Expression.As(); - forLoopExpression.Should().NotBeNull(); - forLoopExpression.CounterName.Should().Be(expectedCounterName); - forLoopExpression.Range.Should().BeEquivalentTo(expectedRange, ProvideOptions); - forLoopExpression.Body.Should().BeEquivalentTo(expectedBody, ProvideOptions); - } - - [Trait("Category", "While loop expressions")] - [Theory] - [MemberData(nameof(GenerateWhileLoopExpressionTestData))] - public void WhileLoopExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedCondition, Expression expectedBody) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var whileLoopExpression = expressionStatement.Expression.As(); - whileLoopExpression.Should().NotBeNull(); - whileLoopExpression.Condition.Should().BeEquivalentTo(expectedCondition, ProvideOptions); - whileLoopExpression.Body.Should().BeEquivalentTo(expectedBody, ProvideOptions); - } - - [Trait("Category", "Function definition expressions")] - [Theory] - [MemberData(nameof(GenerateFunctionDefinitionExpressionTestData))] - public void FunctionDefinitionExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, FunctionParameter[] expectedParameters, BlockExpression expectedBody) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var functionDefinitionExpression = expressionStatement.Expression.As(); - functionDefinitionExpression.Should().NotBeNull(); - functionDefinitionExpression.Parameters.ToArray().Should().BeEquivalentTo(expectedParameters, ProvideOptions); - functionDefinitionExpression.Body.Should().BeEquivalentTo(expectedBody, ProvideOptions); - } - - [Trait("Category", "Pattern matching expressions")] - [Theory] - [MemberData(nameof(GeneratePatternMatchingExpressionTestData))] - public void PatternMatchingExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedArgument, PatternMatchingBranch[] expectedBranches, Expression? 
expectedDefault) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var patternMatchingExpression = expressionStatement.Expression.As(); - patternMatchingExpression.Should().NotBeNull(); - patternMatchingExpression.Argument.Should().BeEquivalentTo(expectedArgument, ProvideOptions); - patternMatchingExpression.Branches.ToArray().Should().BeEquivalentTo(expectedBranches, ProvideOptions); - patternMatchingExpression.Default.Should().BeEquivalentTo(expectedDefault, ProvideOptions); - } - - [Trait("Category", "Function call expressions")] - [Theory] - [MemberData(nameof(GenerateFunctionCallExpressionTestData))] - public void FunctionCallExpressionsShouldBeParsedCorrectly(Token[] tokenSequence, Expression expectedCalledExpression, Expression[] expectedArguments) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var functionCallExpression = expressionStatement.Expression.As(); - functionCallExpression.Should().NotBeNull(); - functionCallExpression.Expression.Should().BeEquivalentTo(expectedCalledExpression, ProvideOptions); - functionCallExpression.Arguments.ToArray().Should().BeEquivalentTo(expectedArguments, ProvideOptions); - } - - [Trait("Category", "Binary expressions")] - [Trait("Category", "Pattern matching expressions")] - [Theory] - [InlineData(TokenType.KeywordOr, Operator.PatternMatchingDisjunction)] - [InlineData(TokenType.KeywordAnd, Operator.PatternMatchingConjunction)] - public void BinaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) - { - const string leftIdentifierName = "b"; - const string rightIdentifierName = "c"; - - var tokenSequence = new[] - { - GetDefaultToken(TokenType.KeywordMatch), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.LeftBrace), - new Token(TokenType.Identifier, leftIdentifierName), - GetDefaultToken(operatorTokenType), - new Token(TokenType.Identifier, rightIdentifierName), - GetDefaultToken(TokenType.Colon), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.Semicolon), - GetDefaultToken(TokenType.RightBrace) - }; - - var expectedLeftExpression = new IdentifierExpression(leftIdentifierName); - var expectedRightExpression = new IdentifierExpression(rightIdentifierName); - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var patternMatchingExpression = expressionStatement.Expression.As(); - patternMatchingExpression.Should().NotBeNull(); - patternMatchingExpression.Branches.Should().HaveCount(1); - - var binaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); - binaryExpression.Should().NotBeNull(); - binaryExpression!.Left.Should().BeEquivalentTo(expectedLeftExpression, ProvideOptions); - binaryExpression.Operator.Should().Be(expectedOperator); - binaryExpression.Right.Should().BeEquivalentTo(expectedRightExpression, ProvideOptions); - } - - 
[Trait("Category", "Unary expressions")] - [Trait("Category", "Pattern matching expressions")] - [Theory] - [InlineData(TokenType.OperatorLess, Operator.PatternMatchingLessThanComparison)] - [InlineData(TokenType.OperatorLessEquals, Operator.PatternMatchingLessOrEqualComparison)] - [InlineData(TokenType.OperatorGreater, Operator.PatternMatchingGreaterThanComparison)] - [InlineData(TokenType.OperatorGreaterEquals, Operator.PatternMatchingGreaterOrEqualComparison)] - [InlineData(TokenType.OperatorEqualsEquals, Operator.PatternMatchingEqualComparison)] - [InlineData(TokenType.OperatorBangEquals, Operator.PatternMatchingNotEqualComparison)] - public void UnaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType operatorTokenType, Operator expectedOperator) - { - var tokenSequence = new[] - { - GetDefaultToken(TokenType.KeywordMatch), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.LeftBrace), - GetDefaultToken(operatorTokenType), - new Token(TokenType.LiteralInteger, 5L), - GetDefaultToken(TokenType.Colon), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.Semicolon), - GetDefaultToken(TokenType.RightBrace) - }; - - var expectedExpression = new LiteralExpression(DataType.Integer, 5L); - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var patternMatchingExpression = expressionStatement.Expression.As(); - patternMatchingExpression.Should().NotBeNull(); - patternMatchingExpression.Branches.Should().HaveCount(1); - - var unaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); - unaryExpression.Should().NotBeNull(); - unaryExpression.Operator.Should().Be(expectedOperator); - unaryExpression.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - } - - [Trait("Category", "Unary expressions")] - [Trait("Category", "Pattern matching expressions")] - [Theory] - [InlineData(new[] { TokenType.KeywordIs }, TokenType.KeywordInt, Operator.PatternMatchingEqualTypeCheck, DataType.Integer)] - [InlineData(new[] { TokenType.KeywordIs, TokenType.KeywordNot }, TokenType.KeywordNull, Operator.PatternMatchingNotEqualTypeCheck, DataType.Null)] - public void TypeCheckingUnaryPatternMatchingExpressionsShouldBeParsedCorrectly(TokenType[] operatorTokenTypes, TokenType typeTokenType, Operator expectedOperator, DataType expectedType) - { - var tokenSequence = new[] - { - GetDefaultToken(TokenType.KeywordMatch), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.LeftBrace) - }.Concat(operatorTokenTypes.Select(GetDefaultToken)).Concat(new[] - { - GetDefaultToken(typeTokenType), - GetDefaultToken(TokenType.Colon), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.Semicolon), - GetDefaultToken(TokenType.RightBrace) - }).ToArray(); - - var expectedExpression = new TypeExpression(expectedType); - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var patternMatchingExpression = expressionStatement.Expression.As(); - 
patternMatchingExpression.Should().NotBeNull(); - patternMatchingExpression.Branches.Should().HaveCount(1); - - var unaryExpression = patternMatchingExpression.Branches[0].Pattern.As(); - unaryExpression.Should().NotBeNull(); - unaryExpression.Operator.Should().Be(expectedOperator); - unaryExpression.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - } - - [Trait("Category", "Nesting")] - [Fact] - public void NestedExpressionsShouldBeHandledCorrectly() - { - var tokenSequence = new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorEquals), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorEquals), - new Token(TokenType.LiteralInteger, 5L), - GetDefaultToken(TokenType.OperatorPlus), - GetDefaultToken(TokenType.OperatorMinus), - GetDefaultToken(TokenType.OperatorBang), - GetDefaultToken(TokenType.KeywordFalse) - }; - - var expectedTree = new BinaryExpression( - new IdentifierExpression("a"), - Operator.Assignment, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Assignment, - new BinaryExpression( - new LiteralExpression(DataType.Integer, 5L), - Operator.Addition, - new UnaryExpression( - Operator.ArithmeticNegation, - new UnaryExpression( - Operator.LogicalNegation, - new LiteralExpression(DataType.Bool, false)))))); - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - expressionStatement.Expression.Should().BeEquivalentTo(expectedTree, ProvideOptions); - } - - [Trait("Category", "Associativity")] - [Trait("Category", "Nesting")] - [Theory] - [MemberData(nameof(GenerateOperatorsAssociativityTestData))] - public void ExpressionsShouldBeParsedWithRespectToOperatorsAssociativity(Token[] tokenSequence, Expression expectedExpression) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - } - - [Trait("Category", "Priority")] - [Trait("Category", "Nesting")] - [Theory] - [MemberData(nameof(GenerateOperatorsPriorityTestData))] - public void ExpressionsShouldBeParsedWithRespectToOperatorsPriority(Token[] tokenSequence, Expression expectedExpression) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - } - - [Trait("Category", "Grouping expressions")] - [Trait("Category", "Nesting")] - [Fact] - public void GroupingExpressionsShouldBeParsedCorrectly() - { - var tokenSequence = new[] - { - GetDefaultToken(TokenType.LeftParenthesis), - GetDefaultToken(TokenType.LeftParenthesis), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.LiteralInteger, 5L), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.RightParenthesis) - }; - - var 
expectedExpression = new GroupingExpression(new GroupingExpression(new BinaryExpression( - new GroupingExpression(new IdentifierExpression("a")), - Operator.Addition, - new LiteralExpression(DataType.Integer, 5L)))); - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - expressionStatement.Expression.Should().BeEquivalentTo(expectedExpression, ProvideOptions); - } - - [Trait("Category", "Type cast expressions")] - [Theory] - [InlineData(TokenType.KeywordInt, DataType.Integer)] - [InlineData(TokenType.KeywordFloat, DataType.Float)] - [InlineData(TokenType.KeywordString, DataType.String)] - [InlineData(TokenType.KeywordBool, DataType.Bool)] - public void TypeCastExpressionsShouldBeParsedCorrectly(TokenType literalTokenType, DataType expectedCastType) - { - var tokenSequence = new[] - { - GetDefaultToken(literalTokenType), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis) - }; - - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - - var expression = expressionStatement.Expression.As(); - expression.Should().NotBeNull(); - expression.Type.Should().Be(expectedCastType); - } - - #region Generators - - public static IEnumerable GenerateBlockExpressionTestData() - { - var leftBraceToken = GetDefaultToken(TokenType.LeftBrace); - var rightBraceToken = GetDefaultToken(TokenType.RightBrace); - var semicolonToken = GetDefaultToken(TokenType.Semicolon); - // basic unterminated - yield return new object[] - { - new[] - { - leftBraceToken, - new(TokenType.Identifier, "a"), - rightBraceToken - }, - Array.Empty(), - new ExpressionStatement(new IdentifierExpression("a")) - }; - // basic terminated - yield return new object[] - { - new[] - { - leftBraceToken, - new(TokenType.Identifier, "a"), - semicolonToken, - rightBraceToken - }, - new Statement[] - { - new ExpressionStatement(new IdentifierExpression("a")) - { - IsTerminated = true - } - }, - (null as Statement)! - }; - // terminated and unterminated - yield return new object[] - { - new[] - { - leftBraceToken, - new(TokenType.Identifier, "a"), - semicolonToken, - new(TokenType.Identifier, "b"), - rightBraceToken - }, - new Statement[] - { - new ExpressionStatement(new IdentifierExpression("a")) - { - IsTerminated = true - } - }, - new ExpressionStatement(new IdentifierExpression("b")) - }; - // double terminated - yield return new object[] - { - new[] - { - leftBraceToken, - new(TokenType.Identifier, "a"), - semicolonToken, - new(TokenType.Identifier, "b"), - semicolonToken, - rightBraceToken - }, - new Statement[] - { - new ExpressionStatement(new IdentifierExpression("a")) - { - IsTerminated = true - }, - new ExpressionStatement(new IdentifierExpression("b")) - { - IsTerminated = true - } - }, - (null as Statement)! 
- }; - } - - public static IEnumerable GenerateConditionalExpressionTestData() - { - var ifToken = GetDefaultToken(TokenType.KeywordIf); - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - var elifToken = GetDefaultToken(TokenType.KeywordElif); - var elseToken = GetDefaultToken(TokenType.KeywordElse); - // basic - yield return new object[] - { - new[] - { - ifToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b") - }, - new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), - Array.Empty(), - (null as Expression)! - }; - // with else - yield return new object[] - { - new[] - { - ifToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b"), - elseToken, - new(TokenType.Identifier, "c") - }, - new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), - Array.Empty(), - new IdentifierExpression("c") - }; - // with elif - yield return new object[] - { - new[] - { - ifToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b"), - elifToken, - leftParenthesisToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken, - new(TokenType.Identifier, "d") - }, - new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), - new[] - { - new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")) - }, - (null as Expression)! - }; - // with more than one elif - yield return new object[] - { - new[] - { - ifToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b"), - elifToken, - leftParenthesisToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken, - new(TokenType.Identifier, "d"), - elifToken, - leftParenthesisToken, - new(TokenType.Identifier, "e"), - rightParenthesisToken, - new(TokenType.Identifier, "f") - }, - new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), - new[] - { - new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")), - new ConditionalElement(new IdentifierExpression("e"), new IdentifierExpression("f")) - }, - (null as Expression)! 
- }; - // with elif and else - yield return new object[] - { - new[] - { - ifToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b"), - elifToken, - leftParenthesisToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken, - new(TokenType.Identifier, "d"), - elseToken, - new(TokenType.Identifier, "e") - }, - new ConditionalElement(new IdentifierExpression("a"), new IdentifierExpression("b")), - new[] - { - new ConditionalElement(new IdentifierExpression("c"), new IdentifierExpression("d")) - }, - new IdentifierExpression("e") - }; - } - - public static IEnumerable GenerateForLoopExpressionTestData() - { - var forToken = GetDefaultToken(TokenType.KeywordFor); - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - var colonToken = GetDefaultToken(TokenType.Colon); - var comma = GetDefaultToken(TokenType.Comma); - // basic - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b") - }, - null!, - new ForLoopRange(new IdentifierExpression("a")), - new IdentifierExpression("b") - }; - // with start:stop range - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - colonToken, - new(TokenType.Identifier, "b"), - rightParenthesisToken, - new(TokenType.Identifier, "c") - }, - null!, - new ForLoopRange(Start: new IdentifierExpression("a"), PastTheEnd: new IdentifierExpression("b")), - new IdentifierExpression("c") - }; - // with start:stop:step range - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - colonToken, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken, - new(TokenType.Identifier, "d") - }, - null!, - new ForLoopRange(Start: new IdentifierExpression("a"), PastTheEnd: new IdentifierExpression("b"), Step: new IdentifierExpression("c")), - new IdentifierExpression("d") - }; - // with counter - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - comma, - new(TokenType.Identifier, "b"), - rightParenthesisToken, - new(TokenType.Identifier, "c") - }, - "a", - new ForLoopRange(new IdentifierExpression("b")), - new IdentifierExpression("c") - }; - // with counter and start:stop range - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - comma, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken, - new(TokenType.Identifier, "d") - }, - "a", - new ForLoopRange(Start: new IdentifierExpression("b"), PastTheEnd: new IdentifierExpression("c")), - new IdentifierExpression("d") - }; - // with counter and start:stop:step range - yield return new object[] - { - new[] - { - forToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - comma, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - colonToken, - new(TokenType.Identifier, "d"), - rightParenthesisToken, - new(TokenType.Identifier, "e") - }, - "a", - new ForLoopRange(Start: new IdentifierExpression("b"), PastTheEnd: new IdentifierExpression("c"), Step: new IdentifierExpression("d")), - new IdentifierExpression("e") - }; - } - - public static IEnumerable 
GenerateWhileLoopExpressionTestData() - { - var whileToken = GetDefaultToken(TokenType.KeywordWhile); - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - // basic - yield return new object[] - { - new[] - { - whileToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - new(TokenType.Identifier, "b") - }, - new IdentifierExpression("a"), - new IdentifierExpression("b") - }; - } - - public static IEnumerable GenerateFunctionDefinitionExpressionTestData() - { - var functiToken = GetDefaultToken(TokenType.KeywordFuncti); - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - var leftBrace = GetDefaultToken(TokenType.LeftBrace); - var rightBrace = GetDefaultToken(TokenType.RightBrace); - var constToken = GetDefaultToken(TokenType.KeywordConst); - var bangToken = GetDefaultToken(TokenType.OperatorBang); - var commaToken = GetDefaultToken(TokenType.Comma); - // basic - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "a"), - rightBrace - }, - Array.Empty(), - new BlockExpression(new List(), new ExpressionStatement(new IdentifierExpression("a"))) - }; - // with one parameter - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - rightBrace - }, - new[] - { - new FunctionParameter("a") - }, - new BlockExpression(new List(), new ExpressionStatement(new IdentifierExpression("b"))) - }; - // with one const parameter - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - constToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - rightBrace - }, - new[] - { - new FunctionParameter(IsConst: true, Name: "a") - }, - new BlockExpression(new List(), new ExpressionStatement(new IdentifierExpression("b"))) - }; - // with one required parameter - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - bangToken, - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - rightBrace - }, - new[] - { - new FunctionParameter("a", IsNullAllowed: false) - }, - new BlockExpression(new List(), new ExpressionStatement(new IdentifierExpression("b"))) - }; - // with more than one parameter - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - commaToken, - new(TokenType.Identifier, "b"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "c"), - rightBrace - }, - new[] - { - new FunctionParameter("a"), - new FunctionParameter("b") - }, - new BlockExpression(new List(), new ExpressionStatement(new IdentifierExpression("c"))) - }; - // with more than one parameter (including one const and non-nullable) - yield return new object[] - { - new[] - { - functiToken, - leftParenthesisToken, - constToken, - new(TokenType.Identifier, "a"), - bangToken, - commaToken, - new(TokenType.Identifier, "b"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "c"), - rightBrace - }, - new[] - { - new FunctionParameter(IsConst: true, Name: "a", IsNullAllowed: false), - new FunctionParameter("b") - }, - new BlockExpression(new 
List(), new ExpressionStatement(new IdentifierExpression("c"))) - }; - } - - public static IEnumerable GeneratePatternMatchingExpressionTestData() - { - var matchToken = GetDefaultToken(TokenType.KeywordMatch); - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - var leftBrace = GetDefaultToken(TokenType.LeftBrace); - var rightBrace = GetDefaultToken(TokenType.RightBrace); - var colonToken = GetDefaultToken(TokenType.Colon); - var semicolonToken = GetDefaultToken(TokenType.Semicolon); - var defaultToken = GetDefaultToken(TokenType.KeywordDefault); - // basic - yield return new object[] - { - new[] - { - matchToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - rightBrace - }, - new IdentifierExpression("a"), - Array.Empty(), - (null as Expression)! - }; - // with non-default branch - yield return new object[] - { - new[] - { - matchToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - semicolonToken, - rightBrace - }, - new IdentifierExpression("a"), - new[] - { - new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")) - }, - (null as Expression)! - }; - // with default branch - yield return new object[] - { - new[] - { - matchToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - defaultToken, - colonToken, - new(TokenType.Identifier, "b"), - semicolonToken, - rightBrace - }, - new IdentifierExpression("a"), - Array.Empty(), - new IdentifierExpression("b") - }; - // with more than one non-default branch - yield return new object[] - { - new[] - { - matchToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - semicolonToken, - new(TokenType.Identifier, "d"), - colonToken, - new(TokenType.Identifier, "e"), - semicolonToken, - rightBrace - }, - new IdentifierExpression("a"), - new[] - { - new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")), - new PatternMatchingBranch(new IdentifierExpression("d"), new IdentifierExpression("e")) - }, - (null as Expression)! 
- }; - // with both non-default and default branch - yield return new object[] - { - new[] - { - matchToken, - leftParenthesisToken, - new(TokenType.Identifier, "a"), - rightParenthesisToken, - leftBrace, - new(TokenType.Identifier, "b"), - colonToken, - new(TokenType.Identifier, "c"), - semicolonToken, - defaultToken, - colonToken, - new(TokenType.Identifier, "d"), - semicolonToken, - rightBrace - }, - new IdentifierExpression("a"), - new[] - { - new PatternMatchingBranch(new IdentifierExpression("b"), new IdentifierExpression("c")) - }, - new IdentifierExpression("d") - }; - } - - public static IEnumerable GenerateFunctionCallExpressionTestData() - { - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - var commaToken = GetDefaultToken(TokenType.Comma); - // basic - yield return new object[] - { - new[] - { - new(TokenType.Identifier, "a"), - leftParenthesisToken, - rightParenthesisToken - }, - new IdentifierExpression("a"), - Array.Empty() - }; - // with an argument - yield return new object[] - { - new[] - { - new(TokenType.Identifier, "a"), - leftParenthesisToken, - new(TokenType.Identifier, "b"), - rightParenthesisToken - }, - new IdentifierExpression("a"), - new[] - { - new IdentifierExpression("b") - } - }; - // with more than one argument - yield return new object[] - { - new[] - { - new(TokenType.Identifier, "a"), - leftParenthesisToken, - new(TokenType.Identifier, "b"), - commaToken, - new(TokenType.Identifier, "c"), - rightParenthesisToken - }, - new IdentifierExpression("a"), - new[] - { - new IdentifierExpression("b"), - new IdentifierExpression("c") - } - }; - } - - public static IEnumerable GenerateOperatorsPriorityTestData() - { - // same priority () . - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorDot), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.LeftParenthesis), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.OperatorDot), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new FunctionCallExpression(new BinaryExpression( - new IdentifierExpression("a"), - Operator.NamespaceAccess, - new IdentifierExpression("b")), new List()), - Operator.NamespaceAccess, - new IdentifierExpression("c")) - }; - // . higher than ^ - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorCaret), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorDot), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Exponentiation, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.NamespaceAccess, - new IdentifierExpression("c"))) - }; - // ^ higher than unary + - yield return new object[] - { - new[] - { - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorCaret), - new Token(TokenType.Identifier, "b") - }, - new UnaryExpression( - Operator.NumberPromotion, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Exponentiation, - new IdentifierExpression("b"))) - }; - // same priority unary + unary - unary ! 
- yield return new object[] - { - new[] - { - GetDefaultToken(TokenType.OperatorPlus), - GetDefaultToken(TokenType.OperatorMinus), - GetDefaultToken(TokenType.OperatorBang), - GetDefaultToken(TokenType.OperatorMinus), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "a") - }, - new UnaryExpression( - Operator.NumberPromotion, - new UnaryExpression( - Operator.ArithmeticNegation, - new UnaryExpression( - Operator.LogicalNegation, - new UnaryExpression( - Operator.ArithmeticNegation, - new UnaryExpression( - Operator.NumberPromotion, - new IdentifierExpression("a")))))) - }; - // unary + higher than * - yield return new object[] - { - new[] - { - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorAsterisk), - new Token(TokenType.Identifier, "b") - }, - new BinaryExpression( - new UnaryExpression( - Operator.NumberPromotion, - new IdentifierExpression("a")), - Operator.Multiplication, - new IdentifierExpression("b")) - }; - // same priority * / % - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorAsterisk), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorSlash), - new Token(TokenType.Identifier, "c"), - GetDefaultToken(TokenType.OperatorPercent), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.OperatorSlash), - new Token(TokenType.Identifier, "e"), - GetDefaultToken(TokenType.OperatorAsterisk), - new Token(TokenType.Identifier, "f") - }, - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Multiplication, - new IdentifierExpression("b")), - Operator.Division, - new IdentifierExpression("c")), - Operator.Remainder, - new IdentifierExpression("d")), - Operator.Division, - new IdentifierExpression("e")), - Operator.Multiplication, - new IdentifierExpression("f")) - }; - // * higher than binary + - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorAsterisk), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Addition, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Multiplication, - new IdentifierExpression("c"))) - }; - // same priority binary + binary - - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorMinus), - new Token(TokenType.Identifier, "c"), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "d") - }, - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Addition, - new IdentifierExpression("b")), - Operator.Subtraction, - new IdentifierExpression("c")), - Operator.Addition, - new IdentifierExpression("d")) - }; - // binary + higher than .. 
- yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorDotDot), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Concatenation, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Addition, - new IdentifierExpression("c"))) - }; - // .. higher than < - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorLess), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorDotDot), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.LessThanComparison, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Concatenation, - new IdentifierExpression("c"))) - }; - // same priority < <= > >= == != - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorLess), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorLessEquals), - new Token(TokenType.Identifier, "c"), - GetDefaultToken(TokenType.OperatorGreater), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.OperatorGreaterEquals), - new Token(TokenType.Identifier, "e"), - GetDefaultToken(TokenType.OperatorEqualsEquals), - new Token(TokenType.Identifier, "f"), - GetDefaultToken(TokenType.OperatorBangEquals), - new Token(TokenType.Identifier, "g"), - GetDefaultToken(TokenType.OperatorEqualsEquals), - new Token(TokenType.Identifier, "h"), - GetDefaultToken(TokenType.OperatorGreaterEquals), - new Token(TokenType.Identifier, "i"), - GetDefaultToken(TokenType.OperatorGreater), - new Token(TokenType.Identifier, "j"), - GetDefaultToken(TokenType.OperatorLessEquals), - new Token(TokenType.Identifier, "k"), - GetDefaultToken(TokenType.OperatorLess), - new Token(TokenType.Identifier, "l") - }, - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - Operator.LessThanComparison, - new IdentifierExpression("b")), - Operator.LessOrEqualComparison, - new IdentifierExpression("c")), - Operator.GreaterThanComparison, - new IdentifierExpression("d")), - Operator.GreaterOrEqualComparison, - new IdentifierExpression("e")), - Operator.EqualComparison, - new IdentifierExpression("f")), - Operator.NotEqualComparison, - new IdentifierExpression("g")), - Operator.EqualComparison, - new IdentifierExpression("h")), - Operator.GreaterOrEqualComparison, - new IdentifierExpression("i")), - Operator.GreaterThanComparison, - new IdentifierExpression("j")), - Operator.LessOrEqualComparison, - new IdentifierExpression("k")), - Operator.LessThanComparison, - new IdentifierExpression("l")) - }; - // < higher than is - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorLess), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordInt) - }, - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - Operator.LessThanComparison, - new IdentifierExpression("b") - ), - Operator.EqualTypeCheck, - new 
TypeExpression(DataType.Integer)) - }; - // < higher than is not - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorLess), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordNot), - GetDefaultToken(TokenType.KeywordInt) - }, - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - Operator.LessThanComparison, - new IdentifierExpression("b") - ), - Operator.NotEqualTypeCheck, - new TypeExpression(DataType.Integer)) - }; - // is higher than && - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorAndAnd), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordInt) - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Conjunction, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.EqualTypeCheck, - new TypeExpression(DataType.Integer))) - }; - // is not higher than && - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorAndAnd), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordNot), - GetDefaultToken(TokenType.KeywordInt) - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Conjunction, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.NotEqualTypeCheck, - new TypeExpression(DataType.Integer))) - }; - // && higher than || - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorOrOr), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorAndAnd), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Disjunction, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Conjunction, - new IdentifierExpression("c"))) - }; - // || higher than ?> - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorQueryGreater), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorOrOr), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.NullSafePipe, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.Disjunction, - new IdentifierExpression("c"))) - }; - // ?> higher than ?? - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorQueryQuery), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorQueryGreater), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.NullCoalescing, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.NullSafePipe, - new IdentifierExpression("c"))) - }; - // ?? 
higher than = - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorEquals), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorQueryQuery), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Assignment, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.NullCoalescing, - new IdentifierExpression("c"))) - }; - // same priority = += -= *= /= %= - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorEquals), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.OperatorPlusEquals), - new Token(TokenType.Identifier, "c"), - GetDefaultToken(TokenType.OperatorMinusEquals), - new Token(TokenType.Identifier, "d"), - GetDefaultToken(TokenType.OperatorAsteriskEquals), - new Token(TokenType.Identifier, "e"), - GetDefaultToken(TokenType.OperatorSlashEquals), - new Token(TokenType.Identifier, "f"), - GetDefaultToken(TokenType.OperatorPercentEquals), - new Token(TokenType.Identifier, "g"), - GetDefaultToken(TokenType.OperatorSlashEquals), - new Token(TokenType.Identifier, "h"), - GetDefaultToken(TokenType.OperatorAsteriskEquals), - new Token(TokenType.Identifier, "i"), - GetDefaultToken(TokenType.OperatorMinusEquals), - new Token(TokenType.Identifier, "j"), - GetDefaultToken(TokenType.OperatorPlusEquals), - new Token(TokenType.Identifier, "k"), - GetDefaultToken(TokenType.OperatorEquals), - new Token(TokenType.Identifier, "l") - }, - new BinaryExpression( - new IdentifierExpression("a"), - Operator.Assignment, - new BinaryExpression( - new IdentifierExpression("b"), - Operator.AdditionAssignment, - new BinaryExpression( - new IdentifierExpression("c"), - Operator.SubtractionAssignment, - new BinaryExpression( - new IdentifierExpression("d"), - Operator.MultiplicationAssignment, - new BinaryExpression( - new IdentifierExpression("e"), - Operator.DivisionAssignment, - new BinaryExpression( - new IdentifierExpression("f"), - Operator.RemainderAssignment, - new BinaryExpression( - new IdentifierExpression("g"), - Operator.DivisionAssignment, - new BinaryExpression( - new IdentifierExpression("h"), - Operator.MultiplicationAssignment, - new BinaryExpression( - new IdentifierExpression("i"), - Operator.SubtractionAssignment, - new BinaryExpression( - new IdentifierExpression("j"), - Operator.AdditionAssignment, - new BinaryExpression( - new IdentifierExpression("k"), - Operator.Assignment, - new IdentifierExpression("l")))))))))))) - }; - } - - public static IEnumerable GenerateOperatorsAssociativityTestData() - { - static object[] GenerateLeftBinary(TokenType tokenType, Operator @operator) => - new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(tokenType), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(tokenType), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - @operator, - new IdentifierExpression("b")), - @operator, - new IdentifierExpression("c")) - }; - static object[] GenerateRightBinary(TokenType tokenType, Operator @operator) => - new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(tokenType), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(tokenType), - new Token(TokenType.Identifier, "c") - }, - new BinaryExpression( - new IdentifierExpression("a"), - @operator, - new BinaryExpression( - 
new IdentifierExpression("b"), - @operator, - new IdentifierExpression("c"))) - }; - static object[] GenerateRightUnary(TokenType tokenType, Operator @operator) => - new object[] - { - new[] - { - GetDefaultToken(tokenType), - GetDefaultToken(tokenType), - new Token(TokenType.Identifier, "a") - }, - new UnaryExpression( - @operator, - new UnaryExpression( - @operator, - new IdentifierExpression("a"))) - }; - static object[] GenerateTypeCheck(bool isNegated = false) => - new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordInt), - GetDefaultToken(TokenType.KeywordIs), - GetDefaultToken(TokenType.KeywordInt) - }.SelectMany(x => - x.Type == TokenType.KeywordIs - ? isNegated ? new[] { x, GetDefaultToken(TokenType.KeywordNot) } : new[] { x } - : new[] { x }).ToArray(), - new BinaryExpression( - new BinaryExpression( - new IdentifierExpression("a"), - isNegated ? Operator.NotEqualTypeCheck : Operator.EqualTypeCheck, - new TypeExpression(DataType.Integer)), - isNegated ? Operator.NotEqualTypeCheck : Operator.EqualTypeCheck, - new TypeExpression(DataType.Integer)) - }; - - // . - yield return GenerateLeftBinary(TokenType.OperatorDot, Operator.NamespaceAccess); - // () - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "b"), - GetDefaultToken(TokenType.RightParenthesis), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "c"), - GetDefaultToken(TokenType.RightParenthesis) - }, - new FunctionCallExpression( - new FunctionCallExpression( - new IdentifierExpression("a"), - new List { new IdentifierExpression("b") }), - new List { new IdentifierExpression("c") }) - }; - // ^ - yield return GenerateRightBinary(TokenType.OperatorCaret, Operator.Exponentiation); - // unary + - yield return GenerateRightUnary(TokenType.OperatorPlus, Operator.NumberPromotion); - // unary - - yield return GenerateRightUnary(TokenType.OperatorMinus, Operator.ArithmeticNegation); - // unary ! - yield return GenerateRightUnary(TokenType.OperatorBang, Operator.LogicalNegation); - // * - yield return GenerateLeftBinary(TokenType.OperatorAsterisk, Operator.Multiplication); - // / - yield return GenerateLeftBinary(TokenType.OperatorSlash, Operator.Division); - // % - yield return GenerateLeftBinary(TokenType.OperatorPercent, Operator.Remainder); - // binary + - yield return GenerateLeftBinary(TokenType.OperatorPlus, Operator.Addition); - // binary - - yield return GenerateLeftBinary(TokenType.OperatorMinus, Operator.Subtraction); - // .. 
- yield return GenerateLeftBinary(TokenType.OperatorDotDot, Operator.Concatenation); - // < - yield return GenerateLeftBinary(TokenType.OperatorLess, Operator.LessThanComparison); - // <= - yield return GenerateLeftBinary(TokenType.OperatorLessEquals, Operator.LessOrEqualComparison); - // > - yield return GenerateLeftBinary(TokenType.OperatorGreater, Operator.GreaterThanComparison); - // >= - yield return GenerateLeftBinary(TokenType.OperatorGreaterEquals, Operator.GreaterOrEqualComparison); - // == - yield return GenerateLeftBinary(TokenType.OperatorEqualsEquals, Operator.EqualComparison); - // != - yield return GenerateLeftBinary(TokenType.OperatorBangEquals, Operator.NotEqualComparison); - // is - yield return GenerateTypeCheck(); - // is not - yield return GenerateTypeCheck(true); - // && - yield return GenerateLeftBinary(TokenType.OperatorAndAnd, Operator.Conjunction); - // || - yield return GenerateLeftBinary(TokenType.OperatorOrOr, Operator.Disjunction); - // ?> - yield return GenerateLeftBinary(TokenType.OperatorQueryGreater, Operator.NullSafePipe); - // ?? - yield return GenerateLeftBinary(TokenType.OperatorQueryQuery, Operator.NullCoalescing); - // = - yield return GenerateRightBinary(TokenType.OperatorEquals, Operator.Assignment); - // += - yield return GenerateRightBinary(TokenType.OperatorPlusEquals, Operator.AdditionAssignment); - // -= - yield return GenerateRightBinary(TokenType.OperatorMinusEquals, Operator.SubtractionAssignment); - // *= - yield return GenerateRightBinary(TokenType.OperatorAsteriskEquals, Operator.MultiplicationAssignment); - // /= - yield return GenerateRightBinary(TokenType.OperatorSlashEquals, Operator.DivisionAssignment); - // %= - yield return GenerateRightBinary(TokenType.OperatorPercentEquals, Operator.RemainderAssignment); - } - - #endregion Generators -} diff --git a/Toffee.Tests/SyntacticAnalysis/ParserTests.Statements.cs b/Toffee.Tests/SyntacticAnalysis/ParserTests.Statements.cs deleted file mode 100644 index 398cf5f..0000000 --- a/Toffee.Tests/SyntacticAnalysis/ParserTests.Statements.cs +++ /dev/null @@ -1,389 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using FluentAssertions; -using Toffee.LexicalAnalysis; -using Toffee.SyntacticAnalysis; -using Xunit; - -namespace Toffee.Tests.SyntacticAnalysis; - -public partial class ParserTests -{ - [Trait("Category", "Variable initialization list statements")] - [Theory] - [InlineData("std")] - [InlineData("std", "io")] - [InlineData("one1", "two2", "three3")] - public void NamespaceImportStatementsShouldBeParsedCorrectly(params string[] namespaceSegments) - { - var pullToken = GetDefaultToken(TokenType.KeywordPull); - - var namespaceSegmentTokens = namespaceSegments.Select(x => new Token(TokenType.Identifier, x)); - var dotToken =GetDefaultToken(TokenType.OperatorDot); - var interleavedNamespaceSegments = namespaceSegmentTokens.SelectMany(x => new[] { x, dotToken }) - .Take(2 * namespaceSegments.Length - 1); - - var expectedNamespaceSegments = namespaceSegments.Select(x => new IdentifierExpression(x)).ToArray(); - - var lexerMock = new LexerMock(interleavedNamespaceSegments.Prepend(pullToken).ToArray()); - - IParser parser = new Parser(lexerMock); - - var namespaceImportStatement = parser.CurrentStatement.As(); - namespaceImportStatement.Should().NotBeNull(); - namespaceImportStatement!.IsTerminated.Should().Be(false); - namespaceImportStatement.NamespaceLevels.ToArray().Should().BeEquivalentTo(expectedNamespaceSegments, ProvideOptions); - } - - [Trait("Category", 
"Variable initialization list statements")] - [Theory] - [MemberData(nameof(GenerateVariableInitializationListStatementTestData))] - public void VariableInitializationListStatementsShouldBeParsedCorrectly(Token[] tokenSequence, VariableInitialization[] expectedVariableList) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var returnStatement = parser.CurrentStatement.As(); - returnStatement.Should().NotBeNull(); - returnStatement!.IsTerminated.Should().Be(false); - returnStatement.Items.Should().BeEquivalentTo(expectedVariableList, ProvideOptions); - } - - [Trait("Category", "Return statements")] - [Fact] - public void EmptyReturnStatementsShouldBeParsedCorrectly() - { - var returnToken = GetDefaultToken(TokenType.KeywordReturn); - - var lexerMock = new LexerMock(returnToken); - - IParser parser = new Parser(lexerMock); - - var returnStatement = parser.CurrentStatement.As(); - returnStatement.Should().NotBeNull(); - returnStatement!.IsTerminated.Should().Be(false); - returnStatement.Value.Should().BeNull(); - } - - [Trait("Category", "Return statements")] - [Fact] - public void ReturnStatementsContainingExpressionsShouldBeParsedCorrectly() - { - var returnToken = GetDefaultToken(TokenType.KeywordReturn); - - const string identifierName = "a"; - var identifierToken = new Token(TokenType.Identifier, identifierName); - - var lexerMock = new LexerMock(returnToken, identifierToken); - - IParser parser = new Parser(lexerMock); - - var returnStatement = parser.CurrentStatement.As(); - returnStatement.Should().NotBeNull(); - returnStatement!.IsTerminated.Should().Be(false); - returnStatement.Value.Should().NotBeNull(); - - var expression = returnStatement.Value.As(); - expression.Should().NotBeNull(); - expression.Name.Should().Be(identifierName); - } - - [Trait("Category", "Break statements")] - [Fact] - public void BreakStatementsShouldBeParsedCorrectly() - { - var breakToken = GetDefaultToken(TokenType.KeywordBreak); - - var lexerMock = new LexerMock(breakToken); - - IParser parser = new Parser(lexerMock); - - var breakStatement = parser.CurrentStatement.As(); - breakStatement.Should().NotBeNull(); - breakStatement!.IsTerminated.Should().Be(false); - } - - [Trait("Category", "Break if statements")] - [Fact] - public void BreakIfStatementsShouldBeParsedCorrectly() - { - var breakIfToken = GetDefaultToken(TokenType.KeywordBreakIf); - - var leftParenthesisToken = GetDefaultToken(TokenType.LeftParenthesis); - - const string identifierName = "a"; - var identifierToken = new Token(TokenType.Identifier, identifierName); - - var rightParenthesisToken = GetDefaultToken(TokenType.RightParenthesis); - - var lexerMock = new LexerMock(breakIfToken, leftParenthesisToken, identifierToken, rightParenthesisToken); - - IParser parser = new Parser(lexerMock); - - var breakIfStatement = parser.CurrentStatement.As(); - breakIfStatement.Should().NotBeNull(); - breakIfStatement!.IsTerminated.Should().Be(false); - - var expression = breakIfStatement.Condition.As(); - expression.Should().NotBeNull(); - expression!.Name.Should().Be(identifierName); - } - - [Trait("Category", "Expression statements")] - [Theory] - [MemberData(nameof(GenerateExpressionStatementTestData))] - public void ExpressionStatementsShouldBeParsedCorrectly(Token[] tokenSequence, Type expectedExpressionType) - { - var lexerMock = new LexerMock(tokenSequence); - - IParser parser = new Parser(lexerMock); - - var expressionStatement = parser.CurrentStatement.As(); - 
expressionStatement.Should().NotBeNull(); - expressionStatement!.IsTerminated.Should().Be(false); - expressionStatement.Expression.Should().BeOfType(expectedExpressionType); - } - - [Trait("Category", "Comments")] - [Fact] - public void CommentsShouldBeIgnoredWhileParsing() - { - var initToken = GetDefaultToken(TokenType.KeywordInit); - var constToken = GetDefaultToken(TokenType.KeywordConst); - var identifierToken = new Token(TokenType.Identifier, "a"); - var equalToken = GetDefaultToken(TokenType.OperatorEquals); - var leftTermToken = new Token(TokenType.LiteralInteger, 123L); - var plusToken = GetDefaultToken(TokenType.OperatorPlus); - var rightTermToken = new Token(TokenType.LiteralFloat, 3.14); - - var expectedStatement = new VariableInitializationListStatement(new List - { - new("a", - new BinaryExpression(new LiteralExpression(DataType.Integer, 123L), - Operator.Addition, - new LiteralExpression(DataType.Float, 3.14)), - true) - }); - - var comments = new[] - { - new Token(TokenType.LineComment, "line comment"), - new Token(TokenType.BlockComment, "block comment") - }; - - var lexerMock = - new LexerMock( - new[] { initToken, constToken, identifierToken, equalToken, leftTermToken, plusToken, rightTermToken } - .SelectMany((x, i) => new[] { x, comments[i % 2] }).ToArray()); - - IParser parser = new Parser(lexerMock); - - var statement = parser.CurrentStatement.As(); - statement.Should().NotBeNull(); - statement.Should().BeEquivalentTo(expectedStatement, ProvideOptions); - } - - #region Generators - - public static IEnumerable GenerateVariableInitializationListStatementTestData() - { - var initToken = GetDefaultToken(TokenType.KeywordInit); - var constToken = GetDefaultToken(TokenType.KeywordConst); - var assignmentToken = GetDefaultToken(TokenType.OperatorEquals); - var commaToken = GetDefaultToken(TokenType.Comma); - // basic - yield return new object[] - { - new[] - { - initToken, - new(TokenType.Identifier, "a") - }, - new VariableInitialization[] - { - new("a") - } - }; - // with const - yield return new object[] - { - new[] - { - initToken, - constToken, - new(TokenType.Identifier, "a") - }, - new VariableInitialization[] - { - new(IsConst: true, Name: "a") - } - }; - // with initialization - yield return new object[] - { - new[] - { - initToken, - new(TokenType.Identifier, "a"), - assignmentToken, - new Token(TokenType.LiteralInteger, 5L) - }, - new VariableInitialization[] - { - new("a", new LiteralExpression(DataType.Integer, 5L)) - } - }; - // with const and initialization - yield return new object[] - { - new[] - { - initToken, - constToken, - new(TokenType.Identifier, "a"), - assignmentToken, - new Token(TokenType.LiteralInteger, 5L) - }, - new VariableInitialization[] - { - new(IsConst: true, Name: "a", InitialValue: new LiteralExpression(DataType.Integer, 5L)) - } - }; - // more than one - yield return new object[] - { - new[] - { - initToken, - new(TokenType.Identifier, "a"), - commaToken, - new(TokenType.Identifier, "b") - }, - new VariableInitialization[] - { - new("a"), - new("b") - } - }; - // more than one, with const and initialization - yield return new object[] - { - new[] - { - initToken, - constToken, - new(TokenType.Identifier, "a"), - assignmentToken, - new Token(TokenType.LiteralInteger, 5L), - commaToken, - new(TokenType.Identifier, "b") - }, - new VariableInitialization[] - { - new(IsConst: true, Name: "a", InitialValue: new LiteralExpression(DataType.Integer, 5L)), - new("b") - } - }; - } - - public static IEnumerable 
GenerateExpressionStatementTestData() - { - // block - yield return new[] - { - GenerateBlockExpressionTestData().First()[0], - typeof(BlockExpression) - }; - // if - yield return new[] - { - GenerateConditionalExpressionTestData().First()[0], - typeof(ConditionalExpression) - }; - // for - yield return new[] - { - GenerateForLoopExpressionTestData().First()[0], - typeof(ForLoopExpression) - }; - // while - yield return new[] - { - GenerateWhileLoopExpressionTestData().First()[0], - typeof(WhileLoopExpression) - }; - // functi - yield return new[] - { - GenerateFunctionDefinitionExpressionTestData().First()[0], - typeof(FunctionDefinitionExpression) - }; - // match - yield return new[] - { - GeneratePatternMatchingExpressionTestData().First()[0], - typeof(PatternMatchingExpression) - }; - // binary - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "b") - }, - typeof(BinaryExpression) - }; - // unary - yield return new object[] - { - new[] - { - GetDefaultToken(TokenType.OperatorPlus), - new Token(TokenType.Identifier, "a") - }, - typeof(UnaryExpression) - }; - // call - yield return new[] - { - GenerateFunctionCallExpressionTestData().First()[0], - typeof(FunctionCallExpression) - }; - // identifier - yield return new object[] - { - new[] - { - new Token(TokenType.Identifier, "a") - }, - typeof(IdentifierExpression) - }; - // literal - yield return new object[] - { - new[] - { - new Token(TokenType.LiteralInteger, 18L) - }, - typeof(LiteralExpression) - }; - // type cast - yield return new object[] - { - new[] - { - GetDefaultToken(TokenType.KeywordInt), - GetDefaultToken(TokenType.LeftParenthesis), - new Token(TokenType.Identifier, "a"), - GetDefaultToken(TokenType.RightParenthesis) - }, - typeof(TypeCastExpression) - }; - } - - #endregion Generators -} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Break.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Break.cs new file mode 100644 index 0000000..843cdbe --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Break.cs @@ -0,0 +1,29 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Break statements")] + [Fact] + public void BreakStatementsShouldBeParsedCorrectly() + { + var breakToken = Helpers.GetDefaultToken(TokenType.KeywordBreak); + + var lexerMock = new LexerMock(breakToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var breakStatement = parser.CurrentStatement.As(); + breakStatement.Should().NotBeNull(); + breakStatement!.IsTerminated.Should().Be(true); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.BreakIf.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.BreakIf.cs new file mode 100644 index 0000000..82818fb --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.BreakIf.cs @@ -0,0 +1,44 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Break if 
statements")] + [Fact] + public void BreakIfStatementsShouldBeParsedCorrectly() + { + var breakIfToken = Helpers.GetDefaultToken(TokenType.KeywordBreakIf); + + var leftParenthesisToken = Helpers.GetDefaultToken(TokenType.LeftParenthesis); + + const string identifierName = "a"; + var identifierToken = new Token(TokenType.Identifier, identifierName); + + var rightParenthesisToken = Helpers.GetDefaultToken(TokenType.RightParenthesis); + + var lexerMock = new LexerMock(breakIfToken, + leftParenthesisToken, + identifierToken, + rightParenthesisToken, + Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var breakIfStatement = parser.CurrentStatement.As(); + breakIfStatement.Should().NotBeNull(); + breakIfStatement!.IsTerminated.Should().Be(true); + + var expression = breakIfStatement.Condition.As(); + expression.Should().NotBeNull(); + expression!.Name.Should().Be(identifierName); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Expression.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Expression.cs new file mode 100644 index 0000000..fd21860 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Expression.cs @@ -0,0 +1,31 @@ +using System; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Expression statements")] + [Theory] + [ClassData(typeof(ExpressionStatementTestData))] + public void ExpressionStatementsShouldBeParsedCorrectly(Token[] tokenSequence, Type expectedExpressionType) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var expressionStatement = parser.CurrentStatement.As(); + expressionStatement.Should().NotBeNull(); + expressionStatement!.IsTerminated.Should().Be(true); + expressionStatement.Expression.Should().BeOfType(expectedExpressionType); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.NamespaceImport.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.NamespaceImport.cs new file mode 100644 index 0000000..bee5b12 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.NamespaceImport.cs @@ -0,0 +1,61 @@ +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Namespace import statements")] + [Theory] + [InlineData("std")] + [InlineData("std", "io")] + [InlineData("one1", "two2", "three3")] + public void NamespaceImportStatementsShouldBeParsedCorrectly(params string[] namespaceSegments) + { + var pullToken = Helpers.GetDefaultToken(TokenType.KeywordPull); + + var namespaceSegmentTokens = namespaceSegments.Select(x => new Token(TokenType.Identifier, x)); + var dotToken = Helpers.GetDefaultToken(TokenType.OperatorDot); + var interleavedNamespaceSegments = 
namespaceSegmentTokens.SelectMany(x => new[] { x, dotToken }) + .Take(2 * namespaceSegments.Length - 1); + + var expectedNamespaceSegments = namespaceSegments.Select(x => new IdentifierExpression(x)).ToArray(); + + var lexerMock = new LexerMock(interleavedNamespaceSegments.Prepend(pullToken).AppendSemicolon()); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var namespaceImportStatement = parser.CurrentStatement.As(); + namespaceImportStatement.Should().NotBeNull(); + namespaceImportStatement!.IsTerminated.Should().Be(true); + namespaceImportStatement.NamespaceLevels.ToArray().Should().BeEquivalentTo(expectedNamespaceSegments, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Namespace import statements")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(NamespaceImportStatementNonIdentifiersTestData))] + public void NonIdentifiersInNamespaceImportStatementsShouldBeDetectedProperly(Token[] tokenSequence, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Return.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Return.cs new file mode 100644 index 0000000..2428dd4 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.Return.cs @@ -0,0 +1,58 @@ +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Return statements")] + [Fact] + public void EmptyReturnStatementsShouldBeParsedCorrectly() + { + var returnToken = Helpers.GetDefaultToken(TokenType.KeywordReturn); + + var lexerMock = new LexerMock(returnToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var returnStatement = parser.CurrentStatement.As(); + returnStatement.Should().NotBeNull(); + returnStatement!.IsTerminated.Should().Be(true); + returnStatement.Value.Should().BeNull(); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Return statements")] + [Fact] + public void ReturnStatementsContainingExpressionsShouldBeParsedCorrectly() + { + var returnToken = Helpers.GetDefaultToken(TokenType.KeywordReturn); + + const string identifierName = "a"; + var identifierToken = new Token(TokenType.Identifier, identifierName); + + var lexerMock = new LexerMock(returnToken, identifierToken, Helpers.GetDefaultToken(TokenType.Semicolon)); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var returnStatement = parser.CurrentStatement.As(); + returnStatement.Should().NotBeNull(); + returnStatement!.IsTerminated.Should().Be(true); + returnStatement.Value.Should().NotBeNull(); + + var expression = returnStatement.Value.As(); + expression.Should().NotBeNull(); + 
expression.Name.Should().Be(identifierName); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.VariableInitializationList.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.VariableInitializationList.cs new file mode 100644 index 0000000..150d961 --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.VariableInitializationList.cs @@ -0,0 +1,138 @@ +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Toffee.Tests.SyntacticAnalysis.Generators; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Variable initialization list statements")] + [Theory] + [ClassData(typeof(VariableInitializationListStatementTestData))] + public void VariableInitializationListStatementsShouldBeParsedCorrectly(Token[] tokenSequence, VariableInitialization[] expectedVariableList) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var variableInitializationStatement = parser.CurrentStatement.As(); + variableInitializationStatement.Should().NotBeNull(); + variableInitializationStatement!.IsTerminated.Should().Be(true); + variableInitializationStatement.Items.Should().BeEquivalentTo(expectedVariableList, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Variable initialization list statements")] + [Trait("Category", "Negative")] + [Theory] + [ClassData(typeof(VariableInitializationListStatementMissingVariableTestData))] + public void MissingVariableInVariableInitializationListStatementsShouldBeDetectedProperly(Token[] tokenSequence, ParserError expectedError) + { + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Variable initialization list statements")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MissingInitialValueInVariableInitializationListStatementsShouldBeDetectedProperly(bool constKeywordUsed) + { + var tokenSequence = (constKeywordUsed + ? new[] { Helpers.GetDefaultToken(TokenType.KeywordInit), Helpers.GetDefaultToken(TokenType.KeywordConst) } + : new[] { Helpers.GetDefaultToken(TokenType.KeywordInit) }) + .Concat(new[] + { + new Token(TokenType.Identifier, "a"), + Helpers.GetDefaultToken(TokenType.OperatorEquals) + }) + .AppendSemicolon(); + + var errorPosition = 3u + (constKeywordUsed ? 
1u : 0u); + var expectedError = new ExpectedExpression(new Position(errorPosition, 1, errorPosition), TokenType.Semicolon); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + parser.CurrentStatement.Should().BeNull(); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } + + [Trait("Category", "Variable initialization list statements")] + [Trait("Category", "Negative")] + [Theory] + [InlineData(TokenType.OperatorEqualsEquals, true)] + [InlineData(TokenType.OperatorPlusEquals, true)] + [InlineData(TokenType.OperatorMinusEquals, true)] + [InlineData(TokenType.OperatorAsteriskEquals, true)] + [InlineData(TokenType.OperatorSlashEquals, true)] + [InlineData(TokenType.OperatorPercentEquals, true)] + [InlineData(TokenType.OperatorEqualsEquals, false)] + [InlineData(TokenType.OperatorPlusEquals, false)] + [InlineData(TokenType.OperatorMinusEquals, false)] + [InlineData(TokenType.OperatorAsteriskEquals, false)] + [InlineData(TokenType.OperatorSlashEquals, false)] + [InlineData(TokenType.OperatorPercentEquals, false)] + public void BadAssignmentOperatorInVariableInitializationListStatementsShouldBeDetectedProperly(TokenType operatorTokenType, bool constKeywordUsed) + { + const string initializedVariableName = "a"; + + var tokenSequence = (constKeywordUsed + ? new[] { Helpers.GetDefaultToken(TokenType.KeywordInit), Helpers.GetDefaultToken(TokenType.KeywordConst) } + : new[] { Helpers.GetDefaultToken(TokenType.KeywordInit) }) + .Concat(new[] + { + new Token(TokenType.Identifier, initializedVariableName), + Helpers.GetDefaultToken(operatorTokenType), + new Token(TokenType.LiteralInteger, 123ul) + }) + .AppendSemicolon(); + + var expectedStatement = new VariableInitializationListStatement(new List + { + new(initializedVariableName, new LiteralExpression(DataType.Integer, 123ul), constKeywordUsed) + }) { IsTerminated = true }; + + var errorPosition = 2u + (constKeywordUsed ? 
1u : 0u); + var expectedError = new UnexpectedToken(new Position(errorPosition, 1, errorPosition), + operatorTokenType, TokenType.OperatorEquals); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var variableInitializationStatement = parser.CurrentStatement.As(); + variableInitializationStatement.Should().BeEquivalentTo(expectedStatement, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.cs b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.cs new file mode 100644 index 0000000..acea37c --- /dev/null +++ b/Toffee.Tests/SyntacticAnalysis/StatementParsingTests.cs @@ -0,0 +1,85 @@ +using System.Collections.Generic; +using System.Linq; +using FluentAssertions; +using Toffee.LexicalAnalysis; +using Toffee.Scanning; +using Toffee.SyntacticAnalysis; +using Xunit; + +namespace Toffee.Tests.SyntacticAnalysis; + +public partial class StatementParsingTests +{ + [Trait("Category", "Comments")] + [Fact] + public void CommentsShouldBeIgnoredWhileParsing() + { + var initToken = Helpers.GetDefaultToken(TokenType.KeywordInit); + var constToken = Helpers.GetDefaultToken(TokenType.KeywordConst); + var identifierToken = new Token(TokenType.Identifier, "a"); + var equalToken = Helpers.GetDefaultToken(TokenType.OperatorEquals); + var leftTermToken = new Token(TokenType.LiteralInteger, 123ul); + var plusToken = Helpers.GetDefaultToken(TokenType.OperatorPlus); + var rightTermToken = new Token(TokenType.LiteralFloat, 3.14); + + var expectedStatement = new VariableInitializationListStatement(new List + { + new("a", + new BinaryExpression(new LiteralExpression(DataType.Integer, 123ul), + Operator.Addition, + new LiteralExpression(DataType.Float, 3.14)), + true) + }) { IsTerminated = true }; + + var comments = new[] + { + new Token(TokenType.LineComment, "line comment"), + new Token(TokenType.BlockComment, "block comment") + }; + + var lexerMock = + new LexerMock( + new[] { initToken, constToken, identifierToken, equalToken, leftTermToken, plusToken, rightTermToken } + .SelectMany((x, i) => new[] { x, comments[i % 2] }).AppendSemicolon()); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var statement = parser.CurrentStatement.As(); + statement.Should().NotBeNull(); + statement.Should().BeEquivalentTo(expectedStatement, Helpers.ProvideOptions); + + Assert.False(errorHandlerMock.HadErrors); + Assert.False(errorHandlerMock.HadWarnings); + } + + + [Trait("Category", "Negative")] + [Fact] + public void UnterminatedTopLevelStatementsShouldBeDetectedProperly() + { + var tokenSequence = new[] + { + Helpers.GetDefaultToken(TokenType.KeywordBreak) + }; + + var expectedStatement = new BreakStatement(); + + var expectedError = new ExpectedSemicolon(new Position(1, 1, 1), TokenType.EndOfText); + + var lexerMock = new LexerMock(tokenSequence); + var errorHandlerMock = new ParserErrorHandlerMock(); + IParser parser = new Parser(lexerMock, errorHandlerMock); + + parser.Advance(); + + var statement = parser.CurrentStatement.As(); + statement.Should().NotBeNull(); + statement.Should().BeEquivalentTo(expectedStatement, Helpers.ProvideOptions); + + errorHandlerMock.HandledErrors[0].Should().BeEquivalentTo(expectedError); + + 
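+        // recovery case: the statement is still built as if "=" had been used, only an UnexpectedToken error is reported and no warnings are emitted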
Assert.False(errorHandlerMock.HadWarnings); + } +} diff --git a/Toffee.Tests/Toffee.Tests.csproj b/Toffee.Tests/Toffee.Tests.csproj index 20fa087..7b705bf 100644 --- a/Toffee.Tests/Toffee.Tests.csproj +++ b/Toffee.Tests/Toffee.Tests.csproj @@ -28,11 +28,77 @@ - - ParserTests.cs + + LexerTests.cs - - ParserTests.cs + + LexerTests.cs + + + LexerTests.cs + + + LexerTests.cs + + + ExpressionParsingTests.cs + + + StatementParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + StatementParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + ExpressionParsingTests.cs + + + StatementParsingTests.cs + + + StatementParsingTests.cs + + + StatementParsingTests.cs + + + StatementParsingTests.cs diff --git a/Toffee/CommandLine/Application.cs b/Toffee/CommandLine/Application.cs index d6439f4..9b9f070 100644 --- a/Toffee/CommandLine/Application.cs +++ b/Toffee/CommandLine/Application.cs @@ -12,7 +12,7 @@ public class Application private string? _sourceName; private TextReader? _reader; private IScanner? _scanner; - private ILexerErrorHandler? _logger; + private ConsoleErrorHandler? _logger; private ILexer? _lexer; private IParser? _parser; @@ -30,7 +30,7 @@ public void Execute( _scanner = new Scanner(_reader); _logger = new ConsoleErrorHandler(_sourceName); _lexer = new Lexer(_scanner, _logger, maxLexemeLength); - _parser = new Parser(_lexer); + _parser = new Parser(_lexer, _logger); RunParser(); } @@ -58,10 +58,7 @@ static string FormatPosition(Position position) => private void RunParser() { var printer = new AstPrinter(_sourceName!); - while (_parser!.CurrentStatement is not null) - { - printer.Print(_parser.CurrentStatement); - _parser.Advance(); - } + while (_parser!.Advance() is not null) + printer.Print(_parser.CurrentStatement!); } } diff --git a/Toffee/LexicalAnalysis/BaseLexer.cs b/Toffee/LexicalAnalysis/BaseLexer.cs index b8db46a..2fdfd6a 100644 --- a/Toffee/LexicalAnalysis/BaseLexer.cs +++ b/Toffee/LexicalAnalysis/BaseLexer.cs @@ -16,7 +16,6 @@ public int MaxLexemeLength public Token CurrentToken { get; protected set; } - public bool HadError { get; protected set; } public LexerError? CurrentError { get; protected set; } protected BaseLexer(int? maxLexemeLength = null) @@ -25,10 +24,4 @@ protected BaseLexer(int? maxLexemeLength = null) } public abstract Token Advance(); - - public void ResetError() - { - CurrentError = null; - HadError = false; - } } diff --git a/Toffee/LexicalAnalysis/ILexer.cs b/Toffee/LexicalAnalysis/ILexer.cs index db4e7e1..36a4225 100644 --- a/Toffee/LexicalAnalysis/ILexer.cs +++ b/Toffee/LexicalAnalysis/ILexer.cs @@ -6,7 +6,6 @@ public interface ILexer Token CurrentToken { get; } - bool HadError { get; } LexerError? CurrentError { get; } /// @@ -14,6 +13,4 @@ public interface ILexer /// /// Superseded token - the current one from before the method was called Token Advance(); - - void ResetError(); } diff --git a/Toffee/LexicalAnalysis/Lexer.Numbers.cs b/Toffee/LexicalAnalysis/Lexer.Numbers.cs index 5a09c21..2d70880 100644 --- a/Toffee/LexicalAnalysis/Lexer.Numbers.cs +++ b/Toffee/LexicalAnalysis/Lexer.Numbers.cs @@ -6,7 +6,6 @@ public sealed partial class Lexer { private Token? 
MatchNumber() { - // TODO: handle <-9223372036854775808 and >9223372036854775808 "literals" in parser if (!IsDigitGivenRadix(10, _scanner.CurrentCharacter)) return null; diff --git a/Toffee/LexicalAnalysis/LexerErrors.cs b/Toffee/LexicalAnalysis/LexerErrors.cs index 97b5a98..0ad8a72 100644 --- a/Toffee/LexicalAnalysis/LexerErrors.cs +++ b/Toffee/LexicalAnalysis/LexerErrors.cs @@ -1,4 +1,4 @@ -using System.Collections.ObjectModel; +using System.Collections.Immutable; using Toffee.ErrorHandling; using Toffee.Scanning; @@ -16,7 +16,7 @@ public record MissingExponent(Position Position) : LexerError(Position); public static class LexerErrorExtensions { - private static readonly ReadOnlyDictionary MessageMap = new(new Dictionary + private static readonly ImmutableDictionary MessageMap = new Dictionary { { typeof(UnexpectedEndOfText), "Unexpected end of text" }, { typeof(ExceededMaxLexemeLength), "Unexpected end of text" }, @@ -25,7 +25,7 @@ public static class LexerErrorExtensions { typeof(InvalidNonDecimalPrefix), "Unknown non-decimal number prefix" }, { typeof(MissingNonDecimalDigits), "No digits after non-decimal number prefix" }, { typeof(MissingExponent), "No digits after scientific notation prefix" } - }); + }.ToImmutableDictionary(); public static string ToMessage(this LexerError error) => MessageMap.GetValueOrDefault(error.GetType(), "Lexical error"); diff --git a/Toffee/LexicalAnalysis/LexerWarnings.cs b/Toffee/LexicalAnalysis/LexerWarnings.cs index c96156f..e50cc01 100644 --- a/Toffee/LexicalAnalysis/LexerWarnings.cs +++ b/Toffee/LexicalAnalysis/LexerWarnings.cs @@ -1,4 +1,4 @@ -using System.Collections.ObjectModel; +using System.Collections.Immutable; using Toffee.ErrorHandling; using Toffee.Scanning; @@ -11,11 +11,11 @@ public record MissingHexCharCode(Position Position) : LexerWarning(Position); public static class LexerWarningExtensions { - private static readonly ReadOnlyDictionary MessageMap = new(new Dictionary + private static readonly ImmutableDictionary MessageMap = new Dictionary { { typeof(UnknownEscapeSequence), "Unknown escape sequence in string" }, { typeof(MissingHexCharCode), "Hexadecimal character code missing in escape sequence in string" } - }); + }.ToImmutableDictionary(); public static string ToMessage(this LexerWarning warning) => MessageMap.GetValueOrDefault(warning.GetType(), "Lexical warning"); diff --git a/Toffee/LexicalAnalysis/TokenMappers.cs b/Toffee/LexicalAnalysis/TokenMappers.cs index f0896e5..c27fc54 100644 --- a/Toffee/LexicalAnalysis/TokenMappers.cs +++ b/Toffee/LexicalAnalysis/TokenMappers.cs @@ -1,10 +1,10 @@ -using System.Collections.ObjectModel; +using System.Collections.Immutable; namespace Toffee.LexicalAnalysis; public static class KeywordOrIdentifierMapper { - private static readonly ReadOnlyDictionary KeywordMap = new(new Dictionary + private static readonly ImmutableDictionary KeywordMap = new Dictionary { { "int", TokenType.KeywordInt }, { "float", TokenType.KeywordFloat }, @@ -32,7 +32,7 @@ public static class KeywordOrIdentifierMapper { "default", TokenType.KeywordDefault }, { "false", TokenType.KeywordFalse }, { "true", TokenType.KeywordTrue } - }); + }.ToImmutableDictionary(); public static Token MapToKeywordOrIdentifier(string name) => new(KeywordMap.GetValueOrDefault(name, TokenType.Identifier), name); @@ -40,7 +40,7 @@ public static Token MapToKeywordOrIdentifier(string name) => public static class OperatorMapper { - private static readonly ReadOnlyDictionary OperatorMap = new(new Dictionary + private static readonly 
ImmutableDictionary OperatorMap = new Dictionary { { ".", TokenType.OperatorDot }, { "^", TokenType.OperatorCaret }, @@ -74,13 +74,13 @@ public static class OperatorMapper { ",", TokenType.Comma }, { ":", TokenType.Colon }, { ";", TokenType.Semicolon } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary CommentMap = new(new Dictionary + private static readonly ImmutableDictionary CommentMap = new Dictionary { { "//", TokenType.LineComment }, { "/*", TokenType.BlockComment } - }); + }.ToImmutableDictionary(); public static bool IsTransitionExistent(string currentContent, char input) => CommentMap.Keys.All(x => x != currentContent) && diff --git a/Toffee/Running/AstPrinter.Expressions.cs b/Toffee/Running/AstPrinter.Expressions.cs index 0f796e5..3495779 100644 --- a/Toffee/Running/AstPrinter.Expressions.cs +++ b/Toffee/Running/AstPrinter.Expressions.cs @@ -8,10 +8,18 @@ public partial class AstPrinter { public void Print(Expression expression, int indentLevel = 0) { - var position = $"{_inputName}:{expression.Position.Line}:{expression.Position.Column}"; - var header = $"{expression.GetType().Name.Humanize(LetterCasing.LowerCase)} [{position}]"; + PrintDynamic(expression as dynamic, indentLevel); + } + + private void PrintHeader(Expression expression, int indentLevel = 0, string? details = null) + { + var startPosition = $"{_inputName}:{expression.StartPosition.Line}:{expression.StartPosition.Column}"; + var endPosition = $"{expression.EndPosition.Line}:{expression.EndPosition.Column}"; + var header = $"{expression.GetType().Name.Humanize(LetterCasing.LowerCase)}"; + if (!string.IsNullOrEmpty(details)) + header += $" ({details})"; + header += $" [{startPosition} - {endPosition}]"; Print(header, indentLevel); - PrintDynamic(expression as dynamic, indentLevel + 1); } private void Print(ConditionalElement conditional, int indentLevel) @@ -55,11 +63,6 @@ private void Print(PatternMatchingBranch branch, int indentLevel) Print(branch.Consequent, indentLevel + 1); } - private static void Print(Operator @operator, int indentLevel) - { - Print($"operator: {@operator.ToString().Humanize(LetterCasing.LowerCase)}", indentLevel); - } - private static void Print(DataType type, int indentLevel) { Print($"type: {type.Humanize(LetterCasing.LowerCase)}", indentLevel); @@ -71,138 +74,141 @@ private static void PrintDynamic(Expression expression, int indentLevel) private void PrintDynamic(BlockExpression expression, int indentLevel) { + PrintHeader(expression, indentLevel); foreach (var substatement in expression.Statements) { - Print("terminated", indentLevel); - Print(substatement, indentLevel + 1); + Print("regular statements", indentLevel + 1); + Print(substatement, indentLevel + 2); } - if (expression.UnterminatedStatement is null) + if (expression.ResultExpression is null) return; - Print("unterminated", indentLevel); - Print(expression.UnterminatedStatement, indentLevel + 1); + Print("result expression", indentLevel + 1); + Print(expression.ResultExpression, indentLevel + 2); } private void PrintDynamic(ConditionalExpression expression, int indentLevel) { - Print("if", indentLevel); - Print(expression.IfPart, indentLevel + 1); + PrintHeader(expression, indentLevel); + Print("if", indentLevel + 1); + Print(expression.IfPart, indentLevel + 2); foreach (var elifPart in expression.ElifParts) { - Print("elif", indentLevel); - Print(elifPart, indentLevel + 1); + Print("elif", indentLevel + 1); + Print(elifPart, indentLevel + 2); } if (expression.ElsePart is null) return; - 
Print("else", indentLevel); - Print(expression.ElsePart, indentLevel + 1); + Print("else", indentLevel + 1); + Print(expression.ElsePart, indentLevel + 2); } private void PrintDynamic(ForLoopExpression expression, int indentLevel) { + PrintHeader(expression, indentLevel); Print(expression.CounterName is not null ? $"counter: {expression.CounterName}" : "no counter", - indentLevel); - Print("range", indentLevel); - Print(expression.Range, indentLevel + 1); - Print("body", indentLevel); - Print(expression.Body, indentLevel + 1); + indentLevel + 1); + Print("range", indentLevel + 1); + Print(expression.Range, indentLevel + 2); + Print("body", indentLevel + 1); + Print(expression.Body, indentLevel + 2); } private void PrintDynamic(WhileLoopExpression expression, int indentLevel) { - Print("condition", indentLevel); - Print(expression.Condition, indentLevel + 1); - Print("body", indentLevel); - Print(expression.Body, indentLevel + 1); + PrintHeader(expression, indentLevel); + Print("condition", indentLevel + 1); + Print(expression.Condition, indentLevel + 2); + Print("body", indentLevel + 1); + Print(expression.Body, indentLevel + 2); } private void PrintDynamic(FunctionDefinitionExpression expression, int indentLevel) { - Print($"{(expression.Parameters.Count == 0 ? "no " : "")}parameters", indentLevel); + PrintHeader(expression, indentLevel); + Print($"{(expression.Parameters.Count == 0 ? "no " : "")}parameters", indentLevel + 1); foreach (var parameter in expression.Parameters) - Print(parameter, indentLevel + 1); - Print("body", indentLevel); - Print(expression.Body, indentLevel + 1); + Print(parameter, indentLevel + 2); + Print("body", indentLevel + 1); + Print(expression.Body, indentLevel + 2); } private void PrintDynamic(PatternMatchingExpression expression, int indentLevel) { - Print("argument", indentLevel); - Print(expression.Argument, indentLevel + 1); + PrintHeader(expression, indentLevel); + Print("argument", indentLevel + 1); + Print(expression.Argument, indentLevel + 2); foreach (var branch in expression.Branches) { - Print("branch", indentLevel); - Print(branch, indentLevel + 1); + Print("branch", indentLevel + 1); + Print(branch, indentLevel + 2); } if (expression.Default is null) return; - Print("default", indentLevel); - Print(expression.Default, indentLevel); + Print("default", indentLevel + 1); + Print(expression.Default, indentLevel + 2); } private void PrintDynamic(GroupingExpression expression, int indentLevel) { - Print(expression.Expression, indentLevel); + PrintHeader(expression, indentLevel); + Print(expression.Expression, indentLevel + 1); } private void PrintDynamic(BinaryExpression expression, int indentLevel) { - // TODO: shorter form (operator) - Print(expression.Operator, indentLevel); - Print(expression.Left, indentLevel); - Print(expression.Right, indentLevel); + PrintHeader(expression, indentLevel, expression.Operator.ToString().Humanize(LetterCasing.LowerCase)); + Print(expression.Left, indentLevel + 1); + Print(expression.Right, indentLevel + 1); } private void PrintDynamic(UnaryExpression expression, int indentLevel) { - // TODO: shorter form (operator) - Print(expression.Operator, indentLevel); - Print(expression.Expression, indentLevel); + PrintHeader(expression, indentLevel, expression.Operator.ToString().Humanize(LetterCasing.LowerCase)); + Print(expression.Expression, indentLevel + 1); } private void PrintDynamic(FunctionCallExpression expression, int indentLevel) { - Print("expression", indentLevel); - Print(expression.Expression, indentLevel + 1); 
+ PrintHeader(expression, indentLevel); + Print("expression", indentLevel + 1); + Print(expression.Expression, indentLevel + 2); foreach (var argument in expression.Arguments) { - Print("argument", indentLevel); - Print(argument, indentLevel + 1); + Print("argument", indentLevel + 1); + Print(argument, indentLevel + 2); } } - private static void PrintDynamic(IdentifierExpression expression, int indentLevel) + private void PrintDynamic(IdentifierExpression expression, int indentLevel) { - // TODO: shorter form (name) - Print(expression.Name, indentLevel); + PrintHeader(expression, indentLevel, expression.Name); } - private static void PrintDynamic(LiteralExpression expression, int indentLevel) + private void PrintDynamic(LiteralExpression expression, int indentLevel) { - // TODO: shorter form (type, value) var description = expression.Type switch { DataType.Null => null, DataType.String => $"\"{expression.Value}\"", - DataType.Float => ((float)expression.Value!).ToString(CultureInfo.InvariantCulture), + DataType.Float => ((double)expression.Value!).ToString(CultureInfo.InvariantCulture), DataType.Integer => expression.Value!.ToString(), DataType.Bool => expression.Value is true ? "true" : "false", - // TODO: exclude function from literal types _ => throw new ArgumentOutOfRangeException(nameof(expression.Type), expression.Type, null) }; if (description is not null) description = $": {description}"; - Print($"{expression.Type.Humanize(LetterCasing.LowerCase)}{description}", indentLevel); + PrintHeader(expression, indentLevel, $"{expression.Type.Humanize(LetterCasing.LowerCase)}{description}"); } private void PrintDynamic(TypeCastExpression expression, int indentLevel) { - Print(expression.Type, indentLevel); - Print(expression.Expression, indentLevel); + PrintHeader(expression, indentLevel); + Print(expression.Type, indentLevel + 1); + Print(expression.Expression, indentLevel + 1); } - private static void PrintDynamic(TypeExpression expression, int indentLevel) + private void PrintDynamic(TypeExpression expression, int indentLevel) { - // TODO: shorter form (type) - Print(expression.Type, indentLevel); + PrintHeader(expression, indentLevel, expression.Type.Humanize(LetterCasing.LowerCase)); } } diff --git a/Toffee/Running/AstPrinter.Statements.cs b/Toffee/Running/AstPrinter.Statements.cs index 767b1bc..68deb7c 100644 --- a/Toffee/Running/AstPrinter.Statements.cs +++ b/Toffee/Running/AstPrinter.Statements.cs @@ -7,8 +7,9 @@ public partial class AstPrinter { public void Print(Statement statement, int indentLevel = 0) { - var position = $"{_inputName}:{statement.Position.Line}:{statement.Position.Column}"; - var header = $"{statement.GetType().Name.Humanize(LetterCasing.LowerCase)} [{position}]"; + var startPosition = $"{_inputName}:{statement.StartPosition.Line}:{statement.StartPosition.Column}"; + var endPosition = $"{statement.EndPosition.Line}:{statement.EndPosition.Column}"; + var header = $"{statement.GetType().Name.Humanize(LetterCasing.LowerCase)} [{startPosition} - {endPosition}]"; Print(header, indentLevel); PrintDynamic(statement as dynamic, indentLevel + 1); } diff --git a/Toffee/SyntacticAnalysis/CommentSkippingLexer.cs b/Toffee/SyntacticAnalysis/CommentSkippingLexer.cs index 8e6ead1..7e67c39 100644 --- a/Toffee/SyntacticAnalysis/CommentSkippingLexer.cs +++ b/Toffee/SyntacticAnalysis/CommentSkippingLexer.cs @@ -13,8 +13,6 @@ public int MaxLexemeLength } public Token CurrentToken => _lexer.CurrentToken; - - public bool HadError => _lexer.HadError; public LexerError? 
CurrentError => _lexer.CurrentError; public CommentSkippingLexer(ILexer lexer) @@ -35,6 +33,4 @@ public Token Advance() SkipComments(); return returnedToken; } - - public void ResetError() => _lexer.ResetError(); } diff --git a/Toffee/SyntacticAnalysis/Expression.cs b/Toffee/SyntacticAnalysis/Expression.cs index c68681d..abd765b 100644 --- a/Toffee/SyntacticAnalysis/Expression.cs +++ b/Toffee/SyntacticAnalysis/Expression.cs @@ -2,30 +2,39 @@ namespace Toffee.SyntacticAnalysis; -public abstract record Expression(Position Position); +public abstract record Expression(Position StartPosition, Position EndPosition); -public record BlockExpression(IList Statements, Statement? UnterminatedStatement = null) - : Expression(new Position()); -public record ConditionalExpression(ConditionalElement IfPart, IList ElifParts, - Expression? ElsePart = null) : Expression(new Position()); +public record BlockExpression(IList Statements, Expression? ResultExpression = null) + : Expression(new Position(), new Position()); +public record ConditionalExpression(ConditionalElement IfPart, IList ElifParts, Expression? ElsePart = null) + : Expression(new Position(), new Position()); public record ForLoopExpression(ForLoopRange Range, Expression Body, string? CounterName = null) - : Expression(new Position()); -public record WhileLoopExpression(Expression Condition, Expression Body) : Expression(new Position()); + : Expression(new Position(), new Position()); +public record WhileLoopExpression(Expression Condition, Expression Body) + : Expression(new Position(), new Position()); public record FunctionDefinitionExpression(IList Parameters, BlockExpression Body) - : Expression(new Position()); -public record PatternMatchingExpression(Expression Argument, IList Branches, - Expression? Default = null) : Expression(new Position()); + : Expression(new Position(), new Position()); +public record PatternMatchingExpression(Expression Argument, IList Branches, Expression? Default = null) + : Expression(new Position(), new Position()); public record ConditionalElement(Expression Condition, Expression Consequent); public record ForLoopRange(Expression PastTheEnd, Expression? Start = null, Expression? Step = null); public record FunctionParameter(string Name, bool IsConst = false, bool IsNullAllowed = true); public record PatternMatchingBranch(Expression? Pattern, Expression Consequent); -public record GroupingExpression(Expression Expression) : Expression(new Position()); -public record BinaryExpression(Expression Left, Operator Operator, Expression Right) : Expression(new Position()); -public record UnaryExpression(Operator Operator, Expression Expression) : Expression(new Position()); -public record FunctionCallExpression(Expression Expression, IList Arguments) : Expression(new Position()); -public record IdentifierExpression(string Name) : Expression(new Position()); -public record LiteralExpression(DataType Type, object? 
Value) : Expression(new Position()); -public record TypeCastExpression(DataType Type, Expression Expression) : Expression(new Position()); -public record TypeExpression(DataType Type) : Expression(new Position()); +public record GroupingExpression(Expression Expression) + : Expression(new Position(), new Position()); +public record BinaryExpression(Expression Left, Operator Operator, Expression Right) + : Expression(new Position(), new Position()); +public record UnaryExpression(Operator Operator, Expression Expression) + : Expression(new Position(), new Position()); +public record FunctionCallExpression(Expression Expression, IList Arguments) + : Expression(new Position(), new Position()); +public record IdentifierExpression(string Name) + : Expression(new Position(), new Position()); +public record LiteralExpression(DataType Type, object? Value) + : Expression(new Position(), new Position()); +public record TypeCastExpression(DataType Type, Expression Expression) + : Expression(new Position(), new Position()); +public record TypeExpression(DataType Type) + : Expression(new Position(), new Position()); diff --git a/Toffee/SyntacticAnalysis/IParser.cs b/Toffee/SyntacticAnalysis/IParser.cs index c5f642a..4617add 100644 --- a/Toffee/SyntacticAnalysis/IParser.cs +++ b/Toffee/SyntacticAnalysis/IParser.cs @@ -1,6 +1,6 @@ namespace Toffee.SyntacticAnalysis; -public interface IParser +public interface IParser // TODO: implement IEnumerable? { /// /// Current top-level statement. @@ -11,6 +11,6 @@ public interface IParser /// /// Advances the position of the parser in the statement stream. /// - /// Superseded statement - the current one from before the method was called + /// Parsed statement - the current one after the method was called Statement? Advance(); } diff --git a/Toffee/SyntacticAnalysis/LiteralMapper.cs b/Toffee/SyntacticAnalysis/LiteralMapper.cs index 04b256d..0735591 100644 --- a/Toffee/SyntacticAnalysis/LiteralMapper.cs +++ b/Toffee/SyntacticAnalysis/LiteralMapper.cs @@ -1,11 +1,11 @@ -using System.Collections.ObjectModel; +using System.Collections.Immutable; using Toffee.LexicalAnalysis; namespace Toffee.SyntacticAnalysis; public static class LiteralMapper { - private static readonly ReadOnlyDictionary TypeMap = new(new Dictionary + private static readonly ImmutableDictionary TypeMap = new Dictionary { { TokenType.LiteralInteger, DataType.Integer }, { TokenType.LiteralFloat, DataType.Float }, @@ -13,11 +13,13 @@ public static class LiteralMapper { TokenType.KeywordTrue, DataType.Bool }, { TokenType.KeywordFalse, DataType.Bool }, { TokenType.KeywordNull, DataType.Null } - }); + }.ToImmutableDictionary(); + + public static TokenType[] LiteralTokenTypes { get; } = TypeMap.Keys.ToArray(); public static LiteralExpression MapToLiteralExpression(Token literalToken) { - var literalType = TypeMap[literalToken.Type]; // TODO: throws + var literalType = TypeMap[literalToken.Type]; var literalValue = literalType switch { DataType.Bool => literalToken.Type == TokenType.KeywordTrue, diff --git a/Toffee/SyntacticAnalysis/OperatorMapper.cs b/Toffee/SyntacticAnalysis/OperatorMapper.cs index 0929fb7..5f530cc 100644 --- a/Toffee/SyntacticAnalysis/OperatorMapper.cs +++ b/Toffee/SyntacticAnalysis/OperatorMapper.cs @@ -1,11 +1,11 @@ -using System.Collections.ObjectModel; +using System.Collections.Immutable; using Toffee.LexicalAnalysis; namespace Toffee.SyntacticAnalysis; public static class OperatorMapper { - private static readonly ReadOnlyDictionary ComparisonMap = new(new Dictionary + private 
static readonly ImmutableDictionary ComparisonMap = new Dictionary { { TokenType.OperatorLess, Operator.LessThanComparison }, { TokenType.OperatorLessEquals, Operator.LessOrEqualComparison }, @@ -13,9 +13,9 @@ public static class OperatorMapper { TokenType.OperatorGreaterEquals, Operator.GreaterOrEqualComparison }, { TokenType.OperatorEqualsEquals, Operator.EqualComparison }, { TokenType.OperatorBangEquals, Operator.NotEqualComparison } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary PatternMatchingComparisonMap = new(new Dictionary + private static readonly ImmutableDictionary PatternMatchingComparisonMap = new Dictionary { { TokenType.OperatorLess, Operator.PatternMatchingLessThanComparison }, { TokenType.OperatorLessEquals, Operator.PatternMatchingLessOrEqualComparison }, @@ -23,9 +23,9 @@ public static class OperatorMapper { TokenType.OperatorGreaterEquals, Operator.PatternMatchingGreaterOrEqualComparison }, { TokenType.OperatorEqualsEquals, Operator.PatternMatchingEqualComparison }, { TokenType.OperatorBangEquals, Operator.PatternMatchingNotEqualComparison } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary AssignmentMap = new(new Dictionary + private static readonly ImmutableDictionary AssignmentMap = new Dictionary { { TokenType.OperatorEquals, Operator.Assignment }, { TokenType.OperatorPlusEquals, Operator.AdditionAssignment }, @@ -33,33 +33,40 @@ public static class OperatorMapper { TokenType.OperatorAsteriskEquals, Operator.MultiplicationAssignment }, { TokenType.OperatorSlashEquals, Operator.DivisionAssignment }, { TokenType.OperatorPercentEquals, Operator.RemainderAssignment } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary AdditiveMap = new(new Dictionary + private static readonly ImmutableDictionary AdditiveMap = new Dictionary { { TokenType.OperatorPlus, Operator.Addition }, { TokenType.OperatorMinus, Operator.Subtraction } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary MultiplicativeMap = new(new Dictionary + private static readonly ImmutableDictionary MultiplicativeMap = new Dictionary { { TokenType.OperatorAsterisk, Operator.Multiplication }, { TokenType.OperatorSlash, Operator.Division }, { TokenType.OperatorPercent, Operator.Remainder } - }); + }.ToImmutableDictionary(); - private static readonly ReadOnlyDictionary UnaryMap = new(new Dictionary + private static readonly ImmutableDictionary UnaryMap = new Dictionary { { TokenType.OperatorPlus, Operator.NumberPromotion }, { TokenType.OperatorMinus, Operator.ArithmeticNegation }, { TokenType.OperatorBang, Operator.LogicalNegation } - }); + }.ToImmutableDictionary(); - public static Operator MapComparisonOperator(TokenType tokenType) => ComparisonMap[tokenType]; // TODO: throws + public static TokenType[] ComparisonTokenTypes { get; } = ComparisonMap.Keys.ToArray(); + public static TokenType[] PatternMatchingComparisonTokenTypes { get; } = PatternMatchingComparisonMap.Keys.ToArray(); + public static TokenType[] AssignmentTokenTypes { get; } = AssignmentMap.Keys.ToArray(); + public static TokenType[] AdditiveTokenTypes { get; } = AdditiveMap.Keys.ToArray(); + public static TokenType[] MultiplicativeTokenTypes { get; } = MultiplicativeMap.Keys.ToArray(); + public static TokenType[] UnaryTokenTypes { get; } = UnaryMap.Keys.ToArray(); + + public static Operator MapComparisonOperator(TokenType tokenType) => ComparisonMap[tokenType]; public static Operator MapPatternMatchingComparisonOperator(TokenType 
tokenType) => - PatternMatchingComparisonMap[tokenType]; // TODO: throws - public static Operator MapAssignmentOperator(TokenType tokenType) => AssignmentMap[tokenType]; // TODO: throws - public static Operator MapAdditiveOperator(TokenType tokenType) => AdditiveMap[tokenType]; // TODO: throws - public static Operator MapMultiplicativeOperator(TokenType tokenType) => MultiplicativeMap[tokenType]; // TODO: throws - public static Operator MapUnaryOperator(TokenType tokenType) => UnaryMap[tokenType]; // TODO: throws + PatternMatchingComparisonMap[tokenType]; + public static Operator MapAssignmentOperator(TokenType tokenType) => AssignmentMap[tokenType]; + public static Operator MapAdditiveOperator(TokenType tokenType) => AdditiveMap[tokenType]; + public static Operator MapMultiplicativeOperator(TokenType tokenType) => MultiplicativeMap[tokenType]; + public static Operator MapUnaryOperator(TokenType tokenType) => UnaryMap[tokenType]; } diff --git a/Toffee/SyntacticAnalysis/Parser.Expressions.cs b/Toffee/SyntacticAnalysis/Parser.Expressions.cs index 3da38ce..1fd2e6b 100644 --- a/Toffee/SyntacticAnalysis/Parser.Expressions.cs +++ b/Toffee/SyntacticAnalysis/Parser.Expressions.cs @@ -4,74 +4,6 @@ namespace Toffee.SyntacticAnalysis; public partial class Parser { - private readonly TokenType[] _comparisonOperators = - { - TokenType.OperatorLess, - TokenType.OperatorLessEquals, - TokenType.OperatorGreater, - TokenType.OperatorGreaterEquals, - TokenType.OperatorEqualsEquals, - TokenType.OperatorBangEquals - }; - - private readonly TokenType[] _additiveOperator = - { - TokenType.OperatorMinus, - TokenType.OperatorPlus - }; - - private readonly TokenType[] _multiplicativeOperators = - { - TokenType.OperatorAsterisk, - TokenType.OperatorSlash, - TokenType.OperatorPercent - }; - - private readonly TokenType[] _unaryOperators = - { - TokenType.OperatorPlus, - TokenType.OperatorMinus, - TokenType.OperatorBang - }; - - private readonly TokenType[] _literalTokenTypes = - { - TokenType.LiteralInteger, - TokenType.LiteralFloat, - TokenType.LiteralString, - TokenType.KeywordTrue, - TokenType.KeywordFalse, - TokenType.KeywordNull - }; - - private readonly TokenType[] _typeTokenTypes = - { - TokenType.KeywordFloat, - TokenType.KeywordInt, - TokenType.KeywordBool, - TokenType.KeywordString, - TokenType.KeywordFunction, - TokenType.KeywordNull - }; - - private readonly TokenType[] _assignmentTokenTypes = - { - TokenType.OperatorEquals, - TokenType.OperatorPlusEquals, - TokenType.OperatorMinusEquals, - TokenType.OperatorAsteriskEquals, - TokenType.OperatorSlashEquals, - TokenType.OperatorPercentEquals - }; - - private readonly TokenType[] _castingTypeTokenTypes = - { - TokenType.KeywordInt, - TokenType.KeywordFloat, - TokenType.KeywordString, - TokenType.KeywordBool - }; - // expression // = block // | conditional_expression @@ -82,16 +14,21 @@ public partial class Parser // | assignment; private bool TryParseExpression(out Expression? 
parsedExpression) { - parsedExpression = null; - foreach (var parser in _expressionParsers) + var expressionParsers = new List { - var parserResult = parser(); - if (parserResult is null) - continue; - parsedExpression = parserResult; - return true; - } - return false; + ParseBlockExpression, + ParseConditionalExpression, + ParseForLoopExpression, + ParseWhileLoopExpression, + ParseFunctionDefinitionExpression, + ParsePatternMatchingExpression, + ParseAssignmentExpression + }; + + parsedExpression = expressionParsers + .Select(parser => parser()) + .FirstOrDefault(result => result is not null); + return parsedExpression is not null; } private Expression ParseExpression() @@ -102,29 +39,39 @@ private Expression ParseExpression() } // block - // = LEFT_BRACE, unterminated_statement, { SEMICOLON, { SEMICOLON }, unterminated_statement }, RIGHT_BRACE; + // = LEFT_BRACE, unterminated_statement, { SEMICOLON, [ unterminated_statement ] }, RIGHT_BRACE; private Expression? ParseBlockExpression() => SupplyPosition(() => { if (!TryConsumeToken(out _, TokenType.LeftBrace)) return null; - // TODO: check for missing braces var statementList = new List(); - var unterminatedStatement = (Statement?)null; + var resultStatement = (Statement?)null; while (TryParseStatement(out var parsedStatement)) { - if (!parsedStatement!.IsTerminated) + if (resultStatement is not null) { - unterminatedStatement = parsedStatement; - break; + if (!resultStatement.IsTerminated) + EmitError(new ExpectedSemicolon(parsedStatement!)); + statementList.Add(resultStatement); } - statementList.Add(parsedStatement); + resultStatement = parsedStatement; SkipSemicolons(); } + if (resultStatement is not null && resultStatement.IsTerminated) + { + statementList.Add(resultStatement); + resultStatement = null; + } + Expression? resultExpression = null; + if (resultStatement is ExpressionStatement resultExpressionStatement) + resultExpression = resultExpressionStatement.Expression; + else if (resultStatement is not null) + statementList.Add(resultStatement); - ConsumeToken(TokenType.RightBrace); + InterceptParserError(() => ConsumeToken(TokenType.RightBrace)); - return new BlockExpression(statementList, unterminatedStatement); + return new BlockExpression(statementList, resultExpression); }); // conditional_expression @@ -176,10 +123,9 @@ private bool TryMatchConditionalElsePart(out Expression? elsePart) // = LEFT_PARENTHESIS, expression, RIGHT_PARENTHESIS; private Expression ParseParenthesizedExpression() => SupplyPosition(() => { - // TODO: check for missing parentheses - ConsumeToken(TokenType.LeftParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.LeftParenthesis)); var expression = ParseExpression(); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); return expression; })!; @@ -204,15 +150,14 @@ private Expression ParseParenthesizedExpression() => SupplyPosition(() => /// private (string?, ForLoopRange) ParseForLoopSpecification() { - // TODO: check for missing parentheses - ConsumeToken(TokenType.LeftParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.LeftParenthesis)); var firstElement = ParseExpression(); var hasCounter = firstElement is IdentifierExpression && TryConsumeToken(out _, TokenType.Comma); var range = ParseForLoopRange(hasCounter ? null : firstElement); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); return (hasCounter ? 
(firstElement as IdentifierExpression)!.Name : null, range); } @@ -251,10 +196,9 @@ private ForLoopRange ParseForLoopRange(Expression? first) if (!TryConsumeToken(out _, TokenType.KeywordFuncti)) return null; - // TODO: check for missing parentheses - ConsumeToken(TokenType.LeftParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.LeftParenthesis)); var parameterList = ParseParameterList(); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); var body = ParseBlockExpression(); if (body is null) @@ -269,7 +213,9 @@ private List ParseParameterList() { var list = new List(); if (!TryParseParameter(out var firstParameter)) - return list; + return !TryConsumeToken(out var commaToken, TokenType.Comma) + ? list + : throw new ParserException(new ExpectedParameter(commaToken)); list.Add(firstParameter!); while (TryConsumeToken(out _, TokenType.Comma)) list.Add(ParseParameter()); @@ -307,9 +253,8 @@ private FunctionParameter ParseParameter() if (!TryConsumeToken(out _, TokenType.KeywordMatch)) return null; - // TODO: check for missing parentheses and braces var argument = ParseParenthesizedExpression(); - ConsumeToken(TokenType.LeftBrace); + InterceptParserError(() => ConsumeToken(TokenType.LeftBrace)); var patternSpecificationList = new List(); var defaultConsequent = (Expression?)null; @@ -323,7 +268,7 @@ private FunctionParameter ParseParameter() patternSpecificationList.Add(specification); } - ConsumeToken(TokenType.RightBrace); + InterceptParserError(() => ConsumeToken(TokenType.RightBrace)); return new PatternMatchingExpression(argument, patternSpecificationList, defaultConsequent); }); @@ -340,9 +285,9 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati if (!isDefault && (condition = ParseDisjunctionPatternExpression()) is null) return false; - ConsumeToken(TokenType.Colon); + InterceptParserError(() => ConsumeToken(TokenType.Colon)); var consequent = ParseExpression(); - ConsumeToken(TokenType.Semicolon); + InterceptParserError(() => ConsumeToken(TokenType.Semicolon)); specification = new PatternMatchingBranch(condition, consequent); return true; } @@ -392,31 +337,30 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati // | LEFT_PARENTHESIS, pattern_expression_disjunction, RIGHT_PARENTHESIS; private Expression? ParseNonAssociativePatternExpression() => SupplyPosition(() => { - if (TryConsumeToken(out var comparisonOperator, _comparisonOperators)) - if (TryConsumeToken(out var literal, _literalTokenTypes)) + if (TryConsumeToken(out var comparisonOperator, OperatorMapper.PatternMatchingComparisonTokenTypes)) + if (TryConsumeToken(out var literal, LiteralMapper.LiteralTokenTypes)) return new UnaryExpression(OperatorMapper.MapPatternMatchingComparisonOperator(comparisonOperator.Type), LiteralMapper.MapToLiteralExpression(literal)); else - throw new ParserException(new UnexpectedToken(_lexer.CurrentToken, _literalTokenTypes)); + throw new ParserException(new UnexpectedToken(_lexer.CurrentToken, LiteralMapper.LiteralTokenTypes)); if (TryConsumeToken(out _, TokenType.KeywordIs)) { var typeCheckOperator = TryConsumeToken(out _, TokenType.KeywordNot) ? 
Operator.PatternMatchingNotEqualTypeCheck : Operator.PatternMatchingEqualTypeCheck; - if (TryConsumeToken(out var type, _typeTokenTypes)) - return new UnaryExpression(typeCheckOperator, TypeMapper.MapToTypeExpression(type.Type)); - throw new ParserException(new UnexpectedToken(_lexer.CurrentToken, _typeTokenTypes)); + if (TryConsumeToken(out var type, TypeMapper.TypeTokenTypes)) + return new UnaryExpression(typeCheckOperator, new TypeExpression(TypeMapper.MapToType(type.Type))); + throw new ParserException(new UnexpectedToken(_lexer.CurrentToken, TypeMapper.TypeTokenTypes)); } if (!TryConsumeToken(out _, TokenType.LeftParenthesis)) return ParseAssignmentExpression(); - // TODO: check for missing parentheses var patternExpression = ParseDisjunctionPatternExpression(); if (patternExpression is null) throw new ParserException(new ExpectedPatternExpression(_lexer.CurrentToken)); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); return new GroupingExpression(patternExpression); }); @@ -428,7 +372,7 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati if (left is null) return null; - return TryConsumeToken(out var @operator, _assignmentTokenTypes) + return TryConsumeToken(out var @operator, OperatorMapper.AssignmentTokenTypes) ? new BinaryExpression(left, OperatorMapper.MapAssignmentOperator(@operator.Type), ParseExpression()) : left; }); @@ -518,9 +462,7 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati var typeCheckOperator = TryConsumeToken(out _, TokenType.KeywordNot) ? Operator.NotEqualTypeCheck : Operator.EqualTypeCheck; - var right = TypeMapper.MapToTypeExpression(ConsumeToken(_typeTokenTypes).Type); - if (right is null) - throw new ParserException(new ExpectedExpression(_lexer.CurrentToken)); + var right = new TypeExpression(TypeMapper.MapToType(ConsumeToken(TypeMapper.TypeTokenTypes).Type)); expression = new BinaryExpression(expression, typeCheckOperator, right); } return expression; @@ -534,7 +476,7 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati if (expression is null) return null; - while (TryConsumeToken(out var comparisonOperator, _comparisonOperators)) + while (TryConsumeToken(out var comparisonOperator, OperatorMapper.ComparisonTokenTypes)) { var right = ParseConcatenationExpression(); if (right is null) @@ -572,7 +514,7 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati if (expression is null) return null; - while (TryConsumeToken(out var additiveOperator, _additiveOperator)) + while (TryConsumeToken(out var additiveOperator, OperatorMapper.AdditiveTokenTypes)) { var right = ParseFactorExpression(); if (right is null) @@ -592,7 +534,7 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati if (expression is null) return null; - while (TryConsumeToken(out var multiplicativeOperator, _multiplicativeOperators)) + while (TryConsumeToken(out var multiplicativeOperator, OperatorMapper.MultiplicativeTokenTypes)) { var right = ParseUnaryPrefixedExpression(); if (right is null) @@ -609,51 +551,90 @@ private bool TryParsePatternSpecification(out PatternMatchingBranch? specificati // | exponentiation; private Expression? 
ParseUnaryPrefixedExpression() => SupplyPosition(() => { - if (!TryConsumeToken(out var unaryOperator, _unaryOperators)) - return ParseExponentiationExpression(); - var expression = ParseUnaryPrefixedExpression(); + var unaryOperators = new Stack(); + while (TryConsumeToken(out var unaryOperator, OperatorMapper.UnaryTokenTypes)) + unaryOperators.Push(unaryOperator); + + var expression = ParseExponentiationExpression(); if (expression is null) + { + if (unaryOperators.Count == 0) + return null; throw new ParserException(new ExpectedExpression(_lexer.CurrentToken)); + } - return new UnaryExpression(OperatorMapper.MapUnaryOperator(unaryOperator.Type), expression); + if (expression is LiteralExpression { Type: DataType.Integer, Value: > 9223372036854775807ul } badLiteral) + { + var isNegative = false; + var errorPosition = badLiteral.StartPosition; + if (unaryOperators.TryPeek(out var nearestOperator)) + { + errorPosition = new UnaryExpression(OperatorMapper.MapUnaryOperator(nearestOperator.Type), expression) + .StartPosition; + isNegative = nearestOperator.Type == TokenType.OperatorMinus; + } + if (badLiteral.Value is not 9223372036854775808ul || !isNegative) + EmitError(new IntegerOutOfRange(errorPosition, (ulong)badLiteral.Value, isNegative)); + } + + while (unaryOperators.TryPop(out var unaryOperator)) + expression = new UnaryExpression(OperatorMapper.MapUnaryOperator(unaryOperator.Type), expression); + return expression; }); // exponentiation // = namespace_access_or_function_call, { OP_CARET, exponentiation }; private Expression? ParseExponentiationExpression() => SupplyPosition(() => { + var components = new Stack(); var expression = ParseNamespaceAccessOrFunctionCallExpression(); if (expression is null) return null; - if (!TryConsumeToken(out _, TokenType.OperatorCaret)) - return expression; - var right = ParseExponentiationExpression(); - if (right is null) - throw new ParserException(new ExpectedExpression(_lexer.CurrentToken)); - return new BinaryExpression(expression, Operator.Exponentiation, right); + while (TryConsumeToken(out _, TokenType.OperatorCaret)) + { + if (expression is LiteralExpression { Type: DataType.Integer, Value: > 9223372036854775807ul } badLiteral) + EmitError(new IntegerOutOfRange(badLiteral)); + components.Push(expression); + expression = ParseNamespaceAccessOrFunctionCallExpression(); + if (expression is null) + throw new ParserException(new ExpectedExpression(_lexer.CurrentToken)); + } + + while (components.TryPop(out var left)) + expression = new BinaryExpression(left, Operator.Exponentiation, expression); + return expression; }); // namespace_access_or_function_call - // = namespace_access, { function_call_part } { OP_DOT, namespace_access, { function_call_part } }; + // = primary_expression, { function_call_part } { OP_DOT, primary_expression, { function_call_part } }; private Expression? 
ParseNamespaceAccessOrFunctionCallExpression() => SupplyPosition(() => { var expression = ParsePrimaryExpression(); if (expression is null) return null; + var lastNonFunctionElement = expression; while (TryParseFunctionCallPart(out var arguments)) expression = new FunctionCallExpression(expression, arguments!); + var lastElement = expression; + var consumedAnyDot = false; while (TryConsumeToken(out _, TokenType.OperatorDot)) { + consumedAnyDot = true; + if (lastElement is not IdentifierExpression) + EmitError(new ExpectedIdentifier(lastElement)); var right = ParsePrimaryExpression(); if (right is null) throw new ParserException(new ExpectedExpression(_lexer.CurrentToken)); + lastNonFunctionElement = lastElement = right; expression = new BinaryExpression(expression, Operator.NamespaceAccess, right); while (TryParseFunctionCallPart(out var arguments)) - expression = new FunctionCallExpression(expression, arguments!); + lastElement = expression = new FunctionCallExpression(expression, arguments!); } + if (consumedAnyDot && lastNonFunctionElement is not IdentifierExpression) + EmitError(new ExpectedIdentifier(lastNonFunctionElement)); return expression; }); @@ -664,9 +645,8 @@ private bool TryParseFunctionCallPart(out List? functionArguments) functionArguments = new List(); if (!TryConsumeToken(out _, TokenType.LeftParenthesis)) return false; - // TODO: check for missing parentheses functionArguments = ParseArgumentsList(); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); return true; } @@ -676,7 +656,9 @@ private List ParseArgumentsList() { var list = new List(); if (!TryParseExpression(out var firstArgument)) - return list; + return !TryConsumeToken(out var commaToken, TokenType.Comma) + ? list + : throw new ParserException(new ExpectedExpression(commaToken)); list.Add(firstArgument!); while (TryConsumeToken(out _, TokenType.Comma)) { @@ -692,18 +674,17 @@ private List ParseArgumentsList() // | [ CASTING_TYPE ], LEFT_PARENTHESIS, expression, RIGHT_PARENTHESIS; private Expression? ParsePrimaryExpression() => SupplyPosition(() => { - if (TryConsumeToken(out var literal, _literalTokenTypes)) + if (TryConsumeToken(out var literal, LiteralMapper.LiteralTokenTypes)) return LiteralMapper.MapToLiteralExpression(literal); if (TryConsumeToken(out var identifier, TokenType.Identifier)) return new IdentifierExpression((string)identifier.Content!); - if (TryConsumeToken(out var castingType, _castingTypeTokenTypes)) + if (TryConsumeToken(out var castingType, TypeMapper.CastingTypeTokenTypes)) return new TypeCastExpression(TypeMapper.MapToCastingType(castingType.Type), ParseParenthesizedExpression()); if (!TryConsumeToken(out _, TokenType.LeftParenthesis)) return null; - // TODO: check for missing parentheses var expression = ParseExpression(); - ConsumeToken(TokenType.RightParenthesis); + InterceptParserError(() => ConsumeToken(TokenType.RightParenthesis)); return new GroupingExpression(expression); }); @@ -711,6 +692,8 @@ private List ParseArgumentsList() { var expressionPosition = _lexer.CurrentToken.StartPosition; var expression = parseMethod(); - return expression is null ? null : expression with { Position = expressionPosition }; + return expression is null + ? 
null + : expression with { StartPosition = expressionPosition, EndPosition = _lastTokenEndPosition }; } } diff --git a/Toffee/SyntacticAnalysis/Parser.Statements.cs b/Toffee/SyntacticAnalysis/Parser.Statements.cs index d03bd09..57b113d 100644 --- a/Toffee/SyntacticAnalysis/Parser.Statements.cs +++ b/Toffee/SyntacticAnalysis/Parser.Statements.cs @@ -17,6 +17,13 @@ private bool TryParseStatement(out Statement? parsedStatement) return true; } + private Statement ParseStatement() + { + if (TryParseStatement(out var parsedStatement)) + return parsedStatement!; + throw new ParserException(new ExpectedStatement(_lexer.CurrentToken)); + } + // unterminated_statement // = namespace_import // | variable_initialization_list @@ -26,16 +33,20 @@ private bool TryParseStatement(out Statement? parsedStatement) // | expression; private bool TryParseUnterminatedStatement(out Statement? parsedStatement) { - parsedStatement = null; - foreach (var parser in _statementParsers) + var statementParsers = new List { - var parserResult = parser(); - if (parserResult is null) - continue; - parsedStatement = parserResult; - return true; - } - return false; + ParseNamespaceImportStatement, + ParseVariableInitializationListStatement, + ParseBreakStatement, + ParseBreakIfStatement, + ParseReturnStatement, + ParseExpressionStatement + }; + + parsedStatement = statementParsers + .Select(parser => parser()) + .FirstOrDefault(result => result is not null); + return parsedStatement is not null; } // namespace_import @@ -51,8 +62,9 @@ private bool TryParseUnterminatedStatement(out Statement? parsedStatement) var firstIdentifier = ConsumeToken(TokenType.Identifier); list.Add(new IdentifierExpression((string)firstIdentifier.Content!)); - while (TryConsumeToken(out _, TokenType.OperatorDot)) + while (!TryEnsureToken(TokenType.Semicolon)) { + ConsumeToken(TokenType.OperatorDot); var nextIdentifier = ConsumeToken(TokenType.Identifier); list.Add(new IdentifierExpression((string)nextIdentifier.Content!)); } @@ -82,16 +94,25 @@ private bool TryParseUnterminatedStatement(out Statement? parsedStatement) private VariableInitialization ParseVariableInitialization() { var isConst = TryConsumeToken(out _, TokenType.KeywordConst); + var assignmentLikeTokenTypes = + OperatorMapper.AssignmentTokenTypes.Append(TokenType.OperatorEqualsEquals).ToArray(); + var tokenTypesAllowedAfterIdentifier = + assignmentLikeTokenTypes.Append(TokenType.Comma).Append(TokenType.Semicolon).ToArray(); if (!TryConsumeToken(out var identifier, TokenType.Identifier)) throw new ParserException(new UnexpectedToken(_lexer.CurrentToken, isConst ? new[] { TokenType.Identifier } : new[] { TokenType.KeywordConst, TokenType.Identifier })); var variableName = (string)identifier.Content!; - // TODO: check for other operators containing =, eg. ==, += - return TryConsumeToken(out _, TokenType.OperatorEquals) - ? 
new VariableInitialization(variableName, ParseExpression(), isConst) - : new VariableInitialization(variableName, null, isConst); + EnsureToken(tokenTypesAllowedAfterIdentifier); + + if (!TryConsumeToken(out var assignmentToken, assignmentLikeTokenTypes)) + return new VariableInitialization(variableName, null, isConst); + var initialValue = ParseExpression(); + if (assignmentToken.Type == TokenType.OperatorEquals) + return new VariableInitialization(variableName, initialValue, isConst); + EmitError(new UnexpectedToken(assignmentToken, TokenType.OperatorEquals)); + return new VariableInitialization(variableName, initialValue, isConst); } // break @@ -129,6 +150,8 @@ private VariableInitialization ParseVariableInitialization() { var statementPosition = _lexer.CurrentToken.StartPosition; var statement = parseMethod(); - return statement is null ? null : statement with { Position = statementPosition }; + return statement is null + ? null + : statement with { StartPosition = statementPosition, EndPosition = _lastTokenEndPosition }; } } diff --git a/Toffee/SyntacticAnalysis/Parser.cs b/Toffee/SyntacticAnalysis/Parser.cs index dcba380..f4295f2 100644 --- a/Toffee/SyntacticAnalysis/Parser.cs +++ b/Toffee/SyntacticAnalysis/Parser.cs @@ -1,5 +1,6 @@ using Toffee.ErrorHandling; using Toffee.LexicalAnalysis; +using Toffee.Scanning; namespace Toffee.SyntacticAnalysis; @@ -8,41 +9,17 @@ public partial class Parser : IParser private readonly ILexer _lexer; private readonly IParserErrorHandler? _errorHandler; + private Position _lastTokenEndPosition = new(); + public Statement? CurrentStatement { get; private set; } private delegate Statement? ParseStatementDelegate(); - private readonly List _statementParsers; - private delegate Expression? ParseExpressionDelegate(); - private readonly List _expressionParsers; public Parser(ILexer lexer, IParserErrorHandler? 
diff --git a/Toffee/SyntacticAnalysis/ParserErrors.cs b/Toffee/SyntacticAnalysis/ParserErrors.cs
index 373425e..92114e9 100644
--- a/Toffee/SyntacticAnalysis/ParserErrors.cs
+++ b/Toffee/SyntacticAnalysis/ParserErrors.cs
@@ -1,4 +1,4 @@
-using System.Collections.ObjectModel;
+using System.Collections.Immutable;
 using Toffee.ErrorHandling;
 using Toffee.LexicalAnalysis;
 using Toffee.Scanning;
@@ -7,19 +7,76 @@ namespace Toffee.SyntacticAnalysis;
 
 public abstract record ParserError(Position Position) : Error(Position);
 
-public record UnexpectedToken(Token ActualToken, params TokenType[] ExpectedType) : ParserError(ActualToken.StartPosition);
-public record ExpectedStatement(Token ActualToken) : ParserError(ActualToken.StartPosition);
-public record ExpectedExpression(Token ActualToken) : ParserError(ActualToken.StartPosition);
-public record ExpectedBlockExpression(Token ActualToken) : ParserError(ActualToken.StartPosition);
-public record ExpectedPatternExpression(Token ActualToken) : ParserError(ActualToken.StartPosition);
+public record UnexpectedToken(Position Position, TokenType ActualType, params TokenType[] ExpectedType)
+    : ParserError(Position)
+{
+    public UnexpectedToken(Token actualToken, params TokenType[] expectedType)
+        : this(actualToken.StartPosition, actualToken.Type, expectedType)
+    { }
+}
+public record ExpectedStatement(Position Position, TokenType ActualType) : ParserError(Position)
+{
+    public ExpectedStatement(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+}
+public record ExpectedExpression(Position Position, TokenType ActualType) : ParserError(Position)
+{
+    public ExpectedExpression(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+}
+public record ExpectedBlockExpression(Position Position, TokenType ActualType) : ParserError(Position)
+{
+    public ExpectedBlockExpression(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+}
+public record ExpectedPatternExpression(Position Position, TokenType ActualType) : ParserError(Position)
+{
+    public ExpectedPatternExpression(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+}
+public record ExpectedSemicolon(Position Position, TokenType? ActualTokenType = null, Type? ActualType = null)
+    : ParserError(Position)
+{
+    public ExpectedSemicolon(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+
+    public ExpectedSemicolon(Statement actualStatement)
+        : this(actualStatement.StartPosition, null, actualStatement.GetType())
+    { }
+}
+public record ExpectedIdentifier(Position Position, Type ActualType) : ParserError(Position)
+{
+    public ExpectedIdentifier(Expression actualExpression)
+        : this(actualExpression.StartPosition, actualExpression.GetType())
+    { }
+}
+public record IntegerOutOfRange(Position Position, ulong Value, bool Negative = false) : ParserError(Position)
+{
+    public IntegerOutOfRange(LiteralExpression actualExpression)
+        : this(actualExpression.StartPosition, (ulong)actualExpression.Value!)
+    { }
+}
+public record ExpectedParameter(Position Position, TokenType ActualType) : ParserError(Position)
+{
+    public ExpectedParameter(Token actualToken) : this(actualToken.StartPosition, actualToken.Type)
+    { }
+}
 
 public static class ParserErrorExtensions
 {
-    private static readonly ReadOnlyDictionary<Type, string> MessageMap = new(new Dictionary<Type, string>
+    private static readonly ImmutableDictionary<Type, string> MessageMap = new Dictionary<Type, string>
     {
-        { typeof(UnexpectedToken), "Unexpected token" }
-    });
+        { typeof(UnexpectedToken), "Unexpected token" },
+        { typeof(ExpectedStatement), "Unexpected token instead of a statement" },
+        { typeof(ExpectedExpression), "Unexpected token instead of an expression" },
+        { typeof(ExpectedBlockExpression), "Unexpected token instead of a block expression" },
+        { typeof(ExpectedPatternExpression), "Unexpected token instead of a pattern expression" },
+        { typeof(ExpectedSemicolon), "Expected terminating semicolon" },
+        { typeof(ExpectedIdentifier), "Expected only identifiers in namespace path" },
+        { typeof(IntegerOutOfRange), "Literal integer above maximum (positive) value or below minimum (negative) value" },
+        { typeof(ExpectedParameter), "Expected parameter in parameter list" }
+    }.ToImmutableDictionary();
 
     public static string ToMessage(this ParserError error) =>
-        MessageMap.GetValueOrDefault(error.GetType(), "Lexical error");
+        MessageMap.GetValueOrDefault(error.GetType(), "Syntax error");
 }
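
Reviewer note, not part of the patch: the reworked error records keep only plain data (position, token type) in the primary constructor and add convenience constructors that extract those fields from a token. A small illustrative C# sketch of the same shape, with invented type names:

    using System;

    internal readonly record struct SimplePosition(int Line, int Column);

    internal sealed record SimpleToken(string Type, SimplePosition Start);

    internal abstract record SketchError(SimplePosition Position);

    internal sealed record UnexpectedTokenError(SimplePosition Position, string ActualType, params string[] ExpectedTypes)
        : SketchError(Position)
    {
        // Convenience constructor: lift a token into the plain-data shape.
        public UnexpectedTokenError(SimpleToken actualToken, params string[] expectedTypes)
            : this(actualToken.Start, actualToken.Type, expectedTypes)
        { }
    }

    internal static class ErrorRecordDemo
    {
        private static void Main()
        {
            var token = new SimpleToken("Comma", new SimplePosition(3, 14));
            SketchError error = new UnexpectedTokenError(token, "Identifier");
            Console.WriteLine(error.Position);   // position copied from the token
        }
    }
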
diff --git a/Toffee/SyntacticAnalysis/ParserException.cs b/Toffee/SyntacticAnalysis/ParserException.cs
index 028856d..2a8ff23 100644
--- a/Toffee/SyntacticAnalysis/ParserException.cs
+++ b/Toffee/SyntacticAnalysis/ParserException.cs
@@ -2,14 +2,14 @@
 
 public class ParserException : Exception
 {
-    private readonly ParserError _error;
+    public ParserError Error { get; }
 
     public ParserException(ParserError error) =>
-        _error = error;
+        Error = error;
 
     public ParserException(ParserError error, string message) : base(message) =>
-        _error = error;
+        Error = error;
 
     public ParserException(ParserError error, string message, Exception inner) : base(message, inner) =>
-        _error = error;
+        Error = error;
 }
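
Reviewer note, not part of the patch: exposing the carried error as a public property is what lets the interception code forward it to the error handler. A tiny C# sketch of an exception carrying a structured payload that the catch site can read (placeholder names throughout):

    using System;

    internal sealed record DemoError(string Message);

    internal sealed class DemoException : Exception
    {
        public DemoError Error { get; }

        public DemoException(DemoError error) => Error = error;

        public DemoException(DemoError error, string message) : base(message) => Error = error;
    }

    internal static class DemoExceptionUsage
    {
        private static void Main()
        {
            try
            {
                throw new DemoException(new DemoError("expected ';'"));
            }
            catch (DemoException e)
            {
                Console.WriteLine(e.Error.Message);   // the payload is now accessible
            }
        }
    }
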
diff --git a/Toffee/SyntacticAnalysis/ParserWarnings.cs b/Toffee/SyntacticAnalysis/ParserWarnings.cs
index 712a139..3a37aa5 100644
--- a/Toffee/SyntacticAnalysis/ParserWarnings.cs
+++ b/Toffee/SyntacticAnalysis/ParserWarnings.cs
@@ -1,4 +1,4 @@
-using System.Collections.ObjectModel;
+using System.Collections.Immutable;
 using Toffee.ErrorHandling;
 using Toffee.Scanning;
 
@@ -8,8 +8,8 @@ public abstract record ParserWarning(Position Position) : Warning(Position);
 
 public static class ParserWarningExtensions
 {
-    private static readonly ReadOnlyDictionary<Type, string> MessageMap = new(new Dictionary<Type, string>());
+    private static readonly ImmutableDictionary<Type, string> MessageMap = ImmutableDictionary<Type, string>.Empty;
 
     public static string ToMessage(this ParserWarning warning) =>
-        MessageMap.GetValueOrDefault(warning.GetType(), "Lexical warning");
+        MessageMap.GetValueOrDefault(warning.GetType(), "Syntax warning");
 }
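
Reviewer note, not part of the patch: both message maps (errors and warnings) are now built once from a plain dictionary initializer, frozen with ToImmutableDictionary, and queried with a fallback message. A short C# sketch of that lookup pattern with placeholder error types:

    using System;
    using System.Collections.Generic;
    using System.Collections.Immutable;

    internal sealed record SketchErrorA;
    internal sealed record SketchErrorB;

    internal static class MessageMapSketch
    {
        private static readonly ImmutableDictionary<Type, string> Messages = new Dictionary<Type, string>
        {
            { typeof(SketchErrorA), "Unexpected token" },
            { typeof(SketchErrorB), "Expected terminating semicolon" }
        }.ToImmutableDictionary();

        private static string ToMessage(object error) =>
            Messages.GetValueOrDefault(error.GetType(), "Syntax error");

        private static void Main()
        {
            Console.WriteLine(ToMessage(new SketchErrorA())); // Unexpected token
            Console.WriteLine(ToMessage(new object()));       // Syntax error (fallback)
        }
    }
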
diff --git a/Toffee/SyntacticAnalysis/Statement.cs b/Toffee/SyntacticAnalysis/Statement.cs
index 4212609..2d650f3 100644
--- a/Toffee/SyntacticAnalysis/Statement.cs
+++ b/Toffee/SyntacticAnalysis/Statement.cs
@@ -2,13 +2,20 @@
 
 namespace Toffee.SyntacticAnalysis;
 
-public abstract record Statement(Position Position, bool IsTerminated = false);
+public abstract record Statement(Position StartPosition, Position EndPosition, bool IsTerminated = false);
 
-public record NamespaceImportStatement(IList<IdentifierExpression> NamespaceLevels) : Statement(new Position());
-public record VariableInitializationListStatement(IList<VariableInitialization> Items) : Statement(new Position());
-public record BreakStatement() : Statement(new Position());
-public record BreakIfStatement(Expression Condition) : Statement(new Position());
-public record ReturnStatement(Expression? Value = null) : Statement(new Position());
-public record ExpressionStatement(Expression Expression) : Statement(new Position());
+public record NamespaceImportStatement(IList<IdentifierExpression> NamespaceLevels)
+    : Statement(new Position(), new Position());
+public record VariableInitializationListStatement(IList<VariableInitialization> Items)
+    : Statement(new Position(), new Position());
+public record BreakStatement()
+    : Statement(new Position(), new Position());
+public record BreakIfStatement(Expression Condition)
+    : Statement(new Position(), new Position());
+public record ReturnStatement(Expression? Value = null)
+    : Statement(new Position(), new Position());
+public record ExpressionStatement(Expression Expression)
+    : Statement(new Position(), new Position());
 
-public record VariableInitialization(string Name, Expression? InitialValue = null, bool IsConst = false) : Statement(new Position());
+public record VariableInitialization(string Name, Expression? InitialValue = null, bool IsConst = false)
+    : Statement(new Position(), new Position());
diff --git a/Toffee/SyntacticAnalysis/TypeMapper.cs b/Toffee/SyntacticAnalysis/TypeMapper.cs
index d69a6f5..ad94442 100644
--- a/Toffee/SyntacticAnalysis/TypeMapper.cs
+++ b/Toffee/SyntacticAnalysis/TypeMapper.cs
@@ -1,20 +1,27 @@
-using System.Collections.ObjectModel;
+using System.Collections.Immutable;
 using Toffee.LexicalAnalysis;
 
 namespace Toffee.SyntacticAnalysis;
 
 public static class TypeMapper
 {
-    private static readonly ReadOnlyDictionary<TokenType, DataType> TypeMap = new(new Dictionary<TokenType, DataType>
+    private static readonly ImmutableDictionary<TokenType, DataType> CastingTypeMap = new Dictionary<TokenType, DataType>
     {
         { TokenType.KeywordInt, DataType.Integer },
         { TokenType.KeywordFloat, DataType.Float },
         { TokenType.KeywordString, DataType.String },
-        { TokenType.KeywordBool, DataType.Bool },
-        { TokenType.KeywordFunction, DataType.Function},
-        { TokenType.KeywordNull, DataType.Null }
+        { TokenType.KeywordBool, DataType.Bool }
+    }.ToImmutableDictionary();
+
+    private static readonly ImmutableDictionary<TokenType, DataType> TypeMap = CastingTypeMap.AddRange(new []
+    {
+        KeyValuePair.Create(TokenType.KeywordFunction, DataType.Function),
+        KeyValuePair.Create(TokenType.KeywordNull, DataType.Null)
     });
 
-    public static TypeExpression MapToTypeExpression(TokenType type) => new(TypeMap[type]);  // TODO: throws
-    public static DataType MapToCastingType(TokenType type) => TypeMap[type];  // TODO: throws
+    public static TokenType[] CastingTypeTokenTypes { get; } = CastingTypeMap.Keys.ToArray();
+    public static TokenType[] TypeTokenTypes { get; } = TypeMap.Keys.ToArray();
+
+    public static DataType MapToType(TokenType type) => TypeMap[type];
+    public static DataType MapToCastingType(TokenType type) => CastingTypeMap[type];
 }
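
Reviewer note, not part of the patch: with Statement now carrying StartPosition and EndPosition, statements can be built with placeholder positions and stamped afterwards by the dispatching wrapper using a `with` expression, as Parser.Statements.cs does above. A compact C# sketch of that pattern with simplified stand-in types:

    using System;

    internal readonly record struct Pos(int Line, int Column);

    internal abstract record Stmt(Pos StartPosition, Pos EndPosition);

    internal sealed record BreakStmt() : Stmt(new Pos(), new Pos());

    internal static class PositionStampingDemo
    {
        // Non-destructive mutation: copy the statement and fill in both positions.
        private static Stmt Stamp(Stmt statement, Pos start, Pos end) =>
            statement with { StartPosition = start, EndPosition = end };

        private static void Main()
        {
            var stamped = Stamp(new BreakStmt(), new Pos(5, 1), new Pos(5, 6));
            Console.WriteLine(stamped);   // BreakStmt { StartPosition = ..., EndPosition = ... }
        }
    }
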
diff --git a/docs/gramatyka.md b/docs/gramatyka.md
index d5b480c..e51dc78 100644
--- a/docs/gramatyka.md
+++ b/docs/gramatyka.md
@@ -35,7 +35,7 @@ expression
     | pattern_matching;
     | assignment;
 block
-    = LEFT_BRACE, unterminated_statement, { SEMICOLON, { SEMICOLON }, unterminated_statement }, RIGHT_BRACE
+    = LEFT_BRACE, [ unterminated_statement, { SEMICOLON, [ unterminated_statement ] } ], RIGHT_BRACE;
 conditional_expression
     = conditional_if_part, { conditional_elif_part }, [ conditional_else_part ];
 conditional_if_part
@@ -103,7 +103,7 @@ unary_prefixed
 exponentiation
     = namespace_access_or_function_call, { OP_CARET, exponentiation };
 namespace_access_or_function_call
-    = namespace_access, { function_call_part } { OP_DOT, namespace_access, { function_call_part } };
+    = primary_expression, { function_call_part } { OP_DOT, primary_expression, { function_call_part } };
 function_call_part
     = LEFT_PARENTHESIS, arguments_list, RIGHT_PARENTHESIS;
 arguments_list
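
Reviewer note, not part of the patch: the revised namespace_access_or_function_call production lets any primary expression be followed by call parts and then chained with dots, so input shaped like f(1)(2).g(3) is derivable. A hypothetical C# sketch of the corresponding parse loop over a pre-tokenized input (none of these names come from the Toffee sources):

    using System;
    using System.Collections.Generic;

    internal static class CallChainSketch
    {
        // Input is pre-split into "tokens": identifiers, "(...)" call parts and ".".
        private static string Parse(Queue<string> tokens)
        {
            var result = ParsePrimaryWithCalls(tokens);
            while (tokens.Count > 0 && tokens.Peek() == ".")
            {
                tokens.Dequeue();
                result = $"access({result}, {ParsePrimaryWithCalls(tokens)})";
            }
            return result;
        }

        private static string ParsePrimaryWithCalls(Queue<string> tokens)
        {
            var node = tokens.Dequeue();                     // primary_expression
            while (tokens.Count > 0 && tokens.Peek().StartsWith("("))
                node = $"call({node}, {tokens.Dequeue()})";  // function_call_part
            return node;
        }

        private static void Main()
        {
            var tokens = new Queue<string>(new[] { "f", "(1)", "(2)", ".", "g", "(3)" });
            Console.WriteLine(Parse(tokens));  // access(call(call(f, (1)), (2)), call(g, (3)))
        }
    }
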