Commit

Fixes for stage 4 (Syntax analyzer)

Dawid Sygocki committed Jun 4, 2022
1 parent da9faa7 commit e8d8676
Showing 82 changed files with 6,173 additions and 3,044 deletions.
17 changes: 17 additions & 0 deletions Toffee.Tests/LexicalAnalysis/LexerErrorHandlerMock.cs
@@ -0,0 +1,17 @@
using System.Collections.Generic;
using Toffee.ErrorHandling;
using Toffee.LexicalAnalysis;

namespace Toffee.Tests.LexicalAnalysis;

// Test double that records every error and warning reported by the lexer,
// so tests can assert on their presence without a real error handler.
public class LexerErrorHandlerMock : ILexerErrorHandler
{
    public List<LexerError> HandledErrors = new();
    public List<LexerWarning> HandledWarnings = new();

    public bool HadErrors => HandledErrors.Count > 0;
    public bool HadWarnings => HandledWarnings.Count > 0;

    public void Handle(LexerError lexerError) => HandledErrors.Add(lexerError);
    public void Handle(LexerWarning lexerWarning) => HandledWarnings.Add(lexerWarning);
}
69 changes: 69 additions & 0 deletions Toffee.Tests/LexicalAnalysis/LexerTests.KeywordsAndIdentifiers.cs
@@ -0,0 +1,69 @@
using Toffee.LexicalAnalysis;
using Xunit;

namespace Toffee.Tests.LexicalAnalysis;

public partial class LexerTests
{
    [Trait("Category", "Keywords")]
    [Theory]
    [InlineData("int", TokenType.KeywordInt)]
    [InlineData("float", TokenType.KeywordFloat)]
    [InlineData("string", TokenType.KeywordString)]
    [InlineData("bool", TokenType.KeywordBool)]
    [InlineData("function", TokenType.KeywordFunction)]
    [InlineData("null", TokenType.KeywordNull)]
    [InlineData("init", TokenType.KeywordInit)]
    [InlineData("const", TokenType.KeywordConst)]
    [InlineData("pull", TokenType.KeywordPull)]
    [InlineData("if", TokenType.KeywordIf)]
    [InlineData("elif", TokenType.KeywordElif)]
    [InlineData("else", TokenType.KeywordElse)]
    [InlineData("while", TokenType.KeywordWhile)]
    [InlineData("for", TokenType.KeywordFor)]
    [InlineData("break", TokenType.KeywordBreak)]
    [InlineData("break_if", TokenType.KeywordBreakIf)]
    [InlineData("functi", TokenType.KeywordFuncti)]
    [InlineData("return", TokenType.KeywordReturn)]
    [InlineData("match", TokenType.KeywordMatch)]
    [InlineData("and", TokenType.KeywordAnd)]
    [InlineData("or", TokenType.KeywordOr)]
    [InlineData("is", TokenType.KeywordIs)]
    [InlineData("not", TokenType.KeywordNot)]
    [InlineData("default", TokenType.KeywordDefault)]
    [InlineData("false", TokenType.KeywordFalse)]
    [InlineData("true", TokenType.KeywordTrue)]
    public void KeywordsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(expectedTokenType, lexer.CurrentToken.Type);

        Assert.False(errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Identifiers")]
    [Theory]
    [InlineData("integer")]
    [InlineData("INIT")]
    [InlineData("constantinople")]
    [InlineData("ppull")]
    [InlineData("iff")]
    [InlineData("and2")]
    [InlineData("defaul")]
    public void IdentifiersBasedOnKeywordsShouldBeRecognizedCorrectly(string input)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.Identifier, lexer.CurrentToken.Type);
        Assert.Equal(input, lexer.CurrentToken.Content);

        Assert.False(errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }
}
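
The identifier cases above ("INIT", "iff", "constantinople", "ppull") pin down that keyword matching is case-sensitive and applies to the whole lexeme only, never to a prefix. A minimal sketch of the usual dictionary-based lookup that satisfies those cases; the helper below is hypothetical and not taken from the Toffee sources:

using System.Collections.Generic;
using Toffee.LexicalAnalysis;

// Hypothetical illustration only: whole-lexeme, case-sensitive keyword lookup
// with an identifier fallback, mirroring the behaviour the tests expect.
internal static class KeywordLookupSketch
{
    private static readonly Dictionary<string, TokenType> Keywords = new()
    {
        ["init"] = TokenType.KeywordInit,
        ["if"] = TokenType.KeywordIf,
        ["and"] = TokenType.KeywordAnd
        // ... remaining entries mirror the keyword table in the test above
    };

    public static TokenType Classify(string lexeme) =>
        Keywords.TryGetValue(lexeme, out var type) ? type : TokenType.Identifier;
}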
148 changes: 148 additions & 0 deletions Toffee.Tests/LexicalAnalysis/LexerTests.Numbers.cs
@@ -0,0 +1,148 @@
using Toffee.LexicalAnalysis;
using Toffee.Scanning;
using Xunit;

namespace Toffee.Tests.LexicalAnalysis;

public partial class LexerTests
{
    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("1", 1ul)]
    [InlineData("0", 0ul)]
    [InlineData("9223372036854775807", 9223372036854775807ul)]  // long.MaxValue
    [InlineData("9223372036854775808", 9223372036854775808ul)]  // long.MaxValue + 1
    [InlineData("18446744073709551615", 18446744073709551615ul)]  // ulong.MaxValue
    [InlineData("0000001", 1ul)]
    [InlineData("01", 1ul)]
    [InlineData("0x1", 1ul)]
    [InlineData("0x001", 1ul)]
    [InlineData("0xabCD", 43981ul)]
    [InlineData("0c1", 1ul)]
    [InlineData("0c001", 1ul)]
    [InlineData("0c741", 481ul)]
    [InlineData("0b1", 1ul)]
    [InlineData("0b0001", 1ul)]
    [InlineData("0b1011", 11ul)]
    public void IntegersShouldBeRecognizedCorrectly(string input, ulong expectedContent)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);

        Assert.False(errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("1.", 1.0)]
    [InlineData("0.", 0.0)]
    [InlineData("1.2345", 1.2345)]
    [InlineData("000000.1", 0.1)]
    [InlineData("1.7976931348623157E+308", 1.7976931348623157e308)]  // double.MaxValue
    [InlineData("2.2e1", 22.0)]
    [InlineData("2.2e-1", 0.22)]
    [InlineData("2.2e+1", 22.0)]
    [InlineData("002.e1", 20.0)]
    [InlineData("0.0e0", 0.0)]
    [InlineData("2.E-0", 2.0)]
    public void FloatsShouldBeRecognizedCorrectly(string input, double expectedContent)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);

        Assert.False(errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("18446744073709551616", TokenType.LiteralInteger, 1844674407370955161ul, 19u)]  // ulong.MaxValue + 1
    [InlineData("10.99999999999999999999", TokenType.LiteralFloat, 10.9999999999999999999, 22u)]
    [InlineData("3.14e99999999999999999999", TokenType.LiteralFloat, double.PositiveInfinity, 24u)]
    public void NumberLiteralOverflowShouldBeDetectedProperly(string input, TokenType expectedTokenType,
        object expectedContent, uint expectedOffset)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(expectedTokenType, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);
        Assert.Equal(typeof(NumberLiteralTooLarge), lexer.CurrentError?.GetType());
        Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position);

        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("0x", 'x', 0ul, 2u)]
    [InlineData("0xx", 'x', 0ul, 2u)]
    [InlineData("0c", 'c', 0ul, 2u)]
    [InlineData("0b", 'b', 0ul, 2u)]
    public void MissingNonDecimalDigitsShouldBeDetectedProperly(string input, char prefix, object expectedContent,
        uint expectedOffset)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);
        Assert.Equal(typeof(MissingNonDecimalDigits), lexer.CurrentError?.GetType());
        Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position);
        Assert.Equal(prefix, (lexer.CurrentError as MissingNonDecimalDigits)!.NonDecimalPrefix);

        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("0a", 'a', 0ul, 1u)]
    [InlineData("0z", 'z', 0ul, 1u)]
    [InlineData("0u", 'u', 0ul, 1u)]
    public void InvalidNonDecimalPrefixesShouldBeDetectedProperly(string input, char prefix, object expectedContent,
        uint expectedOffset)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.LiteralInteger, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);
        Assert.Equal(typeof(InvalidNonDecimalPrefix), lexer.CurrentError?.GetType());
        Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position);
        Assert.Equal(prefix, (lexer.CurrentError as InvalidNonDecimalPrefix)!.NonDecimalPrefix);

        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Numbers")]
    [Theory]
    [InlineData("12.e", 12.0, 4u)]
    [InlineData("1234.5678e+", 1234.5678, 11u)]
    [InlineData("0.5e--", 0.5, 5u)]
    [InlineData("789.ee", 789.0, 5u)]
    public void MissingExponentShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.LiteralFloat, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);
        Assert.Equal(typeof(MissingExponent), lexer.CurrentError?.GetType());
        Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position);

        Assert.False(errorHandlerMock.HadWarnings);
    }
}
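
For reference, the expected values in the 0x, 0c and 0b cases above follow ordinary base conversion (the 0c prefix appears to denote octal, judging by the test data). A standalone sanity check of those values using only standard .NET conversion, independent of the Toffee lexer:

using System;
using System.Diagnostics;

// Standalone check of the non-decimal literals used in the tests above;
// it relies only on Convert.ToUInt64 and does not touch the Toffee code.
internal static class NonDecimalLiteralCheck
{
    public static void Run()
    {
        Debug.Assert(Convert.ToUInt64("abCD", 16) == 43981ul); // "0xabCD", hexadecimal
        Debug.Assert(Convert.ToUInt64("741", 8) == 481ul);     // "0c741", octal
        Debug.Assert(Convert.ToUInt64("1011", 2) == 11ul);     // "0b1011", binary
    }
}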
120 changes: 120 additions & 0 deletions Toffee.Tests/LexicalAnalysis/LexerTests.OperatorsAndComments.cs
@@ -0,0 +1,120 @@
using Toffee.LexicalAnalysis;
using Toffee.Scanning;
using Xunit;

namespace Toffee.Tests.LexicalAnalysis;

public partial class LexerTests
{
    [Trait("Category", "Operators")]
    [Theory]
    [InlineData(".", TokenType.OperatorDot)]
    [InlineData("^", TokenType.OperatorCaret)]
    [InlineData("+", TokenType.OperatorPlus)]
    [InlineData("-", TokenType.OperatorMinus)]
    [InlineData("!", TokenType.OperatorBang)]
    [InlineData("*", TokenType.OperatorAsterisk)]
    [InlineData("/", TokenType.OperatorSlash)]
    [InlineData("%", TokenType.OperatorPercent)]
    [InlineData("..", TokenType.OperatorDotDot)]
    [InlineData("<", TokenType.OperatorLess)]
    [InlineData("<=", TokenType.OperatorLessEquals)]
    [InlineData(">", TokenType.OperatorGreater)]
    [InlineData(">=", TokenType.OperatorGreaterEquals)]
    [InlineData("==", TokenType.OperatorEqualsEquals)]
    [InlineData("!=", TokenType.OperatorBangEquals)]
    [InlineData("&&", TokenType.OperatorAndAnd)]
    [InlineData("||", TokenType.OperatorOrOr)]
    [InlineData("??", TokenType.OperatorQueryQuery)]
    [InlineData("?>", TokenType.OperatorQueryGreater)]
    [InlineData("=", TokenType.OperatorEquals)]
    [InlineData("+=", TokenType.OperatorPlusEquals)]
    [InlineData("-=", TokenType.OperatorMinusEquals)]
    [InlineData("*=", TokenType.OperatorAsteriskEquals)]
    [InlineData("/=", TokenType.OperatorSlashEquals)]
    [InlineData("%=", TokenType.OperatorPercentEquals)]
    [InlineData("(", TokenType.LeftParenthesis)]
    [InlineData(")", TokenType.RightParenthesis)]
    [InlineData("{", TokenType.LeftBrace)]
    [InlineData("}", TokenType.RightBrace)]
    [InlineData(",", TokenType.Comma)]
    [InlineData(":", TokenType.Colon)]
    [InlineData(";", TokenType.Semicolon)]
    public void OperatorsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(expectedTokenType, lexer.CurrentToken.Type);

        Assert.False(errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Comments")]
    [Theory]
    [InlineData("//", TokenType.LineComment, false)]
    [InlineData("/*", TokenType.BlockComment, true)]
    [InlineData("/**/", TokenType.BlockComment, false)]
    public void CommentsShouldBeRecognizedCorrectly(string input, TokenType expectedTokenType, bool shouldProduceError)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(expectedTokenType, lexer.CurrentToken.Type);

        // XOR: HadErrors must equal shouldProduceError.
        Assert.False(shouldProduceError ^ errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Comments")]
    [Theory]
    [InlineData("//", false, "", false)]
    [InlineData("// ", false, " ", false)]
    [InlineData("// example content", false, " example content", false)]
    [InlineData("// example\nmultiline\ncontent", false, " example", false)]
    [InlineData("///**/", false, "/**/", false)]
    [InlineData("/*", true, "", true)]
    [InlineData("/**/", true, "", false)]
    [InlineData("/* */", true, " ", false)]
    [InlineData("/* example content */", true, " example content ", false)]
    [InlineData("/* example\nmultiline\ncontent */", true, " example\nmultiline\ncontent ", false)]
    [InlineData("/*///* /**/", true, "///* /*", false)]
    public void ContentOfCommentsShouldBePreservedProperly(string input, bool isBlock, string expectedContent, bool shouldProduceError)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(isBlock ? TokenType.BlockComment : TokenType.LineComment, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);

        // XOR: HadErrors must equal shouldProduceError.
        Assert.False(shouldProduceError ^ errorHandlerMock.HadErrors);
        Assert.False(errorHandlerMock.HadWarnings);
    }

    [Trait("Category", "Operators")]
    [Theory]
    [InlineData("`", "`", 0u)]
    [InlineData("🐲", "🐲", 0u)]
    [InlineData("\a", "\a", 0u)]
    [InlineData("?", "?", 0u)]
    [InlineData("&", "&", 0u)]
    [InlineData("|", "|", 0u)]
    public void UnknownTokensShouldBeDetectedProperly(string input, object expectedContent, uint expectedOffset)
    {
        var scannerMock = new ScannerMock(input);
        var errorHandlerMock = new LexerErrorHandlerMock();
        ILexer lexer = new Lexer(scannerMock, errorHandlerMock);

        Assert.Equal(TokenType.Unknown, lexer.CurrentToken.Type);
        Assert.Equal(expectedContent, lexer.CurrentToken.Content);
        Assert.Equal(typeof(UnknownToken), lexer.CurrentError?.GetType());
        Assert.Equal(new Position(expectedOffset, 1, expectedOffset), lexer.CurrentError!.Position);
        Assert.Equal(input, (lexer.CurrentError as UnknownToken)!.Content);

        Assert.False(errorHandlerMock.HadWarnings);
    }
}