Skip to content

Commit

Permalink
feat: add token and lexer
Browse files Browse the repository at this point in the history
  • Loading branch information
Aden-Q committed Apr 27, 2024
1 parent d4b1dc1 commit f907741
Show file tree
Hide file tree
Showing 2 changed files with 117 additions and 0 deletions.
54 changes: 54 additions & 0 deletions internal/lexer/lexer.go
Original file line number Diff line number Diff line change
@@ -1 +1,55 @@
package lexer

import (
"github.com/Aden-Q/monkey/internal/token"
)

// Lexer performs lexical analysis over a fixed input string,
// producing one token per call to NextToken.
type Lexer struct {
input string // the source text being tokenized; never mutated after New
position uint32 // current position index in input
}

// New returns a Lexer positioned at the start of input.
func New(input string) *Lexer {
	l := &Lexer{}
	l.input = input
	// position starts at its zero value (0), i.e. the first character.
	return l
}

// NextToken consumes a single character from the input and returns the
// token it maps to. The boolean result is false when either the input is
// exhausted (the token is nil) or the character is not recognized (an
// ILLEGAL token is returned). The read position always advances by one,
// even for unrecognized characters.
func (l *Lexer) NextToken() (*token.Token, bool) {
	if !l.hasNext() {
		return nil, false
	}

	ch := l.input[l.position]
	l.position++

	switch ch {
	case '=':
		return token.New(token.ASSIGN, "="), true
	case '+':
		return token.New(token.PLUS, "+"), true
	case ',':
		return token.New(token.COMMA, ","), true
	case ';':
		return token.New(token.SEMICOLON, ";"), true
	case '(':
		return token.New(token.LPAREN, "("), true
	case ')':
		return token.New(token.RPAREN, ")"), true
	case '{':
		return token.New(token.LBRACE, "{"), true
	case '}':
		return token.New(token.RBRACE, "}"), true
	}

	// Anything not matched above is reported as an illegal token.
	return token.New(token.ILLEGAL, "ILLEGAL"), false
}

// hasNext reports whether any unread characters remain in the input.
func (l *Lexer) hasNext() bool {
	// Compare in uint32 space to match the type of position.
	size := uint32(len(l.input))
	return l.position < size
}
63 changes: 63 additions & 0 deletions internal/lexer/lexer_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
package lexer_test

import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"

"github.com/Aden-Q/monkey/internal/lexer"
"github.com/Aden-Q/monkey/internal/token"
)

var _ = Describe("Lexer", func() {

	Describe("NextToken test", func() {
		Context("valid input", func() {
			It("should equal", func() {
				input := `=+(){},;`
				// One expected token per input character, in order.
				// MixedCaps naming per Go convention (was expected_tokens).
				expectedTokens := []*token.Token{
					{
						Type:    token.ASSIGN,
						Literal: "=",
					},
					{
						Type:    token.PLUS,
						Literal: "+",
					},
					{
						Type:    token.LPAREN,
						Literal: "(",
					},
					{
						Type:    token.RPAREN,
						Literal: ")",
					},
					{
						Type:    token.LBRACE,
						Literal: "{",
					},
					{
						Type:    token.RBRACE,
						Literal: "}",
					},
					{
						Type:    token.COMMA,
						Literal: ",",
					},
					{
						Type:    token.SEMICOLON,
						Literal: ";",
					},
				}

				l := lexer.New(input)

				for _, expectedToken := range expectedTokens {
					// Named tok (not token) to avoid shadowing the
					// imported token package inside the loop body.
					tok, ok := l.NextToken()
					Expect(ok).To(BeTrue())
					Expect(tok).To(Equal(expectedToken))
				}
			})
		})
	})

})

0 comments on commit f907741

Please sign in to comment.