-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtokenizer.go
68 lines (56 loc) · 2.06 KB
/
tokenizer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
package golex
import "slices"
// TokenizerType identifies a tokenizer within a Lexer's tokenizer registry
// and its tokenization order. Built-in tokenizers use the constants below;
// user-supplied tokenizers may define their own values.
type TokenizerType string

// Identifiers for the built-in tokenizers.
// NOTE(review): the string values spell "BuildIn" (not "BuiltIn"); they are
// exported values and may be persisted/compared elsewhere, so the spelling
// must not be changed here.
const (
	// TypeNoTokenizer is the zero value, meaning "no tokenizer specified".
	TypeNoTokenizer      TokenizerType = ""
	TypeCommentTokenizer TokenizerType = "BuildInCommentTokenizer"
	TypeStringTokenizer  TokenizerType = "BuildInStringTokenizer"
	TypeNumberTokenizer  TokenizerType = "BuildInNumberTokenizer"
	TypeLiteralTokenizer TokenizerType = "BuildInLiteralTokenizer"
	TypeSymbolTokenizer  TokenizerType = "BuildInSymbolTokenizer"
	TypeBooleanTokenizer TokenizerType = "BuildInBooleanTokenizer"
)
// Tokenizer produces tokens from a Lexer's input stream.
type Tokenizer interface {
	// CanTokenize reports whether this tokenizer can handle the input
	// at the lexer's current position.
	CanTokenize(*Lexer) bool
	// Tokenize consumes input from the lexer and returns the resulting
	// token, or an error if the input is malformed.
	Tokenize(*Lexer) (Token, error)
}
// TokenizerInserter describes a tokenizer to register and where to place it
// in the tokenization order, relative to an existing tokenizer. Use the
// InsertBefore / InsertAfter constructors; exactly one of Before or After is
// expected to be set.
type TokenizerInserter struct {
	tokenizerType TokenizerType // identifier under which the tokenizer is registered
	tokenizer     Tokenizer     // the tokenizer implementation to register

	// Before, when set, requests insertion immediately before this type.
	Before TokenizerType
	// After, when set, requests insertion immediately after this type.
	After TokenizerType
}
// Insert registers ti's tokenizer in the given registry and splices its type
// into tokenizationOrder at the requested position. If the Before/After
// anchor is not present in the order, the type is prepended (Before) or
// appended (After, or when neither anchor is set). The updated map and order
// are returned; the map is modified in place.
func (ti TokenizerInserter) Insert(tokenizers map[TokenizerType]Tokenizer, tokenizationOrder []TokenizerType) (map[TokenizerType]Tokenizer, []TokenizerType) {
	tokenizers[ti.tokenizerType] = ti.tokenizer

	// Scan for the anchor; Before takes precedence when both would match
	// at the same position.
	for idx, existing := range tokenizationOrder {
		switch existing {
		case ti.Before:
			return tokenizers, slices.Insert(tokenizationOrder, idx, ti.tokenizerType)
		case ti.After:
			return tokenizers, slices.Insert(tokenizationOrder, idx+1, ti.tokenizerType)
		}
	}

	// Anchor not found in the tokenization order.
	if ti.Before != TypeNoTokenizer {
		// A "Before" anchor was requested, so place the tokenizer first.
		return tokenizers, append([]TokenizerType{ti.tokenizerType}, tokenizationOrder...)
	}
	// An "After" anchor (or none at all) was requested, so place it last.
	return tokenizers, append(tokenizationOrder, ti.tokenizerType)
}
// InsertBefore builds a TokenizerInserter that registers tokenizer under
// tokenizerType and places it immediately before the tokenizer identified
// by before in the tokenization order.
func InsertBefore(before TokenizerType, tokenizerType TokenizerType, tokenizer Tokenizer) TokenizerInserter {
	ti := TokenizerInserter{tokenizerType: tokenizerType, tokenizer: tokenizer}
	ti.Before = before
	return ti
}
// InsertAfter builds a TokenizerInserter that registers tokenizer under
// tokenizerType and places it immediately after the tokenizer identified
// by after in the tokenization order.
func InsertAfter(after TokenizerType, tokenizerType TokenizerType, tokenizer Tokenizer) TokenizerInserter {
	ti := TokenizerInserter{tokenizerType: tokenizerType, tokenizer: tokenizer}
	ti.After = after
	return ti
}