feat: undo
This commit is contained in:
@@ -4,7 +4,6 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A rule describes a single lexical pattern for the scanner.
|
||||
@@ -41,10 +40,8 @@ func (s *Scanner[T]) On(pattern string, typ T) *Scanner[T] {
|
||||
// Skip registers a rule that consumes matching text without emitting a token.
|
||||
// This is used for whitespace and comments.
|
||||
func (s *Scanner[T]) Skip(pattern string) *Scanner[T] {
|
||||
var zero T
|
||||
s.rules = append(s.rules, rule[T]{
|
||||
pattern: compileAnchored(pattern),
|
||||
typ: zero,
|
||||
skip: true,
|
||||
})
|
||||
return s
|
||||
@@ -52,48 +49,37 @@ func (s *Scanner[T]) Skip(pattern string) *Scanner[T] {
|
||||
|
||||
// NOTE(review): this span is a scraped unified diff ("feat: undo") —
// removed (pre-change) and added (post-change) lines appear interleaved
// below, and the bare "|" / "||||" lines are diff-viewer residue, not code.
// Scan tokenizes the input string using the registered rules.
|
||||
// At each position, all rules are tested and the longest match wins.
|
||||
// The next two doc lines are the old/new variants of the same sentence:
// the change switches error recovery from rune-sized to byte-sized steps.
// If no rule matches, an error is recorded and the scanner advances one rune.
|
||||
// If no rule matches, an error is recorded and the scanner advances one byte.
|
||||
func (s *Scanner[T]) Scan(input string) ([]Token[T], error) {
|
||||
tokens := []Token[T]{}
|
||||
errorList := []error{}
|
||||
// Removed (old variant): pos at function scope plus a rune-based
// column counter used for Token.Column.
pos := 0
|
||||
column := 0
|
||||
|
||||
||||
// Old loop header (pos declared above) ...
for pos < len(input) {
|
||||
// ... replaced by the new header, which scopes pos to the loop.
for pos := 0; pos < len(input); {
|
||||
// Longest-match selection across all rules; shared by both variants.
bestLen := 0
|
||||
bestRule := -1
|
||||
|
||||
||||
for idx, r := range s.rules {
|
||||
loc := r.pattern.FindStringIndex(input[pos:])
|
||||
// Old form: early-continue on nil plus a matchLen temporary
// (the next four code lines) ...
if loc == nil {
|
||||
continue
|
||||
}
|
||||
if matchLen := loc[1]; matchLen > bestLen {
|
||||
bestLen = matchLen
|
||||
// ... collapsed in the new form into a single condition.
if loc != nil && loc[1] > bestLen {
|
||||
bestLen = loc[1]
|
||||
bestRule = idx
|
||||
}
|
||||
}
|
||||
|
||||
||||
// No rule matched at pos (or only empty matches): record an error
// and resynchronize.
if bestRule == -1 || bestLen == 0 {
|
||||
// Old recovery: decode the full UTF-8 rune, report it, advance
// by its byte size, and bump the rune column.
_, size := utf8.DecodeRuneInString(input[pos:])
|
||||
errorList = append(errorList, fmt.Errorf("unknown character '%v'", input[pos:pos+size]))
|
||||
pos += size
|
||||
column++
|
||||
// New recovery: report and advance a single byte. NOTE(review):
// string(input[pos]) is a one-byte string, so a multi-byte rune
// now yields one error per byte — presumably intentional per the
// updated doc comment, but worth confirming.
errorList = append(errorList, fmt.Errorf("unknown character '%v'", string(input[pos])))
|
||||
pos++
|
||||
continue
|
||||
}
|
||||
|
||||
||||
// Old variant bound the matched text and winning rule to locals ...
matched := input[pos : pos+bestLen]
|
||||
r := s.rules[bestRule]
|
||||
|
||||
||||
if !r.skip {
|
||||
// ... the new variant inlines the rule lookup into the if header.
if r := s.rules[bestRule]; !r.skip {
|
||||
tokens = append(tokens, Token[T]{
|
||||
Type: r.typ,
|
||||
// Token fields, old then new: Value via the matched local and a
// rune-aware Column vs. the direct byte slice and byte offset pos.
Value: matched,
|
||||
Column: column,
|
||||
Value: input[pos : pos+bestLen],
|
||||
Column: pos,
|
||||
})
|
||||
}
|
||||
|
||||
||||
// Removed: rune-count column advance (only the old variant tracked it).
column += utf8.RuneCountInString(matched)
|
||||
pos += bestLen
|
||||
}
// NOTE(review): the diff excerpt ends here — the function's return
// statement (tokens plus the accumulated errorList, mechanism unknown
// from this view) and closing brace are beyond the visible hunk.
|
||||
|
||||
|
||||
Reference in New Issue
Block a user