feat: new parser strategy

This commit is contained in:
2026-02-09 23:46:36 -05:00
parent 361f529bdc
commit 87f2370d81
12 changed files with 377 additions and 144 deletions

View File

@@ -5,37 +5,26 @@ import (
"fmt"
"git.maximhutz.com/max/lambda/pkg/iterator"
"git.maximhutz.com/max/lambda/pkg/token"
)
type tokenIterator = iterator.Iterator[Token]
func parseRawToken(i *tokenIterator, expected TokenType) (*Token, error) {
return iterator.Do(i, func(i *tokenIterator) (*Token, error) {
if tok, err := i.Next(); err != nil {
return nil, err
} else if tok.Type != expected {
return nil, fmt.Errorf("expected token %v, got %v'", expected.Name(), tok.Value)
} else {
return &tok, nil
}
})
}
func passSoftBreaks(i *tokenIterator) {
for {
if _, err := parseRawToken(i, TokenSoftBreak); err != nil {
if _, err := token.ParseRawToken(i, TokenSoftBreak); err != nil {
return
}
}
}
func parseToken(i *tokenIterator, expected TokenType, ignoreSoftBreaks bool) (*Token, error) {
return iterator.Do(i, func(i *tokenIterator) (*Token, error) {
return iterator.Try(i, func(i *tokenIterator) (*Token, error) {
if ignoreSoftBreaks {
passSoftBreaks(i)
}
return parseRawToken(i, expected)
return token.ParseRawToken(i, expected)
})
}
@@ -48,35 +37,20 @@ func parseString(i *tokenIterator) (string, error) {
}
func parseBreak(i *tokenIterator) (*Token, error) {
if tok, softErr := parseRawToken(i, TokenSoftBreak); softErr == nil {
if tok, softErr := token.ParseRawToken(i, TokenSoftBreak); softErr == nil {
return tok, nil
} else if tok, hardErr := parseRawToken(i, TokenHardBreak); hardErr == nil {
} else if tok, hardErr := token.ParseRawToken(i, TokenHardBreak); hardErr == nil {
return tok, nil
} else {
return nil, errors.Join(softErr, hardErr)
}
}
func parseList[U any](i *tokenIterator, fn func(*tokenIterator) (U, error), minimum int) ([]U, error) {
results := []U{}
for {
if u, err := fn(i); err != nil {
if len(results) < minimum {
return nil, fmt.Errorf("expected at least '%v' items, got only '%v': %w", minimum, len(results), err)
}
return results, nil
} else {
results = append(results, u)
}
}
}
func parseAbstraction(i *tokenIterator) (*Abstraction, error) {
return iterator.Do(i, func(i *tokenIterator) (*Abstraction, error) {
return iterator.Try(i, func(i *tokenIterator) (*Abstraction, error) {
if _, err := parseToken(i, TokenSlash, true); err != nil {
return nil, fmt.Errorf("no function slash (col %d): %w", i.MustGet().Column, err)
} else if parameters, err := parseList(i, parseString, 0); err != nil {
} else if parameters, err := token.ParseList(i, parseString, 0); err != nil {
return nil, err
} else if _, err = parseToken(i, TokenDot, true); err != nil {
return nil, fmt.Errorf("no function dot (col %d): %w", i.MustGet().Column, err)
@@ -89,10 +63,10 @@ func parseAbstraction(i *tokenIterator) (*Abstraction, error) {
}
func parseApplication(i *tokenIterator) (*Application, error) {
return iterator.Do(i, func(i *tokenIterator) (*Application, error) {
return iterator.Try(i, func(i *tokenIterator) (*Application, error) {
if _, err := parseToken(i, TokenOpenParen, true); err != nil {
return nil, fmt.Errorf("no openning brackets (col %d): %w", i.MustGet().Column, err)
} else if expressions, err := parseList(i, parseExpression, 1); err != nil {
} else if expressions, err := token.ParseList(i, parseExpression, 1); err != nil {
return nil, err
} else if _, err := parseToken(i, TokenCloseParen, true); err != nil {
return nil, fmt.Errorf("no closing brackets (col %d): %w", i.MustGet().Column, err)
@@ -114,12 +88,12 @@ func parseStatements(i *tokenIterator) ([]Statement, error) {
statements := []Statement{}
//nolint:errcheck
parseList(i, parseBreak, 0)
token.ParseList(i, parseBreak, 0)
for {
if statement, err := parseStatement(i); err != nil {
break
} else if _, err := parseList(i, parseBreak, 1); err != nil && !i.Done() {
} else if _, err := token.ParseList(i, parseBreak, 1); err != nil && !i.Done() {
break
} else {
statements = append(statements, statement)
@@ -159,7 +133,7 @@ func parseClause(i *tokenIterator, braces bool) (*Clause, error) {
}
func parseExpression(i *tokenIterator) (Expression, error) {
return iterator.Do(i, func(i *tokenIterator) (Expression, error) {
return iterator.Try(i, func(i *tokenIterator) (Expression, error) {
passSoftBreaks(i)
switch peek := i.MustGet(); peek.Type {
@@ -178,8 +152,8 @@ func parseExpression(i *tokenIterator) (Expression, error) {
}
func parseLet(i *tokenIterator) (*LetStatement, error) {
return iterator.Do(i, func(i *tokenIterator) (*LetStatement, error) {
if parameters, err := parseList(i, parseString, 1); err != nil {
return iterator.Try(i, func(i *tokenIterator) (*LetStatement, error) {
if parameters, err := token.ParseList(i, parseString, 1); err != nil {
return nil, err
} else if _, err := parseToken(i, TokenAssign, true); err != nil {
return nil, err

View File

@@ -1,44 +1,13 @@
package saccharine
import (
"errors"
"fmt"
"unicode"
"git.maximhutz.com/max/lambda/pkg/iterator"
"git.maximhutz.com/max/lambda/pkg/token"
)
// isVariable determines whether a rune can be a valid variable.
func isVariable(r rune) bool {
return unicode.IsLetter(r) || unicode.IsNumber(r)
}
func scanRune(i *iterator.Iterator[rune], expected func(rune) bool) (rune, error) {
i2 := i.Copy()
if r, err := i2.Next(); err != nil {
return r, err
} else if !expected(r) {
return r, fmt.Errorf("got unexpected rune %v'", r)
} else {
i.Sync(i2)
return r, nil
}
}
func scanCharacter(i *iterator.Iterator[rune], expected rune) (rune, error) {
i2 := i.Copy()
if r, err := i2.Next(); err != nil {
return r, err
} else if r != expected {
return r, fmt.Errorf("got unexpected rune %v'", r)
} else {
i.Sync(i2)
return r, nil
}
}
// Pulls the next token from an iterator over runes. If it cannot, it will
// return nil. If an error occurs, it will return that.
func scanToken(i *iterator.Iterator[rune]) (*Token, error) {
@@ -55,27 +24,27 @@ func scanToken(i *iterator.Iterator[rune]) (*Token, error) {
switch {
case letter == '(':
return NewToken(TokenOpenParen, index), nil
return token.New(TokenOpenParen, index), nil
case letter == ')':
return NewToken(TokenCloseParen, index), nil
return token.New(TokenCloseParen, index), nil
case letter == '.':
return NewToken(TokenDot, index), nil
return token.New(TokenDot, index), nil
case letter == '\\':
return NewToken(TokenSlash, index), nil
return token.New(TokenSlash, index), nil
case letter == '\n':
return NewToken(TokenSoftBreak, index), nil
return token.New(TokenSoftBreak, index), nil
case letter == '{':
return NewToken(TokenOpenBrace, index), nil
return token.New(TokenOpenBrace, index), nil
case letter == '}':
return NewToken(TokenCloseBrace, index), nil
return token.New(TokenCloseBrace, index), nil
case letter == ':':
if _, err := scanCharacter(i, '='); err != nil {
if _, err := token.ScanCharacter(i, '='); err != nil {
return nil, err
} else {
return NewToken(TokenAssign, index), nil
return token.New(TokenAssign, index), nil
}
case letter == ';':
return NewToken(TokenHardBreak, index), nil
return token.New(TokenHardBreak, index), nil
case letter == '#':
// Skip everything until the next newline or EOF.
for !i.Done() {
@@ -93,18 +62,8 @@ func scanToken(i *iterator.Iterator[rune]) (*Token, error) {
return nil, nil
case unicode.IsSpace(letter):
return nil, nil
case isVariable(letter):
atom := []rune{letter}
for {
if r, err := scanRune(i, isVariable); err != nil {
break
} else {
atom = append(atom, r)
}
}
return NewTokenAtom(string(atom), index), nil
case token.IsVariable(letter):
return token.ScanAtom(i, letter, TokenAtom, index), nil
}
return nil, fmt.Errorf("unknown character '%v'", string(letter))
@@ -112,18 +71,5 @@ func scanToken(i *iterator.Iterator[rune]) (*Token, error) {
// scan a string into tokens.
func scan(input string) ([]Token, error) {
i := iterator.Of([]rune(input))
tokens := []Token{}
errorList := []error{}
for !i.Done() {
token, err := scanToken(i)
if err != nil {
errorList = append(errorList, err)
} else if token != nil {
tokens = append(tokens, *token)
}
}
return tokens, errors.Join(errorList...)
return token.Scan(input, scanToken)
}

View File

@@ -1,6 +1,10 @@
package saccharine
import "fmt"
import (
"fmt"
"git.maximhutz.com/max/lambda/pkg/token"
)
// A TokenType is an identifier for any token in the Saccharine language.
type TokenType int
@@ -21,7 +25,7 @@ const (
TokenAssign
// TokenAtom denotes an alpha-numeric variable.
TokenAtom
// TokenSlash denotes the '/' token.
// TokenSlash denotes the '\\' token.
TokenSlash
// TokenDot denotes the '.' token.
TokenDot
@@ -29,24 +33,6 @@ const (
TokenSoftBreak
)
// A Token in the Saccharine language.
type Token struct {
Column int // Where the token begins in the source text.
Type TokenType // What type the token is.
Value string // The value of the token.
}
// NewToken creates a [Token] of the given type at the given column.
// The token's value is derived from its [TokenType].
func NewToken(typ TokenType, column int) *Token {
return &Token{Type: typ, Column: column, Value: typ.Name()}
}
// NewTokenAtom creates a [TokenAtom] with the given name at the given column.
func NewTokenAtom(name string, column int) *Token {
return &Token{Type: TokenAtom, Column: column, Value: name}
}
// Name returns the type of the TokenType, as a string.
func (t TokenType) Name() string {
switch t {
@@ -75,7 +61,5 @@ func (t TokenType) Name() string {
}
}
// Name returns the type of the Token, as a string.
func (t Token) Name() string {
return t.Type.Name()
}
// Token is the concrete token type for the Saccharine language.
type Token = token.Token[TokenType]