diff --git a/cmd/lambda/lambda.go b/cmd/lambda/lambda.go index 4ac4180..76b2df3 100644 --- a/cmd/lambda/lambda.go +++ b/cmd/lambda/lambda.go @@ -8,8 +8,7 @@ import ( "git.maximhutz.com/max/lambda/internal/cli" "git.maximhutz.com/max/lambda/internal/config" "git.maximhutz.com/max/lambda/pkg/lambda" - "git.maximhutz.com/max/lambda/pkg/parser" - "git.maximhutz.com/max/lambda/pkg/tokenizer" + "git.maximhutz.com/max/lambda/pkg/saccharine" ) // Run main application. @@ -25,12 +24,12 @@ func main() { cli.HandleError(err) // Parse tokens. - tokens, err := tokenizer.GetTokens([]rune(input)) + tokens, err := saccharine.GetTokens([]rune(input)) cli.HandleError(err) logger.Info("Parsed tokens.", "tokens", tokens) // Turn tokens into syntax tree. - expression, err := parser.GetTree(tokens) + expression, err := saccharine.GetTree(tokens) cli.HandleError(err) logger.Info("Parsed syntax tree.", "tree", lambda.Stringify(expression)) diff --git a/pkg/lambda/get_free_variables.go b/pkg/lambda/get_free_variables.go index d729a84..3cb8b0e 100644 --- a/pkg/lambda/get_free_variables.go +++ b/pkg/lambda/get_free_variables.go @@ -12,7 +12,7 @@ func GetFreeVariables(e Expression) set.Set[string] { return vars case *Application: vars := GetFreeVariables(e.Abstraction) - vars.Union(GetFreeVariables(e.Argument)) + vars.Merge(GetFreeVariables(e.Argument)) return vars default: return nil diff --git a/pkg/lambda/reduce.go b/pkg/lambda/reduce.go index 95a5507..3a812d9 100644 --- a/pkg/lambda/reduce.go +++ b/pkg/lambda/reduce.go @@ -5,14 +5,13 @@ func ReduceOnce(e *Expression) bool { case *Abstraction: return ReduceOnce(&typed.Body) case *Application: - fn, fnOk := typed.Abstraction.(*Abstraction) - if fnOk { + if fn, fnOk := typed.Abstraction.(*Abstraction); fnOk { Substitute(&fn.Body, fn.Parameter, typed.Argument) *e = fn.Body return true } - good := ReduceOnce(&typed.Abstraction) - if good { + + if ReduceOnce(&typed.Abstraction) { return true } diff --git a/pkg/lambda/substitute.go 
b/pkg/lambda/substitute.go index 8d12145..ebc83c5 100644 --- a/pkg/lambda/substitute.go +++ b/pkg/lambda/substitute.go @@ -18,7 +18,7 @@ func Substitute(e *Expression, target string, replacement Expression) { } used := GetFreeVariables(typed.Body) - used.Union(replacementFreeVars) + used.Merge(replacementFreeVars) freshVar := GenerateFreshName(used) Rename(typed, typed.Parameter, freshVar) Substitute(&typed.Body, target, replacement) diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go deleted file mode 100644 index 81a6016..0000000 --- a/pkg/parser/parser.go +++ /dev/null @@ -1,98 +0,0 @@ -package parser - -import ( - "fmt" - - "git.maximhutz.com/max/lambda/pkg/iterator" - "git.maximhutz.com/max/lambda/pkg/lambda" - "git.maximhutz.com/max/lambda/pkg/tokenizer" -) - -func ParseExpression(i *iterator.Iterator[tokenizer.Token]) (lambda.Expression, error) { - token, err := i.Pop() - if err != nil { - return nil, fmt.Errorf("could not get next token: %w", err) - } - - switch token.Type { - case tokenizer.TokenVariable: - return lambda.NewVariable(token.Value), nil - case tokenizer.TokenDot: - return nil, fmt.Errorf("token '.' found without a corresponding slash (column %d)", token.Index) - case tokenizer.TokenSlash: - atoms := []string{} - - for { - atom, atomErr := i.Pop() - if atomErr != nil { - return nil, fmt.Errorf("could not find parameter or terminator of function: %w", atomErr) - } else if atom.Type == tokenizer.TokenVariable { - atoms = append(atoms, atom.Value) - } else if atom.Type == tokenizer.TokenDot { - break - } else { - return nil, fmt.Errorf("expected function parameter or terminator, got '%v' (column %d)", atom.Value, atom.Index) - } - } - - if len(atoms) == 0 { - return nil, fmt.Errorf("every function must have atleast one parameter (column %d)", token.Index) - } - - body, bodyErr := ParseExpression(i) - if bodyErr != nil { - return nil, fmt.Errorf("could not parse function body: %w", bodyErr) - } - - // Construction. 
- result := body - for i := len(atoms) - 1; i >= 0; i-- { - result = lambda.NewAbstraction(atoms[i], result) - } - - return result, nil - case tokenizer.TokenOpenParen: - fn, fnErr := ParseExpression(i) - if fnErr != nil { - return nil, fmt.Errorf("could not parse call function: %w", fnErr) - } - - args := []lambda.Expression{} - - for { - if next, nextErr := i.Peek(); nextErr == nil && next.Type == tokenizer.TokenCloseParen { - break - } - - arg, argErr := ParseExpression(i) - if argErr != nil { - return nil, fmt.Errorf("could not parse call argument: %w", argErr) - } - - args = append(args, arg) - } - - closing, closingErr := i.Pop() - if closingErr != nil { - return nil, fmt.Errorf("could not parse call terminating parenthesis: %w", closingErr) - } else if closing.Type != tokenizer.TokenCloseParen { - return nil, fmt.Errorf("expected call terminating parenthesis, got '%v' (column %v)", closing.Value, closing.Index) - } - - // Construction. - result := fn - for _, arg := range args { - result = lambda.NewApplication(result, arg) - } - - return result, nil - case tokenizer.TokenCloseParen: - return nil, fmt.Errorf("token ')' found without a corresponding openning parenthesis (column %d)", token.Index) - default: - return nil, fmt.Errorf("unknown token '%v' (column %d)", token.Value, token.Index) - } -} - -func GetTree(tokens []tokenizer.Token) (lambda.Expression, error) { - return ParseExpression(iterator.New(tokens)) -} diff --git a/pkg/saccharine/ast.go b/pkg/saccharine/ast.go new file mode 100644 index 0000000..e0c4b05 --- /dev/null +++ b/pkg/saccharine/ast.go @@ -0,0 +1,41 @@ +package saccharine + +type Node interface { + IsNode() +} + +/** ------------------------------------------------------------------------- */ + +type Abstraction struct { + Parameters []string + Body Node +} + +type Application struct { + Abstraction Node + Arguments []Node +} + +type Variable struct { + Name string +} + +/** 
------------------------------------------------------------------------- */ + +func NewAbstraction(parameter []string, body Node) *Abstraction { + return &Abstraction{Parameters: parameter, Body: body} +} + +func NewApplication(abstraction Node, arguments []Node) *Application { + return &Application{Abstraction: abstraction, Arguments: arguments} +} + +func NewVariable(name string) *Variable { + return &Variable{Name: name} +} + +/** ------------------------------------------------------------------------- */ + +func (_ Abstraction) IsNode() {} +func (_ Application) IsNode() {} +func (_ Variable) IsNode() {} diff --git a/pkg/saccharine/parser.go b/pkg/saccharine/parser.go new file mode 100644 index 0000000..08aa5b5 --- /dev/null +++ b/pkg/saccharine/parser.go @@ -0,0 +1,80 @@ +package saccharine + +import ( + "fmt" + + "git.maximhutz.com/max/lambda/pkg/iterator" +) + +func isVariableToken(t Token) bool { + return t.Type == TokenVariable +} + +func ParseExpression(i *iterator.Iterator[Token]) (Node, error) { + token, err := i.Pop() + if err != nil { + return nil, fmt.Errorf("could not get next token: %w", err) + } + + switch token.Type { + case TokenVariable: + return NewVariable(token.Value), nil + case TokenDot: + return nil, fmt.Errorf("token '.' 
found without a corresponding slash (column %d)", token.Index) + case TokenSlash: + tokens := i.PopWhile(isVariableToken) + variables := []string{} + + for _, token := range tokens { + variables = append(variables, token.Value) + } + + if dot, dotErr := i.Pop(); dotErr != nil { + return nil, fmt.Errorf("could not find parameter terminator: %w", dotErr) + } else if dot.Type != TokenDot { + return nil, fmt.Errorf("expected '.', got '%v' (column %d)", dot.Value, dot.Index) + } + + body, bodyErr := ParseExpression(i) + if bodyErr != nil { + return nil, fmt.Errorf("could not parse function body: %w", bodyErr) + } + + return NewAbstraction(variables, body), nil + case TokenOpenParen: + fn, fnErr := ParseExpression(i) + if fnErr != nil { + return nil, fmt.Errorf("could not parse call function: %w", fnErr) + } + + args := []Node{} + + for { + if next, nextErr := i.Peek(); nextErr == nil && next.Type == TokenCloseParen { + break + } + + arg, argErr := ParseExpression(i) + if argErr != nil { + return nil, fmt.Errorf("could not parse call argument: %w", argErr) + } + + args = append(args, arg) + } + + closing, closingErr := i.Pop() + if closingErr != nil { + return nil, fmt.Errorf("could not parse call terminating parenthesis: %w", closingErr) + } else if closing.Type != TokenCloseParen { + return nil, fmt.Errorf("expected call terminating parenthesis, got '%v' (column %d)", closing.Value, closing.Index) + } + + return NewApplication(fn, args), nil + } + + return nil, fmt.Errorf("unexpected token '%v' (column %d)", token.Value, token.Index) +} + +func GetTree(tokens []Token) (Node, error) { + return ParseExpression(iterator.New(tokens)) +} diff --git a/pkg/tokenizer/token.go b/pkg/saccharine/token.go similarity index 96% rename from pkg/tokenizer/token.go rename to pkg/saccharine/token.go index 52a92d3..f21091c 100644 --- a/pkg/tokenizer/token.go +++ b/pkg/saccharine/token.go @@ -1,4 +1,4 @@ -package tokenizer +package saccharine // All tokens in the pseudo-lambda language.
type TokenType int diff --git a/pkg/tokenizer/tokenizer.go b/pkg/saccharine/tokenizer.go similarity index 99% rename from pkg/tokenizer/tokenizer.go rename to pkg/saccharine/tokenizer.go index af2c946..598f5eb 100644 --- a/pkg/tokenizer/tokenizer.go +++ b/pkg/saccharine/tokenizer.go @@ -1,4 +1,4 @@ -package tokenizer +package saccharine import ( "errors" diff --git a/pkg/set/set.go b/pkg/set/set.go index a55e1f3..dc4ac29 100644 --- a/pkg/set/set.go +++ b/pkg/set/set.go @@ -18,7 +18,7 @@ func (s *Set[T]) Remove(items ...T) { } } -func (s *Set[T]) Union(o Set[T]) { +func (s *Set[T]) Merge(o Set[T]) { for item := range o { s.Add(item) }