wip: new folder structure, overhaul language
@@ -12,7 +12,7 @@ func GetFreeVariables(e Expression) set.Set[string] {
         return vars
     case *Application:
         vars := GetFreeVariables(e.Abstraction)
-        vars.Union(GetFreeVariables(e.Argument))
+        vars.Merge(GetFreeVariables(e.Argument))
         return vars
     default:
         return nil
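A quick sanity check on what the walk above computes (a sketch, not part of this commit; the lambda constructors are the ones the deleted parser below uses): the free variables of \x. (x y) are just {y}, because x is bound by the abstraction.

// Sketch: build (\x. (x y)) and collect its free variables.
body := lambda.NewApplication(lambda.NewVariable("x"), lambda.NewVariable("y"))
expr := lambda.NewAbstraction("x", body)
free := lambda.GetFreeVariables(expr)
_ = free // expected to contain only "y"; the bound "x" is excluded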
@@ -5,14 +5,13 @@ func ReduceOnce(e *Expression) bool {
     case *Abstraction:
         return ReduceOnce(&typed.Body)
     case *Application:
-        fn, fnOk := typed.Abstraction.(*Abstraction)
-        if fnOk {
+        if fn, fnOk := typed.Abstraction.(*Abstraction); fnOk {
             Substitute(&fn.Body, fn.Parameter, typed.Argument)
             *e = fn.Body
             return true
         }
-        good := ReduceOnce(&typed.Abstraction)
-        if good {
+        if ReduceOnce(&typed.Abstraction) {
             return true
         }
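The *Application case above performs the usual beta step: when the left side is an abstraction, substitute the argument into its body and replace the whole application in place. A sketch of that behaviour (not part of this commit; constructor names as used elsewhere in this diff):

// (\x. x) y reduces in one step to y.
var expr lambda.Expression = lambda.NewApplication(
    lambda.NewAbstraction("x", lambda.NewVariable("x")),
    lambda.NewVariable("y"),
)
changed := lambda.ReduceOnce(&expr)
_ = changed // expected to be true, with expr now the bare variable y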
@@ -18,7 +18,7 @@ func Substitute(e *Expression, target string, replacement Expression) {
         }

         used := GetFreeVariables(typed.Body)
-        used.Union(replacementFreeVars)
+        used.Merge(replacementFreeVars)
         freshVar := GenerateFreshName(used)
         Rename(typed, typed.Parameter, freshVar)
         Substitute(&typed.Body, target, replacement)
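The Merge into used feeds GenerateFreshName, which keeps substitution capture-avoiding: a binder whose parameter clashes with a free variable of the replacement is renamed before descending into its body. A sketch of the case this guards against (not part of this commit):

// Substitute z := x inside (\x. z). Without renaming, the free x in the
// replacement would be captured by the binder; with the fresh-name step the
// result is (\x'. x) for some fresh parameter x'.
var abs lambda.Expression = lambda.NewAbstraction("x", lambda.NewVariable("z"))
lambda.Substitute(&abs, "z", lambda.NewVariable("x"))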
@@ -1,98 +0,0 @@
-package parser
-
-import (
-    "fmt"
-
-    "git.maximhutz.com/max/lambda/pkg/iterator"
-    "git.maximhutz.com/max/lambda/pkg/lambda"
-    "git.maximhutz.com/max/lambda/pkg/tokenizer"
-)
-
-func ParseExpression(i *iterator.Iterator[tokenizer.Token]) (lambda.Expression, error) {
-    token, err := i.Pop()
-    if err != nil {
-        return nil, fmt.Errorf("could not get next token: %w", err)
-    }
-
-    switch token.Type {
-    case tokenizer.TokenVariable:
-        return lambda.NewVariable(token.Value), nil
-    case tokenizer.TokenDot:
-        return nil, fmt.Errorf("token '.' found without a corresponding slash (column %d)", token.Index)
-    case tokenizer.TokenSlash:
-        atoms := []string{}
-
-        for {
-            atom, atomErr := i.Pop()
-            if atomErr != nil {
-                return nil, fmt.Errorf("could not find parameter or terminator of function: %w", atomErr)
-            } else if atom.Type == tokenizer.TokenVariable {
-                atoms = append(atoms, atom.Value)
-            } else if atom.Type == tokenizer.TokenDot {
-                break
-            } else {
-                return nil, fmt.Errorf("expected function parameter or terminator, got '%v' (column %d)", atom.Value, atom.Index)
-            }
-        }
-
-        if len(atoms) == 0 {
-            return nil, fmt.Errorf("every function must have atleast one parameter (column %d)", token.Index)
-        }
-
-        body, bodyErr := ParseExpression(i)
-        if bodyErr != nil {
-            return nil, fmt.Errorf("could not parse function body: %w", bodyErr)
-        }
-
-        // Construction.
-        result := body
-        for i := len(atoms) - 1; i >= 0; i-- {
-            result = lambda.NewAbstraction(atoms[i], result)
-        }
-
-        return result, nil
-    case tokenizer.TokenOpenParen:
-        fn, fnErr := ParseExpression(i)
-        if fnErr != nil {
-            return nil, fmt.Errorf("could not parse call function: %w", fnErr)
-        }
-
-        args := []lambda.Expression{}
-
-        for {
-            if next, nextErr := i.Peek(); nextErr == nil && next.Type == tokenizer.TokenCloseParen {
-                break
-            }
-
-            arg, argErr := ParseExpression(i)
-            if argErr != nil {
-                return nil, fmt.Errorf("could not parse call argument: %w", argErr)
-            }
-
-            args = append(args, arg)
-        }
-
-        closing, closingErr := i.Pop()
-        if closingErr != nil {
-            return nil, fmt.Errorf("could not parse call terminating parenthesis: %w", closingErr)
-        } else if closing.Type != tokenizer.TokenCloseParen {
-            return nil, fmt.Errorf("expected call terminating parenthesis, got '%v' (column %v)", closing.Value, closing.Index)
-        }
-
-        // Construction.
-        result := fn
-        for _, arg := range args {
-            result = lambda.NewApplication(result, arg)
-        }
-
-        return result, nil
-    case tokenizer.TokenCloseParen:
-        return nil, fmt.Errorf("token ')' found without a corresponding openning parenthesis (column %d)", token.Index)
-    default:
-        return nil, fmt.Errorf("unknown token '%v' (column %d)", token.Value, token.Index)
-    }
-}
-
-func GetTree(tokens []tokenizer.Token) (lambda.Expression, error) {
-    return ParseExpression(iterator.New(tokens))
-}
pkg/saccharine/ast.go (Normal file, 41 lines)
@@ -0,0 +1,41 @@
+package saccharine
+
+type Node interface {
+    IsNode()
+}
+
+/** ------------------------------------------------------------------------- */
+
+type Abstraction struct {
+    Parameters []string
+    Body       Node
+}
+
+type Application struct {
+    Abstraction Node
+    Arguments   []Node
+}
+
+type Variable struct {
+    Name string
+}
+
+/** ------------------------------------------------------------------------- */
+
+func NewAbstraction(parameter []string, body Node) *Abstraction {
+    return &Abstraction{Parameters: parameter, Body: body}
+}
+
+func NewApplication(abstraction Node, arguments []Node) *Application {
+    return &Application{Abstraction: abstraction, Arguments: arguments}
+}
+
+func NewVariable(name string) *Variable {
+    return &Variable{Name: name}
+}
+
+/** ------------------------------------------------------------------------- */
+
+func (_ Abstraction) IsNode() {}
+func (_ Application) IsNode() {}
+func (_ Variable) IsNode()    {}
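For orientation (a sketch, not part of this commit): unlike the core lambda AST, whose Abstraction binds a single Parameter and whose Application takes a single Argument, these sugared nodes keep parameter and argument lists intact, so \x y. (x y) is a single node before desugaring.

// \x y. (x y) as one sugared node.
node := saccharine.NewAbstraction(
    []string{"x", "y"},
    saccharine.NewApplication(
        saccharine.NewVariable("x"),
        []saccharine.Node{saccharine.NewVariable("y")},
    ),
)
_ = node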
pkg/saccharine/parser.go (Normal file, 80 lines)
@@ -0,0 +1,80 @@
+package saccharine
+
+import (
+    "fmt"
+
+    "git.maximhutz.com/max/lambda/pkg/iterator"
+)
+
+func isVariableToken(t Token) bool {
+    return t.Type == TokenVariable
+}
+
+func ParseExpression(i *iterator.Iterator[Token]) (Node, error) {
+    token, err := i.Pop()
+    if err != nil {
+        return nil, fmt.Errorf("could not get next token: %w", err)
+    }
+
+    switch token.Type {
+    case TokenVariable:
+        return NewVariable(token.Value), nil
+    case TokenDot:
+        return nil, fmt.Errorf("token '.' found without a corresponding slash (column %d)", token.Index)
+    case TokenSlash:
+        tokens := i.PopWhile(isVariableToken)
+        variables := []string{}
+
+        for _, token := range tokens {
+            variables = append(variables, token.Value)
+        }
+
+        if dot, dotErr := i.Pop(); dotErr != nil {
+            return nil, fmt.Errorf("could not find parameter terminator: %w", dotErr)
+        } else if dot.Type != TokenDot {
+            return nil, fmt.Errorf("expected '.', got '%v' (column %d)", dot.Value, dot.Index)
+        }
+
+        body, bodyErr := ParseExpression(i)
+        if bodyErr != nil {
+            return nil, fmt.Errorf("could not parse function body: %w", bodyErr)
+        }
+
+        return NewAbstraction(variables, body), nil
+    case TokenOpenParen:
+        fn, fnErr := ParseExpression(i)
+        if fnErr != nil {
+            return nil, fmt.Errorf("could not parse call function: %w", fnErr)
+        }
+
+        args := []Node{}
+
+        for {
+            if next, nextErr := i.Peek(); nextErr == nil && next.Type == TokenCloseParen {
+                break
+            }
+
+            arg, argErr := ParseExpression(i)
+            if argErr != nil {
+                return nil, fmt.Errorf("could not parse call argument: %w", argErr)
+            }
+
+            args = append(args, arg)
+        }
+
+        closing, closingErr := i.Pop()
+        if closingErr != nil {
+            return nil, fmt.Errorf("could not parse call terminating parenthesis: %w", closingErr)
+        } else if closing.Type != TokenCloseParen {
+            return nil, fmt.Errorf("expected call terminating parenthesis, got '%v' (column %v)", closing.Value, closing.Index)
+        }
+
+        return NewApplication(fn, args), nil
+    }
+
+    return nil, fmt.Errorf("unexpected token '%v' (column %d)", token.Value, token.Index)
+}
+
+func GetTree(tokens []Token) (Node, error) {
+    return ParseExpression(iterator.New(tokens))
+}
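End to end, GetTree turns a token slice into a sugared tree. A rough usage sketch (not part of this commit; the tokens are hand-built because the tokenizer itself is not shown here, and their Value strings are only illustrative):

// Parse the equivalent of \x. x from hand-built tokens.
tokens := []saccharine.Token{
    {Type: saccharine.TokenSlash, Value: "\\", Index: 0},
    {Type: saccharine.TokenVariable, Value: "x", Index: 1},
    {Type: saccharine.TokenDot, Value: ".", Index: 2},
    {Type: saccharine.TokenVariable, Value: "x", Index: 4},
}
tree, err := saccharine.GetTree(tokens)
_, _ = tree, err // on success, tree is an *Abstraction with Parameters ["x"] and a *Variable body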
@@ -1,4 +1,4 @@
-package tokenizer
+package saccharine

 // All tokens in the pseudo-lambda language.
 type TokenType int
@@ -1,4 +1,4 @@
-package tokenizer
+package saccharine

 import (
     "errors"
@@ -18,7 +18,7 @@ func (s *Set[T]) Remove(items ...T) {
     }
 }

-func (s *Set[T]) Union(o Set[T]) {
+func (s *Set[T]) Merge(o Set[T]) {
     for item := range o {
         s.Add(item)
     }
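The rename from Union to Merge keeps the same in-place behaviour: the receiver is mutated and nothing is returned. A usage sketch reusing GetFreeVariables from this diff rather than assuming any set constructor (and assuming the unshown *Variable case returns a singleton set):

// Fold the free variables of one expression into those of another.
a := lambda.GetFreeVariables(lambda.NewVariable("x"))
b := lambda.GetFreeVariables(lambda.NewVariable("y"))
a.Merge(b)
// a should now contain both "x" and "y"; b is left untouched.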