feat: tokenizer
cmd/lambda/lambda.go (new file, 24 lines)
@@ -0,0 +1,24 @@
package main

import (
	"errors"
	"log/slog"
	"os"

	"git.maximhutz.com/max/lambda/pkg/cli"
	"git.maximhutz.com/max/lambda/pkg/tokenizer"
)

func main() {
	slog.Info("Using program arguments.", "args", os.Args)
	options, err := cli.ParseOptions(os.Args[1:])
	cli.HandleError(err)

	slog.Info("Parsed CLI options.", "options", options)
	tokens, fails := tokenizer.GetTokens([]rune(options.Input))
	if len(fails) > 0 {
		cli.HandleError(errors.Join(fails...))
	}

	slog.Info("Parsed tokens.", "tokens", tokens)
}
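The pkg/tokenizer package referenced above is not part of this hunk. The following is a minimal sketch of a GetTokens that would satisfy the call site in main; only the signature (taking a []rune and returning tokens plus a slice of failures that the caller joins with errors.Join) is taken from the code above. The Token struct, the token kinds, and the accepted character set are assumptions inferred from the repository name, not the repository's actual implementation.

package tokenizer

import "fmt"

// Token is a hypothetical token type inferred from the call site in main;
// the real pkg/tokenizer may define it differently.
type Token struct {
	Kind  string
	Value string
}

// GetTokens scans the input runes and returns the recognized tokens along
// with one error per character it could not classify. The caller decides
// whether any failures are fatal (main joins them with errors.Join).
func GetTokens(input []rune) ([]Token, []error) {
	var tokens []Token
	var fails []error
	for i, r := range input {
		switch {
		case r == ' ' || r == '\t' || r == '\n':
			// Skip whitespace between tokens.
		case r == '\\' || r == 'λ':
			tokens = append(tokens, Token{Kind: "lambda", Value: string(r)})
		case r == '.':
			tokens = append(tokens, Token{Kind: "dot", Value: string(r)})
		case r == '(' || r == ')':
			tokens = append(tokens, Token{Kind: "paren", Value: string(r)})
		case r >= 'a' && r <= 'z':
			tokens = append(tokens, Token{Kind: "ident", Value: string(r)})
		default:
			// Collect the failure instead of aborting, so every bad
			// character in the input is reported in a single pass.
			fails = append(fails, fmt.Errorf("unexpected character %q at position %d", r, i))
		}
	}
	return tokens, fails
}

Returning the failures as a slice rather than a single error matches how main consumes them: all unrecognized characters can be reported at once through errors.Join instead of stopping at the first one.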