<- Back to shtanton's homepage
aboutsummaryrefslogtreecommitdiff
path: root/main/parse.go
diff options
context:
space:
mode:
authorCharlie Stanton <charlie@shtanton.xyz>2022-08-26 11:51:46 +0100
committerCharlie Stanton <charlie@shtanton.xyz>2022-08-26 11:51:46 +0100
commitce5c224211a94bfd4c898b51d15febdf2ed9d6f2 (patch)
tree8d1c9db463d9c1793bd3aad2b6875a22d4add90c /main/parse.go
parentececdecdaf6c6f6295d31a92f0663d703e7760dd (diff)
downloadstred-go-ce5c224211a94bfd4c898b51d15febdf2ed9d6f2.tar
Refactors some stuff and adds lexing and parsing
Diffstat (limited to 'main/parse.go')
-rw-r--r--main/parse.go145
1 file changed, 145 insertions, 0 deletions
diff --git a/main/parse.go b/main/parse.go
new file mode 100644
index 0000000..e876010
--- /dev/null
+++ b/main/parse.go
@@ -0,0 +1,145 @@
+package main
+
+import (
+ "strings"
+ "strconv"
+ "fmt"
+)
+
+type parser struct {
+ tokenStream chan Token
+ rewinds []Token
+}
+func (p *parser) next() Token {
+ if len(p.rewinds) == 0 {
+ return <- p.tokenStream
+ }
+ token := p.rewinds[len(p.rewinds)-1]
+ p.rewinds = p.rewinds[:len(p.rewinds)-1]
+ return token
+}
+func (p *parser) rewind(token Token) {
+ p.rewinds = append(p.rewinds, token)
+}
+func (p *parser) peek() Token {
+ token := p.next()
+ p.rewind(token)
+ return token
+}
+
+// TODO: make a pratt parser
+func (p *parser) parsePathPatternFilter() PathFilterAST {
+ var segments []PathFilterAST
+ loop: for {
+ token := p.next()
+ switch token.typ {
+ case TokenHash:
+ stringIndex := p.next()
+ if stringIndex.typ != TokenPatternStringIndex {
+ panic("Expected string index after # in pattern")
+ }
+ segments = append(segments, StringSegmentPathFilterAST{stringIndex.val})
+ case TokenAt:
+ intIndex := p.next()
+ if intIndex.typ != TokenPatternIntegerIndex {
+ panic("Expected integer index after @ in pattern")
+ }
+ index, err := strconv.Atoi(intIndex.val)
+ if err != nil {
+ panic("Expected integer index after @ in pattern")
+ }
+ segments = append(segments, IntegerSegmentPathFilterAST{index})
+ case TokenDot:
+ segments = append(segments, AnySegmentPathFilterAST{})
+ case TokenAst:
+ if len(segments) == 0 {
+ panic("Invalid * in pattern, * must go after something")
+ }
+ segments[len(segments) - 1] = RepeatPathFilterAST {segments[len(segments)-1]}
+ default:
+ p.rewind(token)
+ break loop
+ }
+ }
+ return SequencePathFilterAST {segments}
+}
+
+// TODO: should only return a single filter
+func (p *parser) parseFilter() []Filter {
+ var filters []Filter
+ token := p.next()
+ switch token.typ {
+ case TokenHash, TokenAt, TokenDot:
+ p.rewind(token)
+ filterAst := p.parsePathPatternFilter()
+ filters = append(filters, compilePathFilterAST(filterAst))
+ token = p.next()
+ }
+ if len(filters) == 0 {
+ panic("Missing filter")
+ }
+ p.rewind(token)
+ return filters
+}
+
+func (p *parser) parseBasicCommand(commandChar rune) Command {
+ switch commandChar {
+ case 'p':
+ return PrintValueCommand{}
+ case 'd':
+ return DeleteAllCommand{}
+ default:
+ panic("Invalid command")
+ }
+}
+
+func (p *parser) parseCommand() Command {
+ token := p.next()
+ switch token.typ {
+ case TokenHash, TokenAt, TokenDot:
+ p.rewind(token)
+ filters := p.parseFilter()
+ command := p.parseCommand()
+ for _, filter := range filters {
+ command = FilteredCommand {
+ filter: filter,
+ command: command,
+ }
+ }
+ return command
+ case TokenCommand:
+ commandChar, _, err := strings.NewReader(token.val).ReadRune()
+ if err != nil {
+ panic("Error reading a command character!?")
+ }
+ return p.parseBasicCommand(commandChar)
+ default:
+ fmt.Println(token)
+ panic("Invalid token, expected command")
+ }
+}
+
+func (p *parser) parseCommands() []Command {
+ var commands []Command
+ for {
+ nextToken := p.peek()
+ if nextToken.typ == TokenEOF || nextToken.typ == TokenRBrace {
+ return commands
+ }
+ commands = append(commands, p.parseCommand())
+ semicolon := p.next()
+ if semicolon.typ == TokenEOF || semicolon.typ == TokenRBrace {
+ return commands
+ }
+ if semicolon.typ != TokenSemicolon {
+ panic("Expected ; after command")
+ }
+ }
+}
+
+func Parse(tokens chan Token) []Command {
+ p := parser {
+ tokenStream: tokens,
+ }
+ return p.parseCommands()
+}