Diffstat (limited to 'main/parse.go')
-rw-r--r--  main/parse.go  145
1 files changed, 145 insertions, 0 deletions
diff --git a/main/parse.go b/main/parse.go
new file mode 100644
index 0000000..e876010
--- /dev/null
+++ b/main/parse.go
@@ -0,0 +1,145 @@
+package main
+
+import (
+	"fmt"
+	"strconv"
+	"strings"
+)
+
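+// parser consumes tokens from a lexer channel and keeps a stack of
+// rewound tokens so the grammar functions can look ahead.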
+type parser struct {
+	tokenStream chan Token
+	rewinds     []Token
+}
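+// next returns the next token, preferring rewound tokens before reading
+// from the stream; rewind and peek build one-token lookahead on top of it.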
+func (p *parser) next() Token {
+	if len(p.rewinds) == 0 {
+		return <-p.tokenStream
+	}
+	token := p.rewinds[len(p.rewinds)-1]
+	p.rewinds = p.rewinds[:len(p.rewinds)-1]
+	return token
+}
+func (p *parser) rewind(token Token) {
+	p.rewinds = append(p.rewinds, token)
+}
+func (p *parser) peek() Token {
+	token := p.next()
+	p.rewind(token)
+	return token
+}
+
+// TODO: make a pratt parser
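+// parsePathPatternFilter parses a path pattern (#name, @index, . and *
+// tokens) into a PathFilterAST, stopping at the first token that is not
+// part of the pattern.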
+func (p *parser) parsePathPatternFilter() PathFilterAST {
+	var segments []PathFilterAST
+loop:
+	for {
+		token := p.next()
+		switch token.typ {
+		case TokenHash:
+			stringIndex := p.next()
+			if stringIndex.typ != TokenPatternStringIndex {
+				panic("Expected string index after # in pattern")
+			}
+			segments = append(segments, StringSegmentPathFilterAST{stringIndex.val})
+		case TokenAt:
+			intIndex := p.next()
+			if intIndex.typ != TokenPatternIntegerIndex {
+				panic("Expected integer index after @ in pattern")
+			}
+			index, err := strconv.Atoi(intIndex.val)
+			if err != nil {
+				panic("Expected integer index after @ in pattern")
+			}
+			segments = append(segments, IntegerSegmentPathFilterAST{index})
+		case TokenDot:
+			segments = append(segments, AnySegmentPathFilterAST{})
+		case TokenAst:
+			if len(segments) == 0 {
+				panic("Invalid * in pattern, * must go after something")
+			}
+			segments[len(segments)-1] = RepeatPathFilterAST{segments[len(segments)-1]}
+		default:
+			p.rewind(token)
+			break loop
+		}
+	}
+	return SequencePathFilterAST{segments}
+}
+
+// TODO: should only return a single filter
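+// parseFilter parses one filter expression; only path pattern filters
+// are recognised so far.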
+func (p *parser) parseFilter() []Filter {
+	var filters []Filter
+	token := p.next()
+	switch token.typ {
+	case TokenHash, TokenAt, TokenDot:
+		p.rewind(token)
+		filterAst := p.parsePathPatternFilter()
+		filters = append(filters, compilePathFilterAST(filterAst))
+		token = p.next()
+	}
+	if len(filters) == 0 {
+		panic("Missing filter")
+	}
+	p.rewind(token)
+	return filters
+}
+
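+// parseBasicCommand maps a command character to its Command: 'p' is
+// PrintValueCommand and 'd' is DeleteAllCommand.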
+func (p *parser) parseBasicCommand(commandChar rune) Command {
+	switch commandChar {
+	case 'p':
+		return PrintValueCommand{}
+	case 'd':
+		return DeleteAllCommand{}
+	default:
+		panic("Invalid command")
+	}
+}
+
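+// parseCommand parses a single command, wrapping it in a FilteredCommand
+// for each filter that precedes it.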
+func (p *parser) parseCommand() Command {
+	token := p.next()
+	switch token.typ {
+	case TokenHash, TokenAt, TokenDot:
+		p.rewind(token)
+		filters := p.parseFilter()
+		command := p.parseCommand()
+		for _, filter := range filters {
+			command = FilteredCommand{
+				filter:  filter,
+				command: command,
+			}
+		}
+		return command
+	case TokenCommand:
+		commandChar, _, err := strings.NewReader(token.val).ReadRune()
+		if err != nil {
+			panic("Error reading a command character!?")
+		}
+		return p.parseBasicCommand(commandChar)
+	default:
+		panic(fmt.Sprintf("Invalid token %v, expected command", token))
+	}
+}
+
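+// parseCommands parses a semicolon-separated list of commands until EOF
+// or a closing brace.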
+func (p *parser) parseCommands() []Command {
+	var commands []Command
+	for {
+		nextToken := p.peek()
+		if nextToken.typ == TokenEOF || nextToken.typ == TokenRBrace {
+			return commands
+		}
+		commands = append(commands, p.parseCommand())
+		semicolon := p.next()
+		if semicolon.typ == TokenEOF || semicolon.typ == TokenRBrace {
+			return commands
+		}
+		if semicolon.typ != TokenSemicolon {
+			panic("Expected ; after command")
+		}
+	}
+}
+
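+// Parse reads tokens from the channel and returns the parsed commands.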
+func Parse(tokens chan Token) []Command {
+	p := parser{
+		tokenStream: tokens,
+	}
+	return p.parseCommands()
+}