author     Charlie Stanton <charlie@shtanton.xyz>   2023-04-20 09:59:59 +0100
committer  Charlie Stanton <charlie@shtanton.xyz>   2023-04-20 09:59:59 +0100
commit     a0a416e7762fcdcc066617da8083b0372b87155c (patch)
tree       84217b9d1a9496944ddfeee8687cd01f6997cc3e /main
parent     b95e5ddaa1b182dfe58a386bfc107fa7d95c4393 (diff)
download   stred-go-a0a416e7762fcdcc066617da8083b0372b87155c.tar
Remove filters and various commands that are no longer wanted
These have all been made redundant by the incredible substitute command
Diffstat (limited to 'main')
-rw-r--r--  main/command.go         60
-rw-r--r--  main/filter.go          93
-rw-r--r--  main/lex.go            233
-rw-r--r--  main/parse.go          181
-rw-r--r--  main/pathfilter.go      83
-rw-r--r--  main/pathfilterast.go   74
6 files changed, 4 insertions, 720 deletions
diff --git a/main/command.go b/main/command.go
index e676255..7d44309 100644
--- a/main/command.go
+++ b/main/command.go
@@ -16,41 +16,6 @@ func (cmd PrintValueCommand) exec(state *ProgramState) {
}
}
-type ToggleTerminalCommand struct {}
-func (cmd ToggleTerminalCommand) exec(state *ProgramState) {
- toggled := map[walk.TerminalValue]walk.TerminalValue {
- walk.ArrayBegin: walk.MapBegin,
- walk.ArrayEnd: walk.MapEnd,
- walk.MapBegin: walk.ArrayBegin,
- walk.MapEnd: walk.ArrayEnd,
- }
-
- for i := range state.value {
- terminal, isTerminal := state.value[i].(walk.TerminalValue)
- if !isTerminal {
- continue
- }
- state.value[i] = toggled[terminal]
- }
-}
-
-type FilteredCommand struct {
- filter Filter
- command Command
-}
-func (cmd FilteredCommand) exec(state *ProgramState) {
- path := walk.PathFromWalkValues(state.path)
- for _, value := range state.value {
- if cmd.filter.exec(walk.WalkItem {
- Value: value,
- Path: path,
- }) {
- cmd.command.exec(state)
- return
- }
- }
-}
-
type SequenceCommand struct {
commands []Command
}
@@ -60,22 +25,6 @@ func (cmd SequenceCommand) exec(state *ProgramState) {
}
}
-type AppendLiteralCommand struct {
- values []walk.WalkValue
-}
-func (cmd AppendLiteralCommand) exec(state *ProgramState) {
- state.value = append(state.value, cmd.values...)
-}
-
-type PrependLiteralCommand struct {
- values []walk.WalkValue
-}
-func (cmd PrependLiteralCommand) exec(state *ProgramState) {
- var newItems []walk.WalkValue
- newItems = append(newItems, cmd.values...)
- state.value = append(newItems, state.value...)
-}
-
type NextCommand struct {}
func (cmd NextCommand) exec(state *ProgramState) {
nextItem := <- state.in
@@ -90,15 +39,6 @@ func (cmd AppendNextCommand) exec(state *ProgramState) {
state.path = nextItem.Path.ToWalkValues()
}
-type PrintLiteralsCommand struct {
- items []walk.WalkItem
-}
-func (cmd PrintLiteralsCommand) exec(state *ProgramState) {
- for _, item := range cmd.items {
- state.out <- item
- }
-}
-
type DeleteAllCommand struct {}
func (cmd DeleteAllCommand) exec(state *ProgramState) {
state.path = nil
diff --git a/main/filter.go b/main/filter.go
deleted file mode 100644
index d80ae8f..0000000
--- a/main/filter.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package main
-
-import (
- "main/walk"
-)
-
-type PathFilter struct {
- initial PathFilterState
-}
-func (filter PathFilter) exec(space walk.WalkItem) bool {
- pathFilterState := make(map[PathFilterState]struct{})
- pathFilterState[filter.initial] = struct{}{}
- for _, segment := range space.Path {
- nextPathFilterState := make(map[PathFilterState]struct{})
- for curState := range pathFilterState {
- for nextState := range curState.eat(segment) {
- nextPathFilterState[nextState] = struct{}{}
- }
- }
- pathFilterState = nextPathFilterState
- }
- for pathState := range pathFilterState {
- if pathState.accept() {
- return true
- }
- }
- return false
-}
-
-type MapTerminalFilter struct {}
-func (filter MapTerminalFilter) exec(space walk.WalkItem) bool {
- terminal, isTerminal := space.Value.(walk.TerminalValue)
- if !isTerminal {
- return false
- }
- return terminal == walk.MapBegin || terminal == walk.MapEnd
-}
-
-type BeginTerminalFilter struct {}
-func (filter BeginTerminalFilter) exec(space walk.WalkItem) bool {
- terminal, isTerminal := space.Value.(walk.TerminalValue)
- if !isTerminal {
- return false
- }
- return terminal == walk.ArrayBegin || terminal == walk.MapBegin
-}
-
-type EndTerminalFilter struct {}
-func (filter EndTerminalFilter) exec(space walk.WalkItem) bool {
- terminal, isTerminal := space.Value.(walk.TerminalValue)
- if !isTerminal {
- return false
- }
- return terminal == walk.ArrayEnd || terminal == walk.MapEnd
-}
-
-type TerminalFilter struct {}
-func (filter TerminalFilter) exec(space walk.WalkItem) bool {
- _, isTerminal := space.Value.(walk.TerminalValue)
- return isTerminal
-}
-
-type RootFilter struct {}
-func (filter RootFilter) exec(space walk.WalkItem) bool {
- return len(space.Path) == 0
-}
-
-type AndFilter struct {
- left Filter
- right Filter
-}
-func (filter AndFilter) exec(space walk.WalkItem) bool {
- return filter.left.exec(space) && filter.right.exec(space)
-}
-
-type OrFilter struct {
- left Filter
- right Filter
-}
-func (filter OrFilter) exec(space walk.WalkItem) bool {
- return filter.left.exec(space) || filter.right.exec(space)
-}
-
-type NotFilter struct {
- content Filter
-}
-func (filter NotFilter) exec(space walk.WalkItem) bool {
- return !filter.content.exec(space)
-}
-
-type Filter interface {
- exec(walk.WalkItem) bool
-}
\ No newline at end of file
diff --git a/main/lex.go b/main/lex.go
index 02dd0ee..e93e42a 100644
--- a/main/lex.go
+++ b/main/lex.go
@@ -113,7 +113,6 @@ type TokenType int
const (
TokenErr TokenType = iota // Lexing error
TokenEOF // end of file
- TokenSemicolon // ;
TokenLParen // (
TokenRParen // )
TokenLBrace // {
@@ -198,88 +197,24 @@ func isStringIndexChar(r rune) bool {
func lexCommand(l *lexer) stateFunc {
l.acceptAll(whitespace)
l.ignore()
- if l.peek() == eof {
- l.emit(TokenEOF)
- return nil
- }
r := l.next()
switch r {
- case '#':
- l.emit(TokenHash)
- lexPatternStringIndex(l)
- return lexCommand
- case '@':
- l.emit(TokenAt)
- lexPatternIntegerIndex(l)
- return lexCommand
- case '.':
- l.emit(TokenDot)
- return lexCommand
- case '*':
- l.emit(TokenAst)
- return lexCommand
- case '|':
- if l.accept("|") {
- l.emit(TokenOr)
- } else {
- l.emit(TokenBar)
- }
- return lexCommand
- case '[':
- l.emit(TokenLBrack)
- return lexCommand
- case ']':
- l.emit(TokenRBrack)
- return lexCommand
- case '(':
- l.emit(TokenLParen)
- return lexCommand
- case ')':
- l.emit(TokenRParen)
- return lexCommand
- case '?':
- l.emit(TokenQuestion)
- return lexCommand
+ case eof:
+ l.emit(TokenEOF)
+ return nil
case '{':
l.emit(TokenLBrace)
return lexCommand
case '}':
l.emit(TokenRBrace)
- return lexCommandEnd
- case '&':
- if l.accept("&") {
- l.emit(TokenAnd)
- return lexCommand
- }
- case '^':
- if l.accept("$") {
- l.emit(TokenHatDollar)
- } else {
- l.emit(TokenHat)
- }
- return lexCommand
- case '$':
- l.emit(TokenDollar)
- return lexCommand
- case '!':
- l.emit(TokenExclamation)
- return lexCommand
- case '~':
- l.emit(TokenTilde)
return lexCommand
- case 'i':
- l.emit(TokenCommand)
- return lexMultipleLiterals
case 's':
l.emit(TokenCommand)
return lexSubstitution
- case 'S':
- l.emit(TokenCommand)
- return lexBigSubstitution
}
if isAlpha(r) {
l.emit(TokenCommand)
- return lexCommandEnd
+ return lexCommand
}
return l.errorf("Expected command found something else")
}
@@ -306,163 +241,3 @@ func lexSubstitution(l *lexer) stateFunc {
}
return lexCommand
}
-
-func lexBigSubstitution(l *lexer) stateFunc {
- delimiter := l.next()
- if delimiter == eof || isAlphaNumeric(delimiter) {
- return l.errorf("Invalid delimiter for big substitution")
- }
- l.emit(TokenSubstituteDelimiter)
- loop: for {
- r := l.next()
- switch r {
- case delimiter:
- l.emit(TokenSubstituteDelimiter)
- break loop
- case '#':
- l.emit(TokenHash)
- lexPatternStringIndex(l)
- case '@':
- l.emit(TokenAt)
- lexPatternIntegerIndex(l)
- case '.':
- l.emit(TokenDot)
- case '*':
- l.emit(TokenAst)
- case '|':
- l.emit(TokenBar)
- case '[':
- l.emit(TokenLBrack)
- case ']':
- l.emit(TokenRBrack)
- case '?':
- l.emit(TokenQuestion)
- case ':':
- l.emit(TokenColon)
- case ',':
- l.emit(TokenComma)
- }
- }
- loop2: for {
- r := l.next()
- switch r {
- case delimiter:
- l.emit(TokenSubstituteDelimiter)
- break loop2
- case '\\':
- if !l.acceptPassing(isDigit) {
- return l.errorf("Expected digit after \\")
- }
- l.emit(TokenSubstitutePlaceholder)
- }
- }
- // TODO: No clue where I was going with this
- return lexCommand
-}
-
-func lexMultipleLiterals(l *lexer) stateFunc {
- l.acceptAll(whitespaceNewlines)
- l.ignore()
- r := l.next()
- switch r {
- case ';', eof:
- l.backup()
- return lexCommandEnd
- case ':':
- l.emit(TokenColon)
- return lexMultipleLiterals
- case ',':
- l.emit(TokenComma)
- return lexMultipleLiterals
- }
- err := lexSingleLiteral(l)
- if err != "" {
- return l.errorf(err)
- }
- return lexMultipleLiterals
-}
-
-func lexSingleLiteral(l *lexer) string {
- l.acceptAll(whitespaceNewlines)
- l.ignore()
- r := l.next()
- switch r {
- case '"':
- l.emit(TokenDoubleQuote)
- if !lexStringLiteral(l) {
- return "Expected closing \""
- }
- case 'n':
- if !l.expect("ull") {
- return "Invalid literal, expected null"
- }
- l.emit(TokenNullLiteral)
- case 't':
- if !l.expect("rue") {
- return "Invalid literal, expected true"
- }
- l.emit(TokenTrueLiteral)
- case 'f':
- if !l.expect("alse") {
- return "Invalid literal, expected false"
- }
- l.emit(TokenFalseLiteral)
- case '{', '}', '[', ']':
- l.emit(TokenTerminalLiteral)
- default:
- if isDigit(r) {
- lexNumberLiteral(l)
- return ""
- }
- return "Invalid literal"
- }
- return ""
-}
-
-// Just read the first digit
-func lexNumberLiteral(l *lexer) {
- l.acceptAllPassing(isDigit)
- if l.accept(".") {
- l.acceptAllPassing(isDigit)
- }
- l.emit(TokenNumberLiteral)
-}
-
-// TODO: escape characters
-func lexStringLiteral(l *lexer) bool {
- for {
- r := l.next()
- switch r {
- case '"':
- l.backup()
- l.emit(TokenStringLiteral)
- l.next()
- l.emit(TokenDoubleQuote)
- return true
- case eof:
- return false
- }
- }
-}
-
-func lexPatternStringIndex(l *lexer) {
- l.acceptAllPassing(isStringIndexChar)
- l.emit(TokenPatternStringIndex)
-}
-
-func lexPatternIntegerIndex(l *lexer) {
- l.acceptAllPassing(isDigit)
- l.emit(TokenPatternIntegerIndex)
-}
-
-func lexCommandEnd(l *lexer) stateFunc {
- if l.peek() == eof {
- l.emit(TokenEOF)
- return nil
- }
- if l.accept("}") {
- l.emit(TokenRBrace)
- return lexCommandEnd
- }
- return lexCommand
-}
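Note: lexCommand and the removed states above all follow the state-function lexer pattern: each state consumes input, emits tokens, and returns the next state, with a nil return ending the loop. A minimal standalone sketch of that pattern, using toy lexer and token types rather than stred-go's own, and assuming only ASCII words and separators:

package main

import (
	"fmt"
	"unicode"
)

// token and lexer are toy stand-ins, not stred-go's types.
type token struct {
	typ string
	val string
}

type lexer struct {
	input  string
	start  int
	pos    int
	tokens []token
}

// stateFunc is the heart of the pattern: a state returns the next state,
// and a nil return ends the lexing loop.
type stateFunc func(*lexer) stateFunc

func (l *lexer) emit(typ string) {
	l.tokens = append(l.tokens, token{typ, l.input[l.start:l.pos]})
	l.start = l.pos
}

// lexWord reads a run of letters and emits it as a "word" token.
func lexWord(l *lexer) stateFunc {
	for l.pos < len(l.input) && unicode.IsLetter(rune(l.input[l.pos])) {
		l.pos++
	}
	if l.pos > l.start {
		l.emit("word")
	}
	if l.pos >= len(l.input) {
		return nil
	}
	return lexSep
}

// lexSep skips anything that is not a letter without emitting a token.
func lexSep(l *lexer) stateFunc {
	for l.pos < len(l.input) && !unicode.IsLetter(rune(l.input[l.pos])) {
		l.pos++
	}
	l.start = l.pos
	if l.pos >= len(l.input) {
		return nil
	}
	return lexWord
}

func main() {
	l := &lexer{input: "substitute every value"}
	// The run loop: call the current state until one returns nil.
	for state := stateFunc(lexWord); state != nil; {
		state = state(l)
	}
	fmt.Println(l.tokens) // [{word substitute} {word every} {word value}]
}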
diff --git a/main/parse.go b/main/parse.go
index e0272e8..e9dd012 100644
--- a/main/parse.go
+++ b/main/parse.go
@@ -2,9 +2,7 @@ package main
import (
"strings"
- "strconv"
"fmt"
- "main/walk"
"main/subex"
)
@@ -42,167 +40,6 @@ var segmentTokens map[TokenType]bool = map[TokenType]bool {
TokenLBrack: true,
}
-func (p *parser) parsePathPatternFilter(minPower int) PathFilterAST {
- var lhs PathFilterAST
- token := p.next()
- switch token.typ {
- case TokenHash:
- stringIndex := p.next()
- if stringIndex.typ != TokenPatternStringIndex {
- panic("Expected string index after # in pattern")
- }
- lhs = StringSegmentPathFilterAST{stringIndex.val}
- case TokenAt:
- intIndex := p.next()
- if intIndex.typ != TokenPatternIntegerIndex {
- panic("Expected integer index after @ in pattern")
- }
- index, err := strconv.Atoi(intIndex.val)
- if err != nil {
- panic("Expected integer index after @ in pattern")
- }
- lhs = IntegerSegmentPathFilterAST{index}
- case TokenDot:
- lhs = AnySegmentPathFilterAST{}
- case TokenLBrack:
- lhs = p.parsePathPatternFilter(0)
- if p.next().typ != TokenRBrack {
- panic("Expected ] in path filter")
- }
- default:
- panic("Expected path pattern filter segment")
- }
- loop: for {
- token = p.next()
- switch {
- case token.typ == TokenAst && 10 >= minPower:
- lhs = RepeatPathFilterAST {lhs}
- case token.typ == TokenQuestion && 10 >= minPower:
- lhs = OrPathFilterAST{lhs, NonePathFilterAST{}}
- case token.typ == TokenBar && 0 >= minPower:
- lhs = OrPathFilterAST{lhs, p.parsePathPatternFilter(1)}
- case segmentTokens[token.typ] && 2 >= minPower:
- p.rewind(token)
- lhs = SequencePathFilterAST {lhs, p.parsePathPatternFilter(3)}
- default:
- p.rewind(token)
- break loop
- }
- }
- return lhs
-}
-
-func (p *parser) parseFilter(minPower int) Filter {
- var lhs Filter
- token := p.next()
- switch token.typ {
- case TokenHash, TokenAt, TokenDot, TokenLBrack:
- p.rewind(token)
- filterAst := p.parsePathPatternFilter(0)
- lhs = compilePathFilterAST(filterAst)
- case TokenHat:
- lhs = BeginTerminalFilter{}
- case TokenDollar:
- lhs = EndTerminalFilter{}
- case TokenHatDollar:
- lhs = TerminalFilter{}
- case TokenTilde:
- lhs = RootFilter{}
- case TokenLParen:
- lhs = p.parseFilter(0)
- rParen := p.next()
- if rParen.typ != TokenRParen {
- panic("Missing ) in filter")
- }
- default:
- panic("Expected filter")
- }
- loop: for {
- token = p.next()
- switch {
- case token.typ == TokenAnd && 2 >= minPower:
- lhs = AndFilter {lhs, p.parseFilter(3)}
- case token.typ == TokenOr && 0 >= minPower:
- lhs = OrFilter {lhs, p.parseFilter(1)}
- default:
- p.rewind(token)
- break loop
- }
- }
- return lhs
-}
-
-func (p *parser) parseLiterals() (items []walk.WalkItem) {
- var path walk.Path
- var value walk.WalkValue
- loop: for {
- token := p.next()
- switch token.typ {
- case TokenSemicolon, TokenEOF:
- p.rewind(token)
- break loop
- case TokenComma:
- case TokenNullLiteral:
- value = walk.ValueNull{}
- case TokenTrueLiteral:
- value = walk.ValueBool(true)
- case TokenFalseLiteral:
- value = walk.ValueBool(false)
- case TokenNumberLiteral:
- numberLiteral, err := strconv.ParseFloat(token.val, 64)
- if err != nil {
- panic("Error parsing number literal to float64")
- }
- value = walk.ValueNumber(numberLiteral)
- case TokenDoubleQuote:
- stringToken := p.next()
- if stringToken.typ != TokenStringLiteral {
- panic("Expected string literal after \"")
- }
- // TODO: resolve escape characters
- stringLiteral := stringToken.val
- if p.next().typ != TokenDoubleQuote {
- panic("Expected \" after string literal")
- }
- colon := p.next()
- if colon.typ == TokenColon {
- if path != nil {
- panic("Expected value after path:")
- }
- path = walk.Path{stringLiteral}
- } else {
- p.rewind(colon)
- value = walk.ValueString(stringLiteral)
- }
- case TokenTerminalLiteral:
- switch token.val {
- case "{":
- value = walk.MapBegin
- case "}":
- value = walk.MapEnd
- case "[":
- value = walk.ArrayBegin
- case "]":
- value = walk.ArrayEnd
- default:
- panic("Invalid terminal token")
- }
- }
- if value != nil {
- items = append(items, walk.WalkItem {
- Path: path,
- Value: value,
- })
- path = nil
- value = nil
- }
- }
- if path != nil {
- panic("Expected value after path:")
- }
- return items
-}
-
func (p *parser) parseSubex() subex.SubexState {
delim := p.next()
if delim.typ != TokenSubstituteDelimiter {
@@ -246,9 +83,6 @@ func (p *parser) parseBasicCommand(commandChar rune) Command {
subex: subex,
next: next,
}
- case 'i':
- items := p.parseLiterals()
- return PrintLiteralsCommand {items: items}
case 'o':
return NoopCommand{}
case 'x':
@@ -267,21 +101,6 @@ func (p *parser) parseBasicCommand(commandChar rune) Command {
func (p *parser) parseCommand() Command {
token := p.next()
switch token.typ {
- case TokenHash, TokenAt, TokenDot, TokenLParen, TokenHat, TokenDollar, TokenHatDollar, TokenTilde:
- p.rewind(token)
- filter := p.parseFilter(0)
- notToken := p.next()
- if notToken.typ == TokenExclamation {
- filter = NotFilter {filter}
- } else {
- p.rewind(notToken)
- }
- command := p.parseCommand()
- command = FilteredCommand {
- filter: filter,
- command: command,
- }
- return command
case TokenLBrace:
commands := p.parseCommands()
if p.next().typ != TokenRBrace {
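Note: the deleted parsePathPatternFilter and parseFilter are precedence-climbing (Pratt) parsers: each operator has a binding power, and the loop only consumes an operator whose power is at least minPower, recursing with a higher power for its right operand. A small self-contained sketch of that loop over single-digit arithmetic (hypothetical parser, not the stred-go grammar):

package main

import "fmt"

// Precedence climbing over single-character operators and
// single-digit numbers, e.g. "1+2*3-4".
type parser struct {
	tokens []rune
	pos    int
}

func (p *parser) next() rune {
	if p.pos >= len(p.tokens) {
		return 0 // end of input
	}
	r := p.tokens[p.pos]
	p.pos++
	return r
}

func (p *parser) rewind() { p.pos-- }

// power is the binding power of an infix operator; 0 means "not an operator".
func power(op rune) int {
	switch op {
	case '+', '-':
		return 1
	case '*', '/':
		return 2
	}
	return 0
}

// parseExpr keeps consuming operators that bind at least as tightly as
// minPower, recursing with power+1 for the right operand (left associative).
func (p *parser) parseExpr(minPower int) int {
	lhs := int(p.next() - '0') // a single-digit operand
	for {
		op := p.next()
		pw := power(op)
		if pw == 0 || pw < minPower {
			if op != 0 {
				p.rewind()
			}
			break
		}
		rhs := p.parseExpr(pw + 1)
		switch op {
		case '+':
			lhs += rhs
		case '-':
			lhs -= rhs
		case '*':
			lhs *= rhs
		case '/':
			lhs /= rhs
		}
	}
	return lhs
}

func main() {
	p := &parser{tokens: []rune("1+2*3-4")}
	fmt.Println(p.parseExpr(0)) // 3, because * binds tighter than + and -
}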
diff --git a/main/pathfilter.go b/main/pathfilter.go
deleted file mode 100644
index 1af3b6d..0000000
--- a/main/pathfilter.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package main
-
-import (
- "main/walk"
-)
-
-type AnySegmentPathFilter struct {
- next PathFilterState
-}
-func (filter AnySegmentPathFilter) eat(segment walk.PathSegment) map[PathFilterState]struct{} {
- res := make(map[PathFilterState]struct{})
- res[filter.next] = struct{}{}
- return res
-}
-func (filter AnySegmentPathFilter) accept() bool {
- return false
-}
-
-type OrPathFilter struct {
- filters [2]PathFilterState
-}
-func (filter OrPathFilter) eat(segment walk.PathSegment) map[PathFilterState]struct{} {
- res := make(map[PathFilterState]struct{})
- for _, f := range filter.filters {
- for r := range f.eat(segment) {
- res[r] = struct{}{}
- }
- }
- return res
-}
-func (filter OrPathFilter) accept() bool {
- for _, f := range filter.filters {
- if f.accept() {
- return true
- }
- }
- return false
-}
-
-type NonePathFilter struct {}
-func (filter NonePathFilter) eat(segment walk.PathSegment) map[PathFilterState]struct{} {
- return make(map[PathFilterState]struct{})
-}
-func (filter NonePathFilter) accept() bool {
- return true
-}
-
-type StringSegmentPathFilter struct {
- index string
- next PathFilterState
-}
-func (filter StringSegmentPathFilter) eat(segment walk.PathSegment) map[PathFilterState]struct{} {
- s, isString := segment.(string)
- res := make(map[PathFilterState]struct{})
- if isString && s == filter.index {
- res[filter.next] = struct{}{}
- }
- return res
-}
-func (filter StringSegmentPathFilter) accept() bool {
- return false
-}
-
-type IntegerSegmentPathFilter struct {
- index int
- next PathFilterState
-}
-func (filter IntegerSegmentPathFilter) eat(segment walk.PathSegment) map[PathFilterState]struct{} {
- i, isInteger := segment.(int)
- res := make(map[PathFilterState]struct{})
- if isInteger && i == filter.index {
- res[filter.next] = struct{}{}
- }
- return res
-}
-func (filter IntegerSegmentPathFilter) accept() bool {
- return false
-}
-
-type PathFilterState interface {
- eat(walk.PathSegment) map[PathFilterState]struct{}
- accept() bool
-}
diff --git a/main/pathfilterast.go b/main/pathfilterast.go
deleted file mode 100644
index c84b8af..0000000
--- a/main/pathfilterast.go
+++ /dev/null
@@ -1,74 +0,0 @@
-package main
-
-type StringSegmentPathFilterAST struct {
- index string
-}
-func (ast StringSegmentPathFilterAST) compileWith(next PathFilterState) PathFilterState {
- return StringSegmentPathFilter {
- index: ast.index,
- next: next,
- }
-}
-
-type IntegerSegmentPathFilterAST struct {
- index int
-}
-func (ast IntegerSegmentPathFilterAST) compileWith(next PathFilterState) PathFilterState {
- return IntegerSegmentPathFilter {
- index: ast.index,
- next: next,
- }
-}
-
-type RepeatPathFilterAST struct {
- content PathFilterAST
-}
-func (ast RepeatPathFilterAST) compileWith(next PathFilterState) PathFilterState {
- nextGroup := &OrPathFilter{}
- repeatStart := ast.content.compileWith(nextGroup)
- nextGroup.filters = [2]PathFilterState{next, repeatStart}
- return nextGroup
-}
-
-type SequencePathFilterAST struct {
- first PathFilterAST
- second PathFilterAST
-}
-func (ast SequencePathFilterAST) compileWith(next PathFilterState) PathFilterState {
- next = ast.second.compileWith(next)
- next = ast.first.compileWith(next)
- return next
-}
-
-type AnySegmentPathFilterAST struct {}
-func (ast AnySegmentPathFilterAST) compileWith(next PathFilterState) PathFilterState {
- return AnySegmentPathFilter{next: next}
-}
-
-type OrPathFilterAST struct {
- first PathFilterAST
- second PathFilterAST
-}
-func (ast OrPathFilterAST) compileWith(next PathFilterState) PathFilterState {
- return OrPathFilter {
- filters: [2]PathFilterState{
- ast.first.compileWith(next),
- ast.second.compileWith(next),
- },
- }
-}
-
-type NonePathFilterAST struct {}
-func (ast NonePathFilterAST) compileWith(next PathFilterState) PathFilterState {
- return next
-}
-
-type PathFilterAST interface {
- compileWith(PathFilterState) PathFilterState
-}
-
-func compilePathFilterAST(ast PathFilterAST) PathFilter {
- return PathFilter{
- initial: ast.compileWith(NonePathFilter{}),
- }
-}
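Note: taken together, the deleted filter.go, pathfilter.go and pathfilterast.go implemented a small NFA over path segments: compileWith threads the "next" state through each AST node so the pattern becomes a linked chain of states (repetition closes a loop through an Or node), and matching walks the path while tracking the set of live states. A condensed standalone sketch of that technique, using hypothetical names rather than the removed types:

package main

import "fmt"

// state is one node of the compiled matcher: consume one segment, or accept.
type state interface {
	eat(segment string) []state
	accept() bool
}

// acceptState matches the empty remainder of a path.
type acceptState struct{}

func (acceptState) eat(string) []state { return nil }
func (acceptState) accept() bool       { return true }

// segState matches one specific segment, then hands over to next.
type segState struct {
	want string
	next state
}

func (s segState) eat(seg string) []state {
	if seg == s.want {
		return []state{s.next}
	}
	return nil
}
func (segState) accept() bool { return false }

// orState forwards to both branches; it is also used to tie repetition loops.
type orState struct {
	branches [2]state
}

func (s *orState) eat(seg string) []state {
	var out []state
	for _, b := range s.branches {
		out = append(out, b.eat(seg)...)
	}
	return out
}
func (s *orState) accept() bool {
	return s.branches[0].accept() || s.branches[1].accept()
}

// ast nodes compile by threading the continuation: "what matches after me".
type ast interface {
	compileWith(next state) state
}

type segAST struct{ want string }

func (a segAST) compileWith(next state) state { return segState{a.want, next} }

type seqAST struct{ first, second ast }

func (a seqAST) compileWith(next state) state {
	return a.first.compileWith(a.second.compileWith(next))
}

// repeatAST is zero-or-more: the body loops back into an Or node that can
// either exit to next or re-enter the body.
type repeatAST struct{ body ast }

func (a repeatAST) compileWith(next state) state {
	loop := &orState{}
	body := a.body.compileWith(loop)
	loop.branches = [2]state{next, body}
	return loop
}

// matches walks the path, feeding each segment to every live state,
// and accepts if any surviving state accepts at the end.
func matches(initial state, path []string) bool {
	live := []state{initial}
	for _, seg := range path {
		var nextLive []state
		for _, s := range live {
			nextLive = append(nextLive, s.eat(seg)...)
		}
		live = nextLive
	}
	for _, s := range live {
		if s.accept() {
			return true
		}
	}
	return false
}

func main() {
	// Pattern: "users" followed by zero or more "children" segments.
	pattern := seqAST{segAST{"users"}, repeatAST{segAST{"children"}}}
	m := pattern.compileWith(acceptState{})
	fmt.Println(matches(m, []string{"users"}))                         // true
	fmt.Println(matches(m, []string{"users", "children", "children"})) // true
	fmt.Println(matches(m, []string{"users", "name"}))                 // false
}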