author     Charlie Stanton <charlie@shtanton.xyz>  2022-08-26 18:15:56 +0100
committer  Charlie Stanton <charlie@shtanton.xyz>  2022-08-26 18:15:56 +0100
commit     094c9a8921fb5f54a34d8cdcb924b5dbacd336d8 (patch)
tree       1ceb06246b2d5bd196746de7f300bdfe32a4a18a
parent     ce5c224211a94bfd4c898b51d15febdf2ed9d6f2 (diff)
download   stred-go-094c9a8921fb5f54a34d8cdcb924b5dbacd336d8.tar
Adds a bunch of new path pattern features
- Bracketing in expressions
- OR with |
- Optional with ?
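
For illustration only (the pattern syntax here is inferred from the tokens and parser cases added in this commit, not from any documentation), the new operators should allow path filters along these lines:

    #users@0            the string segment "users" followed by integer index 0
    #users(#id|#name)   brackets and |: "users" followed by either "id" or "name"
    #users#id?          ? makes the previous item optional and binds tighter than sequencing, so only #id is optional
    (#first|#last)*     * now applies to a whole bracketed group rather than only the single preceding segment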
-rw-r--r--  main/lex.go            | 14
-rw-r--r--  main/parse.go          | 93
-rw-r--r--  main/pathfilter.go     |  8
-rw-r--r--  main/pathfilterast.go  | 30
4 files changed, 101 insertions, 44 deletions
diff --git a/main/lex.go b/main/lex.go
index 6977f8a..fdb3b59 100644
--- a/main/lex.go
+++ b/main/lex.go
@@ -115,6 +115,8 @@ const (
 	TokenAt // @
 	TokenDot // .
 	TokenAst // *
+	TokenBar // |
+	TokenQuestion // ?
 	TokenPatternStringIndex // A string index in a pattern
 	TokenPatternIntegerIndex // An integer index in a pattern
 )
@@ -185,6 +187,18 @@ func lexCommand(l *lexer) stateFunc {
 	case '*':
 		l.emit(TokenAst)
 		return lexCommand
+	case '|':
+		l.emit(TokenBar)
+		return lexCommand
+	case '(':
+		l.emit(TokenLParen)
+		return lexCommand
+	case ')':
+		l.emit(TokenRParen)
+		return lexCommand
+	case '?':
+		l.emit(TokenQuestion)
+		return lexCommand
 	case '{':
 		l.emit(TokenLBrace)
 		return lexCommand
diff --git a/main/parse.go b/main/parse.go
index e876010..492b58f 100644
--- a/main/parse.go
+++ b/main/parse.go
@@ -11,11 +11,17 @@ type parser struct {
 	rewinds []Token
 }
 func (p *parser) next() Token {
+	var token Token
 	if len(p.rewinds) == 0 {
-		return <- p.tokenStream
+		token = <- p.tokenStream
+	} else {
+		token = p.rewinds[len(p.rewinds)-1]
+		p.rewinds = p.rewinds[:len(p.rewinds)-1]
+	}
+	if token.typ == TokenErr {
+		fmt.Println(token)
+		panic("Lexing error")
 	}
-	token := p.rewinds[len(p.rewinds)-1]
-	p.rewinds = p.rewinds[:len(p.rewinds)-1]
 	return token
 }
 func (p *parser) rewind(token Token) {
@@ -27,41 +33,61 @@ func (p *parser) peek() Token {
 	return token
 }
 
-// TODO: make a pratt parser
-func (p *parser) parsePathPatternFilter() PathFilterAST {
-	var segments []PathFilterAST
+var segmentTokens map[TokenType]bool = map[TokenType]bool {
+	TokenHash: true,
+	TokenAt: true,
+	TokenDot: true,
+	TokenLParen: true,
+}
+
+func (p *parser) parsePathPatternFilter(minPower int) PathFilterAST {
+	var lhs PathFilterAST
+	token := p.next()
+	switch token.typ {
+	case TokenHash:
+		stringIndex := p.next()
+		if stringIndex.typ != TokenPatternStringIndex {
+			panic("Expected string index after # in pattern")
+		}
+		lhs = StringSegmentPathFilterAST{stringIndex.val}
+	case TokenAt:
+		intIndex := p.next()
+		if intIndex.typ != TokenPatternIntegerIndex {
+			panic("Expected integer index after @ in pattern")
+		}
+		index, err := strconv.Atoi(intIndex.val)
+		if err != nil {
+			panic("Expected integer index after @ in pattern")
+		}
+		lhs = IntegerSegmentPathFilterAST{index}
+	case TokenDot:
+		lhs = AnySegmentPathFilterAST{}
+	case TokenLParen:
+		lhs = p.parsePathPatternFilter(0)
+		if p.next().typ != TokenRParen {
+			panic("Expected )")
+		}
+	default:
+		panic("Expected path pattern filter segment")
+	}
 	loop: for {
-		token := p.next()
-		switch token.typ {
-		case TokenHash:
-			stringIndex := p.next()
-			if stringIndex.typ != TokenPatternStringIndex {
-				panic("Expected string index after # in pattern")
-			}
-			segments = append(segments, StringSegmentPathFilterAST{stringIndex.val})
-		case TokenAt:
-			intIndex := p.next()
-			if intIndex.typ != TokenPatternIntegerIndex {
-				panic("Expected integer index after @ in pattern")
-			}
-			index, err := strconv.Atoi(intIndex.val)
-			if err != nil {
-				panic("Expected integer index after @ in pattern")
-			}
-			segments = append(segments, IntegerSegmentPathFilterAST{index})
-		case TokenDot:
-			segments = append(segments, AnySegmentPathFilterAST{})
-		case TokenAst:
-			if len(segments) == 0 {
-				panic("Invalid * in pattern, * must go after something")
-			}
-			segments[len(segments) - 1] = RepeatPathFilterAST {segments[len(segments)-1]}
+		token = p.next()
+		switch {
+		case token.typ == TokenAst && 10 >= minPower:
+			lhs = RepeatPathFilterAST {lhs}
+		case token.typ == TokenQuestion && 10 >= minPower:
+			lhs = OrPathFilterAST{lhs, NonePathFilterAST{}}
+		case token.typ == TokenBar && 0 >= minPower:
+			lhs = OrPathFilterAST{lhs, p.parsePathPatternFilter(1)}
+		case segmentTokens[token.typ] && 2 >= minPower:
+			p.rewind(token)
+			lhs = SequencePathFilterAST {lhs, p.parsePathPatternFilter(3)}
 		default:
 			p.rewind(token)
 			break loop
 		}
 	}
-	return SequencePathFilterAST {segments}
+	return lhs
 }
 
 // TODO: should only return a single filter
@@ -71,7 +97,7 @@ func (p *parser) parseFilter() []Filter {
 	switch token.typ {
 	case TokenHash, TokenAt, TokenDot:
 		p.rewind(token)
-		filterAst := p.parsePathPatternFilter()
+		filterAst := p.parsePathPatternFilter(0)
 		filters = append(filters, compilePathFilterAST(filterAst))
 		token = p.next()
 	}
@@ -114,7 +140,6 @@ func (p *parser) parseCommand() Command {
 		}
 		return p.parseBasicCommand(commandChar)
 	default:
-		fmt.Println(token)
 		panic("Invalid token, expected command")
 	}
 }
diff --git a/main/pathfilter.go b/main/pathfilter.go
index b64872e..7e21efe 100644
--- a/main/pathfilter.go
+++ b/main/pathfilter.go
@@ -12,10 +12,10 @@ func (filter AnySegmentPathFilter) accept() bool {
 	return false
 }
 
-type GroupPathFilter struct {
-	filters []PathFilterState
+type OrPathFilter struct {
+	filters [2]PathFilterState
 }
-func (filter GroupPathFilter) eat(segment PathSegment) map[PathFilterState]struct{} {
+func (filter OrPathFilter) eat(segment PathSegment) map[PathFilterState]struct{} {
 	res := make(map[PathFilterState]struct{})
 	for _, f := range filter.filters {
 		for r := range f.eat(segment) {
@@ -24,7 +24,7 @@ func (filter GroupPathFilter) eat(segment PathSegment) map[PathFilterState]struc
 	}
 	return res
 }
-func (filter GroupPathFilter) accept() bool {
+func (filter OrPathFilter) accept() bool {
 	for _, f := range filter.filters {
 		if f.accept() {
 			return true
diff --git a/main/pathfilterast.go b/main/pathfilterast.go
index c2ddc7f..c84b8af 100644
--- a/main/pathfilterast.go
+++ b/main/pathfilterast.go
@@ -24,19 +24,19 @@ type RepeatPathFilterAST struct {
 	content PathFilterAST
 }
 func (ast RepeatPathFilterAST) compileWith(next PathFilterState) PathFilterState {
-	nextGroup := &GroupPathFilter{}
+	nextGroup := &OrPathFilter{}
 	repeatStart := ast.content.compileWith(nextGroup)
-	nextGroup.filters = []PathFilterState{next, repeatStart}
+	nextGroup.filters = [2]PathFilterState{next, repeatStart}
 	return nextGroup
 }
 
 type SequencePathFilterAST struct {
-	sequence []PathFilterAST
+	first PathFilterAST
+	second PathFilterAST
 }
 func (ast SequencePathFilterAST) compileWith(next PathFilterState) PathFilterState {
-	for i := len(ast.sequence) - 1; i >= 0; i -= 1 {
-		next = ast.sequence[i].compileWith(next)
-	}
+	next = ast.second.compileWith(next)
+	next = ast.first.compileWith(next)
 	return next
 }
 
@@ -45,6 +45,24 @@ func (ast AnySegmentPathFilterAST) compileWith(next PathFilterState) PathFilterS
 	return AnySegmentPathFilter{next: next}
 }
 
+type OrPathFilterAST struct {
+	first PathFilterAST
+	second PathFilterAST
+}
+func (ast OrPathFilterAST) compileWith(next PathFilterState) PathFilterState {
+	return OrPathFilter {
+		filters: [2]PathFilterState{
+			ast.first.compileWith(next),
+			ast.second.compileWith(next),
+		},
+	}
+}
+
+type NonePathFilterAST struct {}
+func (ast NonePathFilterAST) compileWith(next PathFilterState) PathFilterState {
+	return next
+}
+
 type PathFilterAST interface {
 	compileWith(PathFilterState) PathFilterState
 }
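
The rewritten parsePathPatternFilter is a binding-power (Pratt-style) parser, which is what the removed TODO asked for: | binds loosest (power 0), implicit sequencing of adjacent segments binds tighter (power 2, with its right-hand side parsed at 3), and the postfix * and ? bind tightest (power 10), ? being desugared to OrPathFilterAST{lhs, NonePathFilterAST{}}. Below is a self-contained toy sketch of that loop over single-character segments; its types, tokenisation and printing are invented for the illustration and are not the repository's code.

// Toy illustration of the binding-power scheme in parsePathPatternFilter.
// Only the powers (| = 0, sequencing = 2, postfix * and ? = 10) mirror the
// real parser; everything else is made up for this sketch.
package main

import "fmt"

type ast interface{ String() string }

type seg struct{ name string }     // a single path segment
type seq struct{ left, right ast } // implicit concatenation
type or struct{ left, right ast }  // '|' alternation; also used for '?'
type repeat struct{ inner ast }    // postfix '*'
type none struct{}                 // matches nothing; '?' becomes (x|ε)

func (s seg) String() string    { return s.name }
func (s seq) String() string    { return "(" + s.left.String() + " " + s.right.String() + ")" }
func (o or) String() string     { return "(" + o.left.String() + "|" + o.right.String() + ")" }
func (r repeat) String() string { return r.inner.String() + "*" }
func (none) String() string     { return "ε" }

type parser struct {
	input []rune
	pos   int
}

func (p *parser) peek() rune {
	if p.pos >= len(p.input) {
		return 0
	}
	return p.input[p.pos]
}

// parse reads one prefix item (a segment or a bracketed group), then keeps
// folding in operators whose binding power is at least minPower.
func (p *parser) parse(minPower int) ast {
	var lhs ast
	switch c := p.peek(); {
	case c == '(':
		p.pos++
		lhs = p.parse(0) // anything may appear inside brackets
		if p.peek() != ')' {
			panic("expected )")
		}
		p.pos++
	case c >= 'a' && c <= 'z':
		p.pos++
		lhs = seg{string(c)}
	default:
		panic("expected segment or (")
	}
	for {
		switch c := p.peek(); {
		case c == '*' && 10 >= minPower:
			p.pos++
			lhs = repeat{lhs}
		case c == '?' && 10 >= minPower:
			p.pos++
			lhs = or{lhs, none{}} // optional = lhs or nothing
		case c == '|' && 0 >= minPower:
			p.pos++
			lhs = or{lhs, p.parse(1)} // rhs parsed above 0, so '|' is left-associative
		case (c == '(' || c >= 'a' && c <= 'z') && 2 >= minPower:
			lhs = seq{lhs, p.parse(3)} // implicit sequencing of adjacent items
		default:
			return lhs
		}
	}
}

func main() {
	p := parser{input: []rune("a(b|c)?d*")}
	fmt.Println(p.parse(0)) // ((a ((b|c)|ε)) d*): '?' and '*' bind before sequencing, '|' last
}

Because the right-hand side of | is parsed at power 1 and the right-hand side of a sequence at power 3, both operators come out left-associative, mirroring the recursive calls in the real parser.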
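
At matching time the compiled filter behaves like an NFA: eat returns every state reachable after consuming one path segment and accept marks a final state, so OrPathFilter (the renamed GroupPathFilter, now fixed at two branches) simply unions the successor sets of its branches, and NonePathFilterAST needs no runtime counterpart because compiling it just returns next. The sketch below shows that idea in miniature; every name in it is invented for the illustration, only the eat/accept shape follows this diff, and the matching loop itself is not part of this commit.

// Toy illustration of state-set ("NFA") matching in the style of PathFilterState.
package main

import "fmt"

type state interface {
	eat(segment string) map[state]struct{}
	accept() bool
}

// acceptState: the filter has matched the whole path.
type acceptState struct{}

func (acceptState) eat(string) map[state]struct{} { return map[state]struct{}{} }
func (acceptState) accept() bool                  { return true }

// keyState matches one named segment, then moves to next.
type keyState struct {
	key  string
	next state
}

func (s keyState) eat(segment string) map[state]struct{} {
	if segment == s.key {
		return map[state]struct{}{s.next: {}}
	}
	return map[state]struct{}{}
}
func (keyState) accept() bool { return false }

// orState plays the role of OrPathFilter: try both branches at once.
type orState struct{ branches [2]state }

func (s orState) eat(segment string) map[state]struct{} {
	res := make(map[state]struct{})
	for _, b := range s.branches {
		for r := range b.eat(segment) {
			res[r] = struct{}{}
		}
	}
	return res
}
func (s orState) accept() bool {
	return s.branches[0].accept() || s.branches[1].accept()
}

// match keeps the set of live states while consuming the path segment by segment.
func match(start state, path []string) bool {
	states := map[state]struct{}{start: {}}
	for _, segment := range path {
		next := make(map[state]struct{})
		for s := range states {
			for r := range s.eat(segment) {
				next[r] = struct{}{}
			}
		}
		states = next
	}
	for s := range states {
		if s.accept() {
			return true
		}
	}
	return false
}

func main() {
	// Roughly what #users(#id|#name) would compile to: "users", then "id" or "name".
	end := acceptState{}
	filter := keyState{"users", orState{[2]state{keyState{"id", end}, keyState{"name", end}}}}
	fmt.Println(match(filter, []string{"users", "name"}))  // true
	fmt.Println(match(filter, []string{"users", "email"})) // false
}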