FILTER keyword for latest anchor queries (grammar/lexer/hooks) #149

Merged: 15 commits, Oct 9, 2020
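In short: after this change a WHERE block may end with one or more FILTER clauses of the form FILTER function(?binding), separated by dots; only the latest function is exercised by this PR. A representative query, taken verbatim from the parser tests added below:

select ?a
from ?b
where {
  ?s ?p ?o .
  FILTER latest(?p) .
  FILTER latest(?o)
};

FILTER clauses must come after the last regular graph-pattern clause, and a trailing dot after the last filter is also accepted (see the accept and reject cases in grammar_test.go).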
36 changes: 36 additions & 0 deletions bql/grammar/grammar.go
@@ -327,6 +327,7 @@ func moreClauses() []*Clause {
Elements: []Element{
NewTokenType(lexer.ItemDot),
NewSymbol("CLAUSES"),
NewSymbol("FILTER_CLAUSES"),
},
},
{},
@@ -365,6 +366,33 @@ func clauses() []*Clause {
}
}

func moreFilterClauses() []*Clause {
return []*Clause{
{
Elements: []Element{
NewTokenType(lexer.ItemDot),
NewSymbol("FILTER_CLAUSES"),
},
},
{},
}
}
func filterClauses() []*Clause {
return []*Clause{
{
Elements: []Element{
NewTokenType(lexer.ItemFilter),
NewTokenType(lexer.ItemFilterFunction),
NewTokenType(lexer.ItemLPar),
NewTokenType(lexer.ItemBinding),
NewTokenType(lexer.ItemRPar),
NewSymbol("MORE_FILTER_CLAUSES"),
},
},
{},
}
}

func optionalClauses() []*Clause {
return []*Clause{
{
@@ -1266,6 +1294,8 @@ func BQL() *Grammar {
"MORE_CLAUSES": moreClauses(),
"CLAUSES": clauses(),
"OPTIONAL_CLAUSE": optionalClauses(),
"FILTER_CLAUSES": filterClauses(),
"MORE_FILTER_CLAUSES": moreFilterClauses(),
"SUBJECT_EXTRACT": subjectExtractClauses(),
"SUBJECT_TYPE": subjectTypeClauses(),
"SUBJECT_ID": subjectIDClauses(),
@@ -1400,6 +1430,12 @@ func SemanticBQL() *Grammar {
}
setElementHook(semanticBQL, objSymbols, semantic.WhereObjectClauseHook(), nil)

// Filter clause hook.
filterSymbols := []semantic.Symbol{
"FILTER_CLAUSES",
}
setElementHook(semanticBQL, filterSymbols, semantic.WhereFilterClauseHook(), nil)

// Collect binding variables.
varSymbols := []semantic.Symbol{
"VARS", "VARS_AS", "MORE_VARS", "COUNT_DISTINCT",
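Taken together, the grammar additions above amount to the following shape (an informal EBNF-style sketch of the new productions, not part of the diff; lowercase names stand in for the corresponding lexer tokens):

FILTER_CLAUSES      ::= "filter" filter_function "(" binding ")" MORE_FILTER_CLAUSES | ε
MORE_FILTER_CLAUSES ::= "." FILTER_CLAUSES | ε

Because MORE_CLAUSES now derives FILTER_CLAUSES only after the dot and the remaining CLAUSES, filters can appear only at the end of the WHERE block; and because FILTER_CLAUSES may derive empty, a trailing dot after the last filter is accepted.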
94 changes: 94 additions & 0 deletions bql/grammar/grammar_test.go
@@ -194,6 +194,20 @@ func TestAcceptByParse(t *testing.T) {
?n "_object"@[] ?o};`,
// Show the graphs.
`show graphs;`,
// Test FILTER clause inside WHERE.
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p)
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p) .
FILTER latest(?o)
};`,
// Test optional trailing dot after the last clause inside WHERE.
`select ?a
from ?b
@@ -229,6 +243,19 @@ func TestAcceptByParse(t *testing.T) {
?s ?p ?o .
/u<paul> ?p ?o
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p) .
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p) .
FILTER latest(?o) .
};`,
}
p, err := NewParser(BQL())
if err != nil {
@@ -386,6 +413,46 @@ func TestRejectByParse(t *testing.T) {
where {?n "_subject"@[] ?s.
?n "_predicate"@[] ?p.
?n "_object"@[] ?o};`,
// Test invalid FILTER clause inside WHERE.
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest ?p
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest (?p)
};`,
`select ?a
from ?b
where {
FILTER latest(?p) .
?s ?p ?o
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p) .
/u<paul> ?p ?o
};`,
`select ?a
from ?b
where {
?s ?p ?o .
?s ?p ?o .
FILTER latest(?p) .
/u<paul> ?p ?o
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER late^st(?p)
};`,
// Test invalid trailing dot use inside WHERE.
`select ?a
from ?b
@@ -411,6 +478,19 @@ func TestRejectByParse(t *testing.T) {
?s ?p ?o
/u<paul> ?p ?o
};`,
`select ?a
from ?b
where {
?s ?p ?o
FILTER latest(?p)
};`,
`select ?a
from ?b
where {
?s ?p ?o .
FILTER latest(?p)
FILTER latest(?o)
};`,
}
p, err := NewParser(BQL())
if err != nil {
@@ -565,6 +645,13 @@ func TestAcceptQueryBySemanticParse(t *testing.T) {
`select ?s from ?g where{/_<foo> as ?s ?p "id"@[?foo, ?bar] as ?o} order by ?s;`,
`select ?s as ?a, ?o as ?b, ?o as ?c from ?g where{?s ?p ?o} order by ?a ASC, ?b DESC;`,
`select ?s as ?a, ?o as ?b, ?o as ?c from ?g where{?s ?p ?o} order by ?a ASC, ?b DESC, ?a ASC, ?b DESC, ?c;`,
// Test valid FILTER clause for grammar with hooks.
`select ?p
from ?b
where {
?s ?p ?o .
FILTER latest(?p)
};`,
}
p, err := NewParser(SemanticBQL())
if err != nil {
@@ -596,6 +683,13 @@ func TestRejectByParseAndSemantic(t *testing.T) {
`select ?s as ?a, ?o as ?b, ?o as ?c from ?g where{?s ?p ?o} order by ?a ASC, ?a DESC;`,
// Wrong limit literal.
`select ?s as ?a, ?o as ?b, ?o as ?c from ?g where{?s ?p ?o} LIMIT "true"^^type:bool;`,
// Reject not supported FILTER function.
`select ?p, ?o
from ?test
where {
/u<peter> ?p ?o .
FILTER notSupportedFilterFunction(?p)
};`,
}
p, err := NewParser(SemanticBQL())
if err != nil {
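One detail worth calling out from the last rejected case above: the grammar itself accepts any letters-only name after FILTER, so rejecting notSupportedFilterFunction has to happen in the semantic layer, presumably inside the WhereFilterClauseHook registered in grammar.go. A hypothetical sketch of that kind of check follows; the names supportedFilters and checkFilterFunction and the error text are illustrative, not taken from this PR:

package main

import (
	"fmt"
	"strings"
)

// supportedFilters is hypothetical; only "latest" is exercised by this PR.
var supportedFilters = map[string]bool{"latest": true}

// checkFilterFunction sketches the kind of validation a semantic hook
// would perform on the text of the FILTER_FUNCTION token.
func checkFilterFunction(name string) error {
	if !supportedFilters[strings.ToLower(name)] {
		return fmt.Errorf("filter function %q is not supported", name)
	}
	return nil
}

func main() {
	fmt.Println(checkFilterFunction("latest"))                     // <nil>
	fmt.Println(checkFilterFunction("notSupportedFilterFunction")) // error
}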
35 changes: 35 additions & 0 deletions bql/lexer/lexer.go
@@ -139,6 +139,10 @@ const (
ItemGraphs
// ItemOptional identifies optional graph pattern clauses.
ItemOptional
// ItemFilter represents the filter keyword in BQL.
ItemFilter
// ItemFilterFunction represents a filter function in BQL.
ItemFilterFunction
)

func (tt TokenType) String() string {
@@ -253,6 +257,10 @@ func (tt TokenType) String() string {
return "GRAPHS"
case ItemOptional:
return "OPTIONAL"
case ItemFilter:
return "FILTER"
case ItemFilterFunction:
return "FILTER_FUNCTION"
default:
return "UNKNOWN"
}
@@ -294,6 +302,7 @@ const (
from = "from"
where = "where"
optional = "optional"
filter = "filter"
as = "as"
before = "before"
after = "after"
@@ -402,6 +411,9 @@ func lexToken(l *lexer) stateFn {
return lexPredicateOrLiteral
}
if unicode.IsLetter(r) {
if l.lastTokenType == ItemFilter {
return lexFilterFunction
}
return lexKeyword
}
}
@@ -617,6 +629,10 @@ func lexKeyword(l *lexer) stateFn {
consumeKeyword(l, ItemOptional)
return lexSpace
}
if strings.EqualFold(input, filter) {
consumeKeyword(l, ItemFilter)
return lexSpace
}
if strings.EqualFold(input, typeKeyword) {
consumeKeyword(l, ItemType)
return lexSpace
@@ -648,6 +664,25 @@
return nil
}

// lexFilterFunction lexes a filter function out of the input (used in FILTER clauses).
func lexFilterFunction(l *lexer) stateFn {
l.next()
var nr rune
for {
nr = l.next()
if nr == leftPar {
l.backup()
break
}
if !unicode.IsLetter(nr) {
l.emitError(`invalid rune in filter function: "` + string(nr) + `"; filter functions should be formed only by letters`)
return nil
}
}
l.emit(ItemFilterFunction)
return lexSpace
}

func lexNode(l *lexer) stateFn {
ltID := false
for done := false; !done; {
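To see the scanning rule in lexFilterFunction in isolation: the filter function name is whatever run of letters precedes the opening parenthesis, and any other rune aborts lexing, which is what rejects the late^st case in the parser tests above. The snippet below is a standalone sketch of that rule in plain Go, not badwolf code:

package main

import (
	"fmt"
	"unicode"
)

// scanFilterFunction mirrors lexFilterFunction's rule on a plain string:
// consume letters until '('; any other rune is an error.
func scanFilterFunction(s string) (string, error) {
	for i, r := range s {
		if r == '(' {
			return s[:i], nil
		}
		if !unicode.IsLetter(r) {
			return "", fmt.Errorf("invalid rune in filter function: %q", r)
		}
	}
	return "", fmt.Errorf("missing '(' after filter function name")
}

func main() {
	fmt.Println(scanFilterFunction("latest(?p)"))  // latest <nil>
	fmt.Println(scanFilterFunction("late^st(?p)")) // error on '^'
}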
55 changes: 55 additions & 0 deletions bql/lexer/lexer_test.go
@@ -76,6 +76,8 @@ func TestTokenTypeString(t *testing.T) {
{ItemShow, "SHOW"},
{ItemGraphs, "GRAPHS"},
{ItemOptional, "OPTIONAL"},
{ItemFilter, "FILTER"},
{ItemFilterFunction, "FILTER_FUNCTION"},
{TokenType(-1), "UNKNOWN"},
}

@@ -388,6 +390,29 @@ func TestIndividualTokens(t *testing.T) {
{Type: ItemEOF},
},
},
{
`FILTER latest(?p)`,
[]Token{
{Type: ItemFilter, Text: "FILTER"},
{Type: ItemFilterFunction, Text: "latest"},
{Type: ItemLPar, Text: "("},
{Type: ItemBinding, Text: "?p"},
{Type: ItemRPar, Text: ")"},
{Type: ItemEOF},
},
},
{
`FILTER latest(?p) .`,
[]Token{
{Type: ItemFilter, Text: "FILTER"},
{Type: ItemFilterFunction, Text: "latest"},
{Type: ItemLPar, Text: "("},
{Type: ItemBinding, Text: "?p"},
{Type: ItemRPar, Text: ")"},
{Type: ItemDot, Text: "."},
{Type: ItemEOF},
},
},
}

for _, test := range table {
@@ -578,6 +603,36 @@ func TestValidTokenQuery(t *testing.T) {
ItemHaving, ItemBinding, ItemEQ, ItemTime, ItemSemicolon, ItemEOF,
},
},
{
`select ?s ?p ?o
from ?foo
where {
?s ?p ?o .
FILTER latest(?p)
};`,
[]TokenType{
ItemQuery, ItemBinding, ItemBinding, ItemBinding, ItemFrom, ItemBinding,
ItemWhere, ItemLBracket, ItemBinding, ItemBinding, ItemBinding, ItemDot,
ItemFilter, ItemFilterFunction, ItemLPar, ItemBinding, ItemRPar,
ItemRBracket, ItemSemicolon, ItemEOF,
},
},
{
`select ?s ?p ?o
from ?foo
where {
?s ?p ?o .
FILTER latest(?p) .
FILTER latest(?o)
};`,
[]TokenType{
ItemQuery, ItemBinding, ItemBinding, ItemBinding, ItemFrom, ItemBinding,
ItemWhere, ItemLBracket, ItemBinding, ItemBinding, ItemBinding, ItemDot,
ItemFilter, ItemFilterFunction, ItemLPar, ItemBinding, ItemRPar, ItemDot,
ItemFilter, ItemFilterFunction, ItemLPar, ItemBinding, ItemRPar,
ItemRBracket, ItemSemicolon, ItemEOF,
},
},
}

for _, test := range table {
5 changes: 5 additions & 0 deletions bql/semantic/convert.go
@@ -69,6 +69,11 @@ func (c ConsumedElement) Token() *lexer.Token {
return c.token
}

// String returns a string representation of the ConsumedElement.
func (c ConsumedElement) String() string {
return fmt.Sprintf("{isSymbol=%v, symbol=%s, token=%s}", c.isSymbol, c.symbol, c.token)
}

// ToNode takes the node found by the lexer and converts it into a BadWolf
// node.
func ToNode(ce ConsumedElement) (*node.Node, error) {
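Finally, a minimal sketch of how the new ConsumedElement String method reads in practice. The field names come from the method body above; since they are unexported, a snippet like this would live inside the semantic package itself (for example in a test), and the exact rendering of the embedded token is whatever fmt produces for a *lexer.Token value:

// Inside package semantic: build an element from a lexed FILTER token
// and print it while debugging grammar hooks.
ce := ConsumedElement{
	isSymbol: false,
	token:    &lexer.Token{Type: lexer.ItemFilter, Text: "FILTER"},
}
fmt.Println(ce) // prints something like {isSymbol=false, symbol=, token=&{FILTER FILTER}}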