From d457fc08189db3b7bee997060cfcb01717cdbbec Mon Sep 17 00:00:00 2001
From: Yuval Moravchick <35838935+uvzz@users.noreply.github.com>
Date: Tue, 11 Jun 2024 23:37:03 +0300
Subject: [PATCH] Token limit fix CVE-2023-49559 (#291)

* Add directive limit to prevent overloading

* Added token limit to parser for query and schema parsing, removed my previous directive limit

* Added token limit to parser for query and schema parsing, removed my previous directive limit

* Update parser/parser.go

* Update parser/parser.go

* Update parser/query.go

* Fix lint

Signed-off-by: Steve Coffman

---------

Signed-off-by: Steve Coffman
Co-authored-by: Yuval Moravchick
Co-authored-by: Steve Coffman
Co-authored-by: Steve Coffman
---
 parser/parser.go      | 14 ++++++++++++++
 parser/parser_test.go |  5 ++++-
 parser/query.go       |  4 ++--
 parser/schema.go      |  3 ++-
 4 files changed, 22 insertions(+), 4 deletions(-)

diff --git a/parser/parser.go b/parser/parser.go
index ef03c41..bfcf7ea 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -1,6 +1,7 @@
 package parser
 
 import (
+	"fmt"
 	"strconv"
 
 	"github.com/vektah/gqlparser/v2/ast"
@@ -20,6 +21,13 @@ type parser struct {
 
 	comment          *ast.CommentGroup
 	commentConsuming bool
+
+	tokenCount    int
+	maxTokenLimit int
+}
+
+func (p *parser) SetMaxTokenLimit(maxToken int) {
+	p.maxTokenLimit = maxToken
 }
 
 func (p *parser) consumeComment() (*ast.Comment, bool) {
@@ -95,6 +103,12 @@ func (p *parser) next() lexer.Token {
 	if p.err != nil {
 		return p.prev
 	}
+	// Increment the token count before reading the next token
+	p.tokenCount++
+	if p.maxTokenLimit != 0 && p.tokenCount > p.maxTokenLimit {
+		p.err = fmt.Errorf("exceeded token limit of %d", p.maxTokenLimit)
+		return p.prev
+	}
 	if p.peeked {
 		p.peeked = false
 		p.comment = nil
diff --git a/parser/parser_test.go b/parser/parser_test.go
index 6b677a9..2bc42fd 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -166,5 +166,8 @@ func TestParserUtils(t *testing.T) {
 }
 
 func newParser(input string) parser {
-	return parser{lexer: lexer.New(&ast.Source{Input: input, Name: "input.graphql"})}
+	return parser{
+		lexer:         lexer.New(&ast.Source{Input: input, Name: "input.graphql"}),
+		maxTokenLimit: 15000, // 15000 is the default value
+	}
 }
diff --git a/parser/query.go b/parser/query.go
index f408e68..7f31b0f 100644
--- a/parser/query.go
+++ b/parser/query.go
@@ -2,14 +2,14 @@ package parser
 
 import (
 	"github.com/vektah/gqlparser/v2/lexer"
-
 	//nolint:revive
 	. "github.com/vektah/gqlparser/v2/ast"
 )
 
 func ParseQuery(source *Source) (*QueryDocument, error) {
 	p := parser{
-		lexer: lexer.New(source),
+		lexer:         lexer.New(source),
+		maxTokenLimit: 0, // 0 is the default value
 	}
 	return p.parseQueryDocument(), p.err
 }
diff --git a/parser/schema.go b/parser/schema.go
index 9b13d0c..f012173 100644
--- a/parser/schema.go
+++ b/parser/schema.go
@@ -20,7 +20,8 @@ func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) {
 
 func ParseSchema(source *Source) (*SchemaDocument, error) {
 	p := parser{
-		lexer: lexer.New(source),
+		lexer:         lexer.New(source),
+		maxTokenLimit: 15000, // default value
 	}
 	sd, err := p.parseSchemaDocument(), p.err
 	if err != nil {