Support graphql-ruby's new tracing API (backwards compatible) #66

Merged: 5 commits, Mar 17, 2023
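For orientation, here is a minimal sketch of the two ways a schema can hook up graphql-metrics after this change. The schema names are hypothetical, and the `trace_with` call assumes a graphql-ruby version that exposes the new trace-module API; treat this as an illustration, not code from this PR.

```ruby
require "graphql"
require "graphql/metrics"

# Hypothetical schema using the new tracing API supported by this PR.
class NewApiSchema < GraphQL::Schema
  # Installs the new GraphQL::Metrics::Trace module (lib/graphql/metrics/trace.rb below).
  trace_with(GraphQL::Metrics::Trace)
end

# Hypothetical schema sticking with the legacy API; still supported because
# GraphQL::Metrics::Tracer now includes the Trace module internally.
class LegacyApiSchema < GraphQL::Schema
  tracer(GraphQL::Metrics::Tracer.new)
end
```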
2 changes: 1 addition & 1 deletion .github/workflows/ruby.yml
@@ -7,7 +7,7 @@ jobs:
matrix:
os: [ubuntu-latest]
ruby: ['2.7', '3.0', '3.1', '3.2']
gemfile: ['graphql_1.13', 'graphql_2.0']
gemfile: ['graphql_1.13', 'graphql_2.0', 'graphql_head']
runs-on: ${{ matrix.os }}
env:
BUNDLE_GEMFILE: ${{ github.workspace }}/gemfiles/${{ matrix.gemfile }}.gemfile
5 changes: 5 additions & 0 deletions gemfiles/graphql_head.gemfile
@@ -0,0 +1,5 @@
# frozen_string_literal: true

eval_gemfile("../Gemfile")

gem("graphql", github: "rmosolgo/graphql-ruby", branch: "master")
1 change: 1 addition & 0 deletions lib/graphql/metrics.rb
@@ -3,6 +3,7 @@
require "concurrent"
require "graphql/metrics/version"
require "graphql/metrics/instrumentation"
require "graphql/metrics/trace"
require "graphql/metrics/tracer"
require "graphql/metrics/analyzer"

192 changes: 192 additions & 0 deletions lib/graphql/metrics/trace.rb
@@ -0,0 +1,192 @@
# frozen_string_literal: true

module GraphQL
module Metrics
module Trace
def initialize(**_rest)
super
context = (@multiplex || @query).context
@skip_tracing = context&.fetch(GraphQL::Metrics::SKIP_GRAPHQL_METRICS_ANALYSIS, false)
end

# NOTE: These methods come from the graphql ruby gem and are in "chronological" order based on the phases
# of execution of the graphql-ruby gem, though old versions of the gem aren't always consistent about this (see
# https://github.com/rmosolgo/graphql-ruby/issues/3393). Most of them can be run multiple times when
# multiplexing multiple queries.

# wraps everything below this line; only run once
def execute_multiplex(multiplex:, &block)
return yield if @skip_tracing
capture_multiplex_start_time(&block)
end

# may not trigger if the query is passed in pre-parsed
def lex(query_string:, &block)
return yield if @skip_tracing
capture_lexing_time(&block)
end

# may not trigger if the query is passed in pre-parsed
def parse(query_string:, &block)
return yield if @skip_tracing
capture_parsing_time(&block)
end

def validate(query:, validate:, &block)
return yield if @skip_tracing
capture_validation_time(query.context, &block)
end

# wraps all `analyze_query`s; only run once
def analyze_multiplex(multiplex:, &block)
return yield if @skip_tracing
# Ensures that we reset potentially long-lived PreContext objects between multiplexes. We reset at this point
# since all parsing and validation will be done by this point, and a GraphQL::Query::Context will exist.
pre_context.reset
yield
end

def analyze_query(query:, &block)
return yield if @skip_tracing
capture_analysis_time(query.context, &block)
end

def execute_query(query:, &block)
return yield if @skip_tracing
capture_query_start_time(query.context, &block)
end

def execute_field(field:, query:, ast_node:, arguments:, object:, &block)
return yield if @skip_tracing || query.context[SKIP_FIELD_AND_ARGUMENT_METRICS]
return yield unless GraphQL::Metrics.timings_capture_enabled?(query.context)
trace_field(GraphQL::Metrics::INLINE_FIELD_TIMINGS, query, &block)
end

def execute_field_lazy(field:, query:, ast_node:, arguments:, object:, &block)
return yield if @skip_tracing || query.context[SKIP_FIELD_AND_ARGUMENT_METRICS]
return yield unless GraphQL::Metrics.timings_capture_enabled?(query.context)
trace_field(GraphQL::Metrics::LAZY_FIELD_TIMINGS, query, &block)
end

private

PreContext = Struct.new(
:multiplex_start_time,
:multiplex_start_time_monotonic,
:parsing_start_time_offset,
:parsing_duration,
:lexing_start_time_offset,
:lexing_duration
) do
def reset
self[:multiplex_start_time] = nil
self[:multiplex_start_time_monotonic] = nil
self[:parsing_start_time_offset] = nil
self[:parsing_duration] = nil
self[:lexing_start_time_offset] = nil
self[:lexing_duration] = nil
end
end

def pre_context
# NOTE: This is used to store timings from lexing, parsing, validation, before we have a context to store
# values in. Uses thread-safe Concurrent::ThreadLocalVar to store a set of values per thread.
@pre_context ||= Concurrent::ThreadLocalVar.new(PreContext.new)
@pre_context.value
end

def capture_multiplex_start_time
pre_context.multiplex_start_time = GraphQL::Metrics.current_time
pre_context.multiplex_start_time_monotonic = GraphQL::Metrics.current_time_monotonic

yield
end

def capture_lexing_time
timed_result = GraphQL::Metrics.time { yield }

pre_context.lexing_start_time_offset = timed_result.start_time
pre_context.lexing_duration = timed_result.duration

timed_result.result
end

def capture_parsing_time
timed_result = GraphQL::Metrics.time { yield }

pre_context.parsing_start_time_offset = timed_result.start_time
pre_context.parsing_duration = timed_result.duration

timed_result.result
end

# Also consolidates parsing timings (if any) from the `pre_context`
def capture_validation_time(context)
# Queries may already be lexed and parsed before execution (whether a single query or multiplex).
# If we don't have those values, use some sane defaults.
if pre_context.lexing_duration.nil?
pre_context.lexing_start_time_offset = pre_context.multiplex_start_time
pre_context.lexing_duration = 0.0
end
if pre_context.parsing_duration.nil?
pre_context.parsing_start_time_offset = pre_context.multiplex_start_time
pre_context.parsing_duration = 0.0
end

timed_result = GraphQL::Metrics.time(pre_context.multiplex_start_time_monotonic) { yield }

ns = context.namespace(CONTEXT_NAMESPACE)

ns[MULTIPLEX_START_TIME] = pre_context.multiplex_start_time
ns[MULTIPLEX_START_TIME_MONOTONIC] = pre_context.multiplex_start_time_monotonic
ns[LEXING_START_TIME_OFFSET] = pre_context.lexing_start_time_offset
ns[LEXING_DURATION] = pre_context.lexing_duration
ns[PARSING_START_TIME_OFFSET] = pre_context.parsing_start_time_offset
ns[PARSING_DURATION] = pre_context.parsing_duration
ns[VALIDATION_START_TIME_OFFSET] = timed_result.time_since_offset
ns[VALIDATION_DURATION] = timed_result.duration

timed_result.result
end

def capture_analysis_time(context)
ns = context.namespace(CONTEXT_NAMESPACE)

timed_result = GraphQL::Metrics.time(ns[MULTIPLEX_START_TIME_MONOTONIC]) { yield }

ns[ANALYSIS_START_TIME_OFFSET] = timed_result.time_since_offset
ns[ANALYSIS_DURATION] = timed_result.duration

timed_result.result
end

def capture_query_start_time(context)
ns = context.namespace(CONTEXT_NAMESPACE)
ns[QUERY_START_TIME] = GraphQL::Metrics.current_time
ns[QUERY_START_TIME_MONOTONIC] = GraphQL::Metrics.current_time_monotonic

yield
end

def trace_field(context_key, query)
ns = query.context.namespace(CONTEXT_NAMESPACE)
offset_time = ns[GraphQL::Metrics::QUERY_START_TIME_MONOTONIC]
start_time = GraphQL::Metrics.current_time_monotonic

result = yield

duration = GraphQL::Metrics.current_time_monotonic - start_time
time_since_offset = start_time - offset_time if offset_time

path_excluding_numeric_indicies = query.context[:current_path].select { |p| p.is_a?(String) }
ns[context_key][path_excluding_numeric_indicies] ||= []
ns[context_key][path_excluding_numeric_indicies] << {
start_time_offset: time_since_offset, duration: duration
}

result
end
end
end
end
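Because the new `Trace#initialize` above reads the `SKIP_GRAPHQL_METRICS_ANALYSIS` flag from the query (or multiplex) context, callers can still opt individual queries out of metrics collection. A hedged usage sketch, reusing the hypothetical `NewApiSchema` from the earlier example:

```ruby
# Skips all graphql-metrics tracing for this one query; every hook above
# early-returns via the @skip_tracing guard.
result = NewApiSchema.execute(
  "{ __typename }",
  context: { GraphQL::Metrics::SKIP_GRAPHQL_METRICS_ANALYSIS => true }
)
```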
127 changes: 7 additions & 120 deletions lib/graphql/metrics/tracer.rb
@@ -19,6 +19,12 @@ class Tracer
GRAPHQL_GEM_TRACING_LAZY_FIELD_KEY = 'execute_field_lazy'
]

include GraphQL::Metrics::Trace

def initialize
# no-op, but don't want the behavior from GraphQL::Metrics::Trace
end

def trace(key, data, &block)
# NOTE: Context doesn't exist yet during lexing, parsing.
context = data[:query]&.context
@@ -54,130 +60,11 @@ def trace(key, data, &block)
GraphQL::Metrics::LAZY_FIELD_TIMINGS
end

trace_field(context_key, data, &block)
trace_field(context_key, data[:query], &block)
else
return yield
end
end

private

PreContext = Struct.new(
:multiplex_start_time,
:multiplex_start_time_monotonic,
:parsing_start_time_offset,
:parsing_duration,
:lexing_start_time_offset,
:lexing_duration
) do
def reset
self[:multiplex_start_time] = nil
self[:multiplex_start_time_monotonic] = nil
self[:parsing_start_time_offset] = nil
self[:parsing_duration] = nil
self[:lexing_start_time_offset] = nil
self[:lexing_duration] = nil
end
end

def pre_context
# NOTE: This is used to store timings from lexing, parsing, validation, before we have a context to store
# values in. Uses thread-safe Concurrent::ThreadLocalVar to store a set of values per thread.
@pre_context ||= Concurrent::ThreadLocalVar.new(PreContext.new)
@pre_context.value
end

def capture_multiplex_start_time
pre_context.multiplex_start_time = GraphQL::Metrics.current_time
pre_context.multiplex_start_time_monotonic = GraphQL::Metrics.current_time_monotonic

yield
end

def capture_lexing_time
timed_result = GraphQL::Metrics.time { yield }

pre_context.lexing_start_time_offset = timed_result.start_time
pre_context.lexing_duration = timed_result.duration

timed_result.result
end

def capture_parsing_time
timed_result = GraphQL::Metrics.time { yield }

pre_context.parsing_start_time_offset = timed_result.start_time
pre_context.parsing_duration = timed_result.duration

timed_result.result
end

# Also consolidates parsing timings (if any) from the `pre_context`
def capture_validation_time(context)
# Queries may already be lexed and parsed before execution (whether a single query or multiplex).
# If we don't have those values, use some sane defaults.
if pre_context.lexing_duration.nil?
pre_context.lexing_start_time_offset = pre_context.multiplex_start_time
pre_context.lexing_duration = 0.0
end
if pre_context.parsing_duration.nil?
pre_context.parsing_start_time_offset = pre_context.multiplex_start_time
pre_context.parsing_duration = 0.0
end

timed_result = GraphQL::Metrics.time(pre_context.multiplex_start_time_monotonic) { yield }

ns = context.namespace(CONTEXT_NAMESPACE)

ns[MULTIPLEX_START_TIME] = pre_context.multiplex_start_time
ns[MULTIPLEX_START_TIME_MONOTONIC] = pre_context.multiplex_start_time_monotonic
ns[LEXING_START_TIME_OFFSET] = pre_context.lexing_start_time_offset
ns[LEXING_DURATION] = pre_context.lexing_duration
ns[PARSING_START_TIME_OFFSET] = pre_context.parsing_start_time_offset
ns[PARSING_DURATION] = pre_context.parsing_duration
ns[VALIDATION_START_TIME_OFFSET] = timed_result.time_since_offset
ns[VALIDATION_DURATION] = timed_result.duration

timed_result.result
end

def capture_analysis_time(context)
ns = context.namespace(CONTEXT_NAMESPACE)

timed_result = GraphQL::Metrics.time(ns[MULTIPLEX_START_TIME_MONOTONIC]) { yield }

ns[ANALYSIS_START_TIME_OFFSET] = timed_result.time_since_offset
ns[ANALYSIS_DURATION] = timed_result.duration

timed_result.result
end

def capture_query_start_time(context)
ns = context.namespace(CONTEXT_NAMESPACE)
ns[QUERY_START_TIME] = GraphQL::Metrics.current_time
ns[QUERY_START_TIME_MONOTONIC] = GraphQL::Metrics.current_time_monotonic

yield
end

def trace_field(context_key, data)
ns = data[:query].context.namespace(CONTEXT_NAMESPACE)
offset_time = ns[GraphQL::Metrics::QUERY_START_TIME_MONOTONIC]
start_time = GraphQL::Metrics.current_time_monotonic

result = yield

duration = GraphQL::Metrics.current_time_monotonic - start_time
time_since_offset = start_time - offset_time if offset_time

path_excluding_numeric_indicies = data[:path].select { |p| p.is_a?(String) }
ns[context_key][path_excluding_numeric_indicies] ||= []
ns[context_key][path_excluding_numeric_indicies] << {
start_time_offset: time_since_offset, duration: duration
}

result
end
end
end
end
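The net effect of the Tracer change is that both entry points now funnel field events into the shared `trace_field` in `Trace`, with the legacy path passing `data[:query]` instead of the raw `data` hash. As a rough illustration (values invented), the structure accumulated under `INLINE_FIELD_TIMINGS` / `LAZY_FIELD_TIMINGS` in the context namespace is keyed by the field path with numeric list indices stripped:

```ruby
# Illustrative shape only; the timings are made-up numbers.
field_timings = {
  ["posts", "comments", "author"] => [
    { start_time_offset: 0.0012, duration: 0.0003 }, # one entry per field resolution
    { start_time_offset: 0.0018, duration: 0.0002 },
  ],
}
```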