Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion lib/mongo/collection/view/iterable.rb
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def each
# If a query with a limit is performed, the query cache will
# re-use results from an earlier query with the same or larger
# limit, and then impose the lower limit during iteration.
limit_for_cached_query = respond_to?(:limit) ? limit : nil
limit_for_cached_query = respond_to?(:limit) ? QueryCache.normalized_limit(limit) : nil
end

if block_given?
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can't comment on line 89 because it's not in the diff, but I'll ask on this one.

If we call each without a block, it'll just return an Enumerable, which won't have the cached limit applied.

This case probably should be handled by always doing the positive case, and then calling to_enum on the result of it

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

line 79 could end up being something like

result = if use_query_cache?
  @cursor.limited_to(limit) # is there a case where limit doesn't exist? CachingCursor inherits Cursor which implements limit, so don't see why we have the `respond_to?(:limit)` check
else
  @cursor
end

if block_given? 
  # iterate result and yield 
else
  result.to_enum
end

Just ideas at the moment, let me know what you think

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

cc @p-mongo (not sure if you'd normally get notifications for these comments on someone else's PR)

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do @mikebaldry , but thank you for the highlight. We'll discuss this PR with the team to figure out the best course of action.

Expand Down
14 changes: 12 additions & 2 deletions lib/mongo/query_cache.rb
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,8 @@ def set(cursor, **opts)
#
# @api private
def get(**opts)
limit = opts[:limit]
limit = normalized_limit(opts[:limit])

_namespace_key = namespace_key(**opts)
_cache_key = cache_key(**opts)

Expand All @@ -189,7 +190,7 @@ def get(**opts)
caching_cursor = namespace_hash[_cache_key]
return nil unless caching_cursor

caching_cursor_limit = caching_cursor.view.limit
caching_cursor_limit = normalized_limit(caching_cursor.view.limit)

# There are two scenarios in which a caching cursor could fulfill the
# query:
Expand All @@ -199,6 +200,7 @@ def get(**opts)
#
# Otherwise, return nil because the stored cursor will not satisfy
# the query.

if limit && (caching_cursor_limit.nil? || caching_cursor_limit >= limit)
caching_cursor
elsif limit.nil? && caching_cursor_limit.nil?
Expand All @@ -208,6 +210,14 @@ def get(**opts)
end
end

def normalized_limit(limit)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this would be more at home under Cursor somewhere, thoughts?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Though a limit doesn't require creation of a cursor.

I would prefer collection to not reference QueryCache for implementation parts.

Copy link
Contributor Author

@mikebaldry mikebaldry Apr 14, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I did consider CachingCursor exposing a method to get a limited set of values and handling the normalization internally there, some how. Didn't get that far though, happy to attempt it

return nil unless limit
  # For the purposes of caching, a limit of 0 means no limit, because MongoDB treats it that way.
return nil if limit == 0
  # For the purposes of caching, a negative limit is the same as a positive limit.
limit.abs
end

private

def cache_key(**opts)
Expand Down
159 changes: 159 additions & 0 deletions spec/integration/query_cache_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -345,18 +345,69 @@

it 'uses the cache' do
results_limit_5 = authorized_collection.find.limit(5).to_a
results_limit_negative_5 = authorized_collection.find.limit(-5).to_a
results_limit_3 = authorized_collection.find.limit(3).to_a
results_limit_negative_3 = authorized_collection.find.limit(-3).to_a
results_no_limit = authorized_collection.find.to_a
results_limit_0 = authorized_collection.find.limit(0).to_a


expect(results_limit_5.length).to eq(5)
expect(results_limit_5.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4])

expect(results_limit_negative_5.length).to eq(5)
expect(results_limit_negative_5.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4])

expect(results_limit_3.length).to eq(3)
expect(results_limit_3.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results_limit_negative_3.length).to eq(3)
expect(results_limit_negative_3.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results_no_limit.length).to eq(10)
expect(results_no_limit.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])

expect(results_limit_0.length).to eq(10)
expect(results_limit_0.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])

expect(events.length).to eq(1)
end
end

context 'when the first query has a 0 limit' do
before do
authorized_collection.find.limit(0).to_a
end

it 'uses the cache' do
results_limit_5 = authorized_collection.find.limit(5).to_a
results_limit_negative_5 = authorized_collection.find.limit(-5).to_a
results_limit_3 = authorized_collection.find.limit(3).to_a
results_limit_negative_3 = authorized_collection.find.limit(-3).to_a
results_no_limit = authorized_collection.find.to_a
results_limit_0 = authorized_collection.find.limit(0).to_a

expect(results_limit_5.length).to eq(5)
expect(results_limit_5.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4])

expect(results_limit_negative_5.length).to eq(5)
expect(results_limit_negative_5.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4])


expect(results_limit_3.length).to eq(3)
expect(results_limit_3.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results_limit_negative_3.length).to eq(3)
expect(results_limit_negative_3.map { |r| r["test"] }).to eq([0, 1, 2])


expect(results_no_limit.length).to eq(10)
expect(results_no_limit.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])


expect(results_limit_0.length).to eq(10)
expect(results_limit_0.map { |r| r["test"] }).to eq([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])

expect(events.length).to eq(1)
end
end
Expand Down Expand Up @@ -391,6 +442,21 @@
end
end

context 'and two queries are performed with a larger negative limit' do
it 'uses the query cache for the third query' do
results1 = authorized_collection.find.limit(-3).to_a
results2 = authorized_collection.find.limit(-3).to_a

expect(results1.length).to eq(3)
expect(results1.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results2.length).to eq(3)
expect(results2.map { |r| r["test"] }).to eq([0, 1, 2])

expect(events.length).to eq(2)
end
end

context 'and the second query has a smaller limit' do
let(:results) { authorized_collection.find.limit(1).to_a }

Expand All @@ -401,6 +467,99 @@
end
end

context 'and the second query has a smaller negative limit' do
let(:results) { authorized_collection.find.limit(-1).to_a }

it 'uses the cached query' do
expect(results.count).to eq(1)
expect(results.first["test"]).to eq(0)
expect(events.length).to eq(1)
end
end

context 'and the second query has no limit' do
it 'queries again' do
expect(authorized_collection.find.to_a.count).to eq(10)
expect(events.length).to eq(2)
end
end
end

context 'when the first query has a negative limit' do
before do
authorized_collection.find.limit(-2).to_a
end

context 'and the second query has a larger limit' do
let(:results) { authorized_collection.find.limit(3).to_a }

it 'queries again' do
expect(results.length).to eq(3)
expect(results.map { |result| result["test"] }).to eq([0, 1, 2])
expect(events.length).to eq(2)
end
end

context 'and the second query has a larger negative limit' do
let(:results) { authorized_collection.find.limit(-3).to_a }

it 'queries again' do
expect(results.length).to eq(3)
expect(results.map { |result| result["test"] }).to eq([0, 1, 2])
expect(events.length).to eq(2)
end
end

context 'and two queries are performed with a larger limit' do
it 'uses the query cache for the third query' do
results1 = authorized_collection.find.limit(3).to_a
results2 = authorized_collection.find.limit(3).to_a

expect(results1.length).to eq(3)
expect(results1.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results2.length).to eq(3)
expect(results2.map { |r| r["test"] }).to eq([0, 1, 2])

expect(events.length).to eq(2)
end
end

context 'and two queries are performed with a larger negative limit' do
it 'uses the query cache for the third query' do
results1 = authorized_collection.find.limit(-3).to_a
results2 = authorized_collection.find.limit(-3).to_a

expect(results1.length).to eq(3)
expect(results1.map { |r| r["test"] }).to eq([0, 1, 2])

expect(results2.length).to eq(3)
expect(results2.map { |r| r["test"] }).to eq([0, 1, 2])

expect(events.length).to eq(2)
end
end

context 'and the second query has a smaller limit' do
let(:results) { authorized_collection.find.limit(1).to_a }

it 'uses the cached query' do
expect(results.count).to eq(1)
expect(results.first["test"]).to eq(0)
expect(events.length).to eq(1)
end
end

context 'and the second query has a smaller negative limit' do
let(:results) { authorized_collection.find.limit(-1).to_a }

it 'uses the cached query' do
expect(results.count).to eq(1)
expect(results.first["test"]).to eq(0)
expect(events.length).to eq(1)
end
end

context 'and the second query has no limit' do
it 'queries again' do
expect(authorized_collection.find.to_a.count).to eq(10)
Expand Down
Loading