Commit: Explicitly make collate ignore the merge timeout

PragTob committed Jan 3, 2021
1 parent 08d82c1 commit 04c3019
Showing 4 changed files with 123 additions and 17 deletions.
lib/simplecov.rb: 2 additions & 2 deletions
@@ -63,6 +63,7 @@ def start(profile = nil, &block)
 
     #
     # Collate a series of SimpleCov result files into a single SimpleCov output.
+    #
     # You can optionally specify configuration with a block:
     #   SimpleCov.collate Dir["simplecov-resultset-*/.resultset.json"]
     #    OR
@@ -86,8 +87,7 @@ def collate(result_filenames, profile = nil, &block)
       initial_setup(profile, &block)
 
       # Use the ResultMerger to produce a single, merged result, ready to use.
-      # TODO: Did/does collate ignore old results? It probably shouldn't, right?
-      @result = ResultMerger.merge_and_store(*result_filenames)
+      @result = ResultMerger.merge_and_store(*result_filenames, ignore_timeout: true)
 
       run_exit_tasks!
     end
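The user-visible effect: SimpleCov.collate now always merges whatever resultsets it is handed, regardless of their age. A minimal usage sketch, with an illustrative filter that is not part of this commit:

    require "simplecov"

    # Merge the per-job resultsets of a parallel CI run into one report.
    # Results older than SimpleCov.merge_timeout are no longer dropped here,
    # because collate passes ignore_timeout: true to the ResultMerger.
    SimpleCov.collate Dir["simplecov-resultset-*/.resultset.json"] do
      add_filter "/spec/"
    end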
lib/simplecov/result_merger.rb: 15 additions & 11 deletions
@@ -19,13 +19,13 @@ def resultset_writelock
         File.join(SimpleCov.coverage_path, ".resultset.json.lock")
       end
 
-      def merge_and_store(*file_paths)
-        result = merge_results(*file_paths)
+      def merge_and_store(*file_paths, ignore_timeout: false)
+        result = merge_results(*file_paths, ignore_timeout: ignore_timeout)
         store_result(result) if result
         result
       end
 
-      def merge_results(*file_paths)
+      def merge_results(*file_paths, ignore_timeout: false)
         # It is intentional here that files are only read in and parsed one at a time.
         #
         # In big CI setups you might deal with 100s of CI jobs and each one producing Megabytes
@@ -34,19 +34,22 @@ def merge_results(*file_paths)
         #
         # For similar reasons a SimpleCov::Result is only created in the end as that'd create
         # even more data especially when it also reads in all source files.
-        initial_memo = valid_results(file_paths.shift)
+        initial_memo = valid_results(file_paths.shift, ignore_timeout: ignore_timeout)
 
         command_names, coverage = file_paths.reduce(initial_memo) do |memo, file_path|
-          merge_coverage(memo, valid_results(file_path))
+          merge_coverage(memo, valid_results(file_path, ignore_timeout: ignore_timeout))
         end
 
-        SimpleCov::Result.new(coverage, command_name: Array(command_names).sort.join(", "))
+        SimpleCov::Result.new(coverage, command_name: command_names.reject(&:empty?).sort.join(", ")) if coverage
       end
 
-      def valid_results(file_path)
-        parsed = parse_file(file_path)
-        valid_results = parsed.select { |_command_name, data| within_merge_timeout?(data) }
-        command_plus_coverage = valid_results.map { |command_name, data| [[command_name], adapt_result(data.fetch("coverage"))] }
+      def valid_results(file_path, ignore_timeout: false)
+        results = parse_file(file_path)
+        results = results.select { |_command_name, data| within_merge_timeout?(data) } unless ignore_timeout
+
+        command_plus_coverage = results.map do |command_name, data|
+          [[command_name], adapt_result(data.fetch("coverage"))]
+        end
 
         # one file itself _might_ include multiple test runs
         merge_coverage(*command_plus_coverage)
@@ -84,11 +87,12 @@ def time_since_result_creation(data)
       end
 
       def merge_coverage(*results)
+        return [[""], nil] if results.empty?
        return results.first if results.size == 1
 
        results.reduce do |(memo_command, memo_coverage), (command, coverage)|
          # timestamp is dropped here, which is intentional
-          merged_coverage = SimpleCov::Combine::ResultsCombiner.combine(memo_coverage, coverage)
+          merged_coverage = Combine.combine(Combine::ResultsCombiner, memo_coverage, coverage)
          merged_command = memo_command + command
 
          [merged_command, merged_coverage]
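Taken together, the merger's behavior can be sketched as follows (the resultset paths are hypothetical):

    # Default: each parsed resultset is filtered through within_merge_timeout?,
    # so results older than SimpleCov.merge_timeout are dropped before merging.
    SimpleCov::ResultMerger.merge_and_store("job1/.resultset.json", "job2/.resultset.json")

    # What collate now passes: the timeout check is skipped and every
    # parseable resultset is merged, however old it is.
    SimpleCov::ResultMerger.merge_and_store("job1/.resultset.json", "job2/.resultset.json", ignore_timeout: true)

If every result is filtered out, the new empty-results guard makes merge_coverage return [[""], nil], the trailing "if coverage" makes merge_results return nil, and merge_and_store skips storing; the new specs below assert exactly this behavior.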
spec/helper.rb: 5 additions & 1 deletion
@@ -10,7 +10,11 @@
 SimpleCov.coverage_dir("tmp/coverage")
 
 def source_fixture(filename)
-  File.expand_path(File.join(File.dirname(__FILE__), "fixtures", filename))
+  File.join(source_fixture_base_directory, "fixtures", filename)
 end
 
+def source_fixture_base_directory
+  @source_fixture_base_directory ||= File.dirname(__FILE__)
+end
+
 # Taken from http://stackoverflow.com/questions/4459330/how-do-i-temporarily-redirect-stderr-in-ruby
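For illustration, the refactored helper resolves fixture paths like this (sample.rb is one of the fixtures these specs use):

    source_fixture("sample.rb")
    # => File.join(File.dirname(__FILE__), "fixtures", "sample.rb")

Note that File.expand_path is gone, so the result is only as absolute as __FILE__ itself.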
spec/result_merger_spec.rb: 101 additions & 3 deletions
@@ -31,10 +31,22 @@
     }
   end
 
+  let(:merged_resultset_1_and_2) do
+    {
+      source_fixture("sample.rb") => {"lines" => [1, 1, 2, 2, nil, nil, 2, 2, nil, nil]},
+      source_fixture("app/models/user.rb") => {"lines" => [nil, 2, 6, 2, nil, nil, 2, 0, nil, nil]},
+      source_fixture("app/controllers/sample_controller.rb") => {"lines" => [nil, 4, 2, 1, nil, nil, 2, 0, nil, nil]},
+      source_fixture("resultset1.rb") => {"lines" => [1, 1, 1, 1]},
+      source_fixture("parallel_tests.rb") => {"lines" => [nil, nil, nil, 0]},
+      source_fixture("conditionally_loaded_1.rb") => {"lines" => [nil, 0, 1]},
+      source_fixture("resultset2.rb") => {"lines" => [nil, 1, 1, nil]},
+      source_fixture("conditionally_loaded_2.rb") => {"lines" => [nil, 0, 1]}
+    }
+  end
+
   let(:result1) { SimpleCov::Result.new(resultset1, command_name: "result1") }
   let(:result2) { SimpleCov::Result.new(resultset2, command_name: "result2") }
 
   describe "resultset handling" do
     # See GitHub issue #6
     it "returns an empty hash when the resultset cache file is empty" do
@@ -65,12 +77,15 @@
     end
 
     it "returns proper values for merged_result" do
-      expect(SimpleCov::ResultMerger.merged_result.source_files.find { |s| s.filename =~ /user/ }.lines.map(&:coverage)).to eq([nil, 2, 6, 2, nil, nil, 2, 0, nil, nil])
+      result = SimpleCov::ResultMerger.merged_result
+
+      expect_resultset_1_and_2_merged(result.to_hash)
     end
 
     context "with second result way above the merge_timeout" do
+      let(:result2) { outdated(super()) }
+
       before do
-        result2.created_at = Time.now - 172_800 # two days ago
         SimpleCov::ResultMerger.store_result(result2)
       end
 
@@ -85,6 +100,73 @@
   end
 
   describe ".merge_and_store" do
+    let(:resultset_prefix) { "test_resultset" }
+    let(:resultset1_path) { "#{resultset_prefix}1.json" }
+    let(:resultset2_path) { "#{resultset_prefix}2.json" }
+
+    describe "merging behavior" do
+      before :each do
+        store_result(result1, path: resultset1_path)
+        store_result(result2, path: resultset2_path)
+      end
+
+      after :each do
+        FileUtils.rm Dir.glob("#{resultset_prefix}*.json")
+      end
+
+      context "2 normal results" do
+        it "correctly merges the 2 results" do
+          result = SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path)
+
+          expect_resultset_1_and_2_merged(result.to_hash)
+        end
+
+        it "has the result stored" do
+          SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path)
+
+          expect_resultset_1_and_2_merged(SimpleCov::ResultMerger.read_resultset)
+        end
+      end
+
+      context "1 resultset is outdated" do
+        let(:result1) { outdated(super()) }
+
+        it "completely omits the result from the merge" do
+          result_hash = SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path).to_hash
+
+          expect(result_hash.keys).to eq ["result2"]
+
+          merged_coverage = result_hash.fetch("result2").fetch("coverage")
+          expect(merged_coverage).to eq(resultset2)
+        end
+
+        it "includes it when we say ignore_timeout: true" do
+          result_hash = SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path, ignore_timeout: true).to_hash
+
+          expect_resultset_1_and_2_merged(result_hash)
+        end
+      end
+
+      context "both resultsets outdated" do
+        let(:result1) { outdated(super()) }
+        let(:result2) { outdated(super()) }
+
+        it "completely omits the result from the merge" do
+          allow(SimpleCov::ResultMerger).to receive(:store)
+
+          result = SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path)
+
+          expect(result).to eq nil
+          expect(SimpleCov::ResultMerger).not_to have_received(:store)
+        end
+
+        it "includes both when we say ignore_timeout: true" do
+          result_hash = SimpleCov::ResultMerger.merge_and_store(resultset1_path, resultset2_path, ignore_timeout: true).to_hash
+
+          expect_resultset_1_and_2_merged(result_hash)
+        end
+      end
+    end
+
     context "pre 0.18 result format" do
       let(:file_path) { "old_resultset.json" }
       let(:content) { {source_fixture("three.rb") => [nil, 1, 2]} }
@@ -197,4 +279,20 @@
       expect(file.read).to eq("process 1\nprocess 2\n")
     end
   end
+
+  private
+
+  def store_result(result, path:)
+    File.open(path, "w+") { |f| f.puts JSON.pretty_generate(result.to_hash) }
+  end
+
+  def outdated(result)
+    result.created_at = Time.now - 172_800
+    result
+  end
+
+  def expect_resultset_1_and_2_merged(result_hash)
+    merged_coverage = result_hash.fetch("result1, result2").fetch("coverage")
+    expect(merged_coverage).to eq(merged_resultset_1_and_2)
+  end
 end
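The new private helpers round-trip results through SimpleCov's post-0.18 resultset format. Shown as a Ruby hash (on disk it is JSON via JSON.pretty_generate; the path and timestamp here are illustrative), store_result(result1, path: "test_resultset1.json") writes roughly:

    {
      "result1" => {
        "coverage" => {
          "/path/to/spec/fixtures/sample.rb" => {"lines" => [1, 1, 1, 1, nil, nil, 1, 1, nil, nil]}
        },
        "timestamp" => 1609286400
      }
    }

After merging, the top-level key is the joined command name, which is why expect_resultset_1_and_2_merged fetches "result1, result2". The merged_resultset_1_and_2 fixture above is the per-line sum of the two inputs: a line hit once in each result shows a count of 2, while nil (never coverable) lines stay nil.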
