Commit

Merge pull request #80 from ninoseki/refactoring-specs
refactor: refactoring specs by "rubocop --auto-correct"
ninoseki committed Apr 30, 2019
2 parents 6d07517 + 19d9b43 commit e51c419
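
The message indicates these edits were generated mechanically rather than by hand. The exact invocation is not recorded in the diff, but with the rubocop-rspec extension loaded it would presumably have been something like:

    bundle exec rubocop --auto-correct spec/

The changes below are all consistent with standard rubocop-rspec cops: RSpec/DescribedClass, RSpec/ExampleWording, RSpec/NotToNot, and RSpec/LeadingSubject (each noted under the relevant file).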
Showing 18 changed files with 119,961 additions and 294,502 deletions.
4 changes: 2 additions & 2 deletions spec/attachement_spec.rb
@@ -1,10 +1,10 @@
 # frozen_string_literal: true

 RSpec.describe Miteru::Attachement do
-  subject { Miteru::Attachement.new("https://github.com") }
+  subject { described_class.new("https://github.com") }

   describe "#to_h" do
-    it "should return a hash" do
+    it "returns a hash" do
       hash = subject.to_h
       expect(hash).to be_a(Hash)
       expect(hash.dig(:title)).to eq("github.com")
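
The subject rewrite above is the output of the RSpec/DescribedClass cop: inside an RSpec.describe block, described_class resolves to the class under test, so the spec no longer hard-codes the constant and survives a rename. A minimal sketch of the mechanism (the URL argument is just the one used in this spec):

    RSpec.describe Miteru::Attachement do
      # described_class == Miteru::Attachement within this block
      subject { described_class.new("https://github.com") }

      it "exposes the described class" do
        expect(described_class).to eq(Miteru::Attachement)
      end
    end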
6 changes: 3 additions & 3 deletions spec/cli_spec.rb
@@ -1,15 +1,15 @@
 # frozen_string_literal: true

 RSpec.describe Miteru::CLI do
-  subject { Miteru::CLI.new }
+  subject { described_class.new }

   describe "#execute" do
     before do
       allow(Miteru::Feeds).to receive_message_chain(:new, :suspicious_urls).and_return(["http://#{host}:#{port}/has_kit"])
     end

-    it "should not raise any error" do
-      capture(:stdout) { Miteru::CLI.start %w(execute) }
+    it "does not raise any error" do
+      capture(:stdout) { described_class.start %w(execute) }
     end
   end
 end
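
The description change follows the RSpec/ExampleWording cop, which rewrites modal "should ..." docstrings into present-tense third person, since `it` already names the behavior. The transformation is purely textual; the example body is untouched:

    it "should return a hash"   # before auto-correct
    it "returns a hash"         # after auto-correct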
9 changes: 5 additions & 4 deletions spec/crawler_spec.rb
@@ -4,17 +4,18 @@
   include_context "http_server"
   include_context "download_kits"

-  before { allow(ENV).to receive(:[]).with("SLACK_WEBHOOK_URL").and_return(nil) }
+  subject { described_class }

-  subject { Miteru::Crawler }
+  before { allow(ENV).to receive(:[]).with("SLACK_WEBHOOK_URL").and_return(nil) }

   describe ".execute" do
     before do
       allow(Miteru::Feeds).to receive_message_chain(:new, :suspicious_urls).and_return(["http://#{host}:#{port}/has_kit"])
       allow(Parallel).to receive(:processor_count).and_return(0)
     end

-    it "should not raise any error" do
-      capture(:stdout) { expect { subject.execute }.to_not raise_error }
+    it "does not raise any error" do
+      capture(:stdout) { expect { subject.execute }.not_to raise_error }
     end
   end
 end
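
Two more conventions show up in this file. RSpec/LeadingSubject moves the subject declaration above the before hook, so the group's subject is declared before its setup; RSpec/NotToNot replaces to_not with not_to, which are aliases in rspec-expectations, so behavior is unchanged. A condensed sketch of the corrected shape:

    subject { described_class }

    before { allow(ENV).to receive(:[]).with("SLACK_WEBHOOK_URL").and_return(nil) }

    it "does not raise any error" do
      # not_to is the preferred alias of to_not
      expect { subject.execute }.not_to raise_error
    end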
7 changes: 4 additions & 3 deletions spec/downloader_spec.rb
@@ -5,13 +5,14 @@
   include_context "download_kits"

   describe "#download_kits" do
-    subject { Miteru::Downloader.new(base_dir) }
+    subject { described_class.new(base_dir) }

     before { WebMock.disable! }

     after { WebMock.enable! }

     context "when it runs once" do
-      it "should download a file" do
+      it "downloads a file" do
         kits = [
           Miteru::Kit.new(base_url: "http://#{host}:#{port}/has_kit", link: "test.zip"),
           Miteru::Kit.new(base_url: "http://#{host}:#{port}/has_kit", link: "test.tar")
@@ -35,7 +36,7 @@
     end

     context "when it runs multiple times" do
-      it "should remove duplicated files" do
+      it "removes duplicated files" do
         kits = [
           Miteru::Kit.new(base_url: "http://#{host}:#{port}/has_kit", link: "test.zip")
         ]
2 changes: 1 addition & 1 deletion spec/feeds/ayashige_spec.rb
@@ -4,7 +4,7 @@
   subject { Miteru::Feeds::Ayashige }

   describe "#urls" do
-    it "should return an Array" do
+    it "returns an Array" do
       results = subject.new.urls
       expect(results).to be_an(Array)
       results.all? { |url| url.start_with?(/^http|^https/) }
8 changes: 4 additions & 4 deletions spec/feeds/urlscan_spec.rb
@@ -4,7 +4,7 @@
   subject { Miteru::Feeds::UrlScan }

   context "without 'size' option" do
-    it "should return an Array" do
+    it "returns an Array" do
       results = subject.new.urls
       expect(results).to be_an(Array)
       expect(results.length).to eq(100)
@@ -13,23 +13,23 @@

   context "with 'size' option" do
     context "when size <= 10,000" do
-      it "should return an Array" do
+      it "returns an Array" do
        results = subject.new(10_000).urls
        expect(results).to be_an(Array)
        expect(results.length).to eq(10_000)
      end
    end

    context "when size > 10,000" do
-      it "should raise an ArugmentError" do
+      it "raises an ArugmentError" do
        expect { subject.new(10_001).urls }.to raise_error(ArgumentError)
      end
    end

    context "when an error is raised" do
      before { allow(Miteru::HTTPClient).to receive_message_chain(:new, :get).and_raise(Miteru::HTTPResponseError, "test") }

-      it "should output a message" do
+      it "outputs a message" do
        message = capture(:stdout) { subject.new.urls }
        expect(message).to eq("Failed to load urlscan.io feed (test)\n")
      end
10 changes: 5 additions & 5 deletions spec/feeds_spec.rb
@@ -1,11 +1,11 @@
 # frozen_string_literal: true

 RSpec.describe Miteru::Feeds do
-  subject { Miteru::Feeds }
+  subject { described_class }

   describe "#breakdown" do
     context "when given an url without path" do
-      it "should return an Array (length == 1)" do
+      it "returns an Array (length == 1)" do
         results = subject.new.breakdown("http://test.com")
         expect(results).to be_an(Array)
         expect(results.length).to eq(1)
@@ -14,7 +14,7 @@

     context "when given an url with path" do
       context "when disabling directory_traveling" do
-        it "should return an Array (length == 1)" do
+        it "returns an Array (length == 1)" do
           results = subject.new.breakdown("http://test.com/test/test/index.html")
           expect(results).to be_an(Array)
           expect(results.length).to eq(1)
@@ -23,7 +23,7 @@
       end

       context "when enabling directory_traveling" do
-        it "should return an Array (length == 3)" do
+        it "returns an Array (length == 3)" do
           results = subject.new(directory_traveling: true).breakdown("http://test.com/test/test/index.html")
           expect(results).to be_an(Array)
           expect(results.length).to eq(3)
@@ -39,7 +39,7 @@
       allow(Miteru::Feeds::Ayashige).to receive_message_chain(:new, :urls).and_return(["https://test.com"])
     end

-    it "should return an Array without duplicated" do
+    it "returns an Array without duplicated" do
       results = subject.new.suspicious_urls
       expect(results).to be_an(Array)
       expect(results.length).to eq(1)
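
A pattern used throughout these specs is receive_message_chain(:new, :suspicious_urls), which stubs a whole call chain in one shot: Miteru::Feeds.new.suspicious_urls returns the canned array with no real object construction or network access. A self-contained sketch of the same technique (the Feed class here is hypothetical, not part of Miteru):

    require "rspec/autorun"

    class Feed
      def suspicious_urls
        raise "would hit the network"
      end
    end

    RSpec.describe Feed do
      it "returns the stubbed urls" do
        # Stubs Feed.new and makes the returned double answer #suspicious_urls.
        allow(Feed).to receive_message_chain(:new, :suspicious_urls).and_return(["https://test.com"])
        expect(Feed.new.suspicious_urls).to eq(["https://test.com"])
      end
    end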

Large diffs are not rendered by default.
