From 2f17082e734c95f124e92caa952f03becee32157 Mon Sep 17 00:00:00 2001
From: evazion
Date: Tue, 18 Sep 2018 16:00:03 -0500
Subject: [PATCH] Downloads::File: fix SSRF when following redirects (#2498).

Fixes the banned IP check not being applied when following redirects:

http://danbooru.donmai.us/uploads/new?url=http://httpbin.org/redirect-to%3Furl=http://127.0.0.1/test.jpg
---
 app/logical/downloads/file.rb    | 41 ++++++++++++++++++++------------
 test/unit/downloads/file_test.rb | 19 +++++++++++++--
 2 files changed, 43 insertions(+), 17 deletions(-)

diff --git a/app/logical/downloads/file.rb b/app/logical/downloads/file.rb
index 6b6aed58c52..215a36a3ed7 100644
--- a/app/logical/downloads/file.rb
+++ b/app/logical/downloads/file.rb
@@ -9,7 +9,6 @@ class Error < Exception ; end
     attr_reader :url, :referer
 
     validate :validate_url
-    validate :validate_local_hosts
 
     # Prevent Cloudflare from potentially mangling the image. See issue #3528.
     def self.uncached_url(url, headers = {})
@@ -34,13 +33,11 @@ def self.is_cloudflare?(url, headers = {})
     def initialize(url, referer=nil)
       @url = Addressable::URI.parse(url) rescue nil
       @referer = referer
+      validate!
     end
 
     def size
-      validate!
-      options = { timeout: 3, headers: strategy.headers }.deep_merge(Danbooru.config.httparty_options)
-
-      res = HTTParty.head(strategy.file_url, options)
+      res = HTTParty.head(strategy.file_url, **httparty_options, timeout: 3)
 
       if res.success?
         res.content_length
@@ -50,7 +47,6 @@ def size
     end
 
     def download!(tries: 3, **options)
-      validate!
       url = self.class.uncached_url(strategy.file_url, strategy.headers)
 
       Retriable.retriable(on: RETRIABLE_ERRORS, tries: tries, base_interval: 0) do
@@ -59,13 +55,6 @@ def download!(tries: 3, **options)
       end
     end
 
-    def validate_local_hosts
-      ip_addr = IPAddr.new(Resolv.getaddress(url.hostname))
-      if Danbooru.config.banned_ip_for_download?(ip_addr)
-        errors[:base] << "Downloads from #{ip_addr} are not allowed"
-      end
-    end
-
     def validate_url
       errors[:base] << "URL must not be blank" if url.blank?
       errors[:base] << "'#{url}' is not a valid url" if !url.host.present?
@@ -74,9 +63,8 @@ def validate_url
 
     def http_get_streaming(url, file: Tempfile.new(binmode: true), headers: {}, max_size: Danbooru.config.max_file_size)
       size = 0
-      options = { stream_body: true, timeout: 10, headers: headers }
 
-      res = HTTParty.get(url, options.deep_merge(Danbooru.config.httparty_options)) do |chunk|
+      res = HTTParty.get(url, httparty_options) do |chunk|
         size += chunk.size
         raise Error.new("File is too large (max size: #{max_size})") if size > max_size && max_size > 0
 
@@ -94,5 +82,28 @@ def http_get_streaming(url, file: Tempfile.new(binmode: true), headers: {}, max_
     def strategy
       @strategy ||= Sources::Strategies.find(url.to_s, referer)
     end
+
+    def httparty_options
+      {
+        timeout: 10,
+        stream_body: true,
+        headers: strategy.headers,
+        connection_adapter: ValidatingConnectionAdapter,
+      }.deep_merge(Danbooru.config.httparty_options)
+    end
+  end
+
+  # Hook into HTTParty to validate the IP before following redirects.
+  # https://www.rubydoc.info/github/jnunemaker/httparty/HTTParty/ConnectionAdapter
+  class ValidatingConnectionAdapter < HTTParty::ConnectionAdapter
+    def self.call(uri, options)
+      ip_addr = IPAddr.new(Resolv.getaddress(uri.hostname))
+
+      if Danbooru.config.banned_ip_for_download?(ip_addr)
+        raise Downloads::File::Error, "Downloads from #{ip_addr} are not allowed"
+      end
+
+      super(uri, options)
+    end
   end
 end
diff --git a/test/unit/downloads/file_test.rb b/test/unit/downloads/file_test.rb
index e8e4cce91ce..63ae6f0a317 100644
--- a/test/unit/downloads/file_test.rb
+++ b/test/unit/downloads/file_test.rb
@@ -11,12 +11,27 @@ class FileTest < ActiveSupport::TestCase
     context "for a banned IP" do
       should "prevent downloads" do
         Resolv.expects(:getaddress).returns("127.0.0.1")
-        assert_raise(ActiveModel::ValidationError) { Downloads::File.new("http://evil.com").download! }
+        assert_raise(Downloads::File::Error) { Downloads::File.new("http://evil.com").download! }
       end
 
       should "prevent fetching the size" do
         Resolv.expects(:getaddress).returns("127.0.0.1")
-        assert_raise(ActiveModel::ValidationError) { Downloads::File.new("http://evil.com").size }
+        assert_raise(Downloads::File::Error) { Downloads::File.new("http://evil.com").size }
+      end
+
+      should "not follow redirects to banned IPs" do
+        url = "http://httpbin.org/redirect-to?url=http://127.0.0.1"
+        stub_request(:get, url).to_return(status: 301, headers: { "Location": "http://127.0.0.1" })
+
+        assert_raise(Downloads::File::Error) { Downloads::File.new(url).download! }
+      end
+
+      should "not follow redirects that resolve to a banned IP" do
+        url = "http://httpbin.org/redirect-to?url=http://127.0.0.1.nip.io"
+        stub_request(:get, url).to_return(status: 301, headers: { "Location": "http://127.0.0.1.xip.io" })
+        Resolv.expects(:getaddress).returns("127.0.0.1")
+
+        assert_raise(Downloads::File::Error) { Downloads::File.new(url).download! }
       end
     end
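Note on why the connection_adapter hook closes the redirect hole: HTTParty builds a fresh connection through the configured connection_adapter for every request it performs, including each hop of a redirect chain, so validating the resolved IP there catches redirect targets that the old pre-download validation never saw. A minimal standalone sketch of the same idea outside Danbooru follows; the PrivateAddressBlockingAdapter name and the IPAddr#private?/#loopback? checks are illustrative stand-ins for Danbooru.config.banned_ip_for_download?, not part of this patch.

require "httparty"
require "resolv"
require "ipaddr"

# Reject connections to private or loopback addresses before any request,
# including requests made while following redirects.
class PrivateAddressBlockingAdapter < HTTParty::ConnectionAdapter
  def self.call(uri, options)
    ip = IPAddr.new(Resolv.getaddress(uri.hostname))

    # Stand-in for Danbooru.config.banned_ip_for_download?(ip).
    raise "Downloads from #{ip} are not allowed" if ip.private? || ip.loopback?

    super(uri, options)
  end
end

# Follows public redirects normally, but raises before connecting to any hop
# that resolves to a private or loopback address.
HTTParty.get("http://httpbin.org/redirect-to?url=http://127.0.0.1/test.jpg",
             connection_adapter: PrivateAddressBlockingAdapter)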