Skip to content

Commit

Permalink
[api] fake issue servers using webmock
Browse files Browse the repository at this point in the history
Fetching fate issues seems to be a stub ;(
  • Loading branch information
coolo committed Sep 6, 2013
1 parent 1e16e5e commit 9f77b13
Show file tree
Hide file tree
Showing 11 changed files with 2,379 additions and 111 deletions.
173 changes: 88 additions & 85 deletions src/api/app/models/issue_tracker.rb
Expand Up @@ -13,9 +13,9 @@ class NotFoundError < APIException
validates_inclusion_of :kind, :in => ['other', 'bugzilla', 'cve', 'fate', 'trac', 'launchpad', 'sourceforge']

# FIXME: issues_updated should not be hidden, but it should also not break our api
DEFAULT_RENDER_PARAMS = {:except => [:id, :password, :user, :issues_updated], :dasherize => true, :skip_types => true, :skip_instruct => true }
DEFAULT_RENDER_PARAMS = {:except => [:id, :password, :user, :issues_updated], :dasherize => true, :skip_types => true, :skip_instruct => true}

def self.write_to_backend()
def self.write_to_backend
path = "/issue_trackers"
logger.debug "Write issue tracker information to backend..."
Suse::Backend.put_source(path, IssueTracker.all.to_xml(DEFAULT_RENDER_PARAMS))
Expand All @@ -29,7 +29,7 @@ def self.write_to_backend()
end

def update_package_metadata
Project.each do |prj|
Project.all.each do |prj|
next unless Project.exists?(prj)
prj.packages.each do |pkg|
next unless Package.exists?(pkg)
Expand All @@ -42,21 +42,21 @@ def update_package_metadata
end

# Checks if the given issue belongs to this issue tracker
def matches?(issue)
return Regexp.new(regex).match(issue)
end
# def matches?(issue)
# return Regexp.new(regex).match(issue)
# end

# Generates a URL to display a given issue in the upstream issue tracker
def show_url_for(issue)
return show_url.gsub('@@@', issue) if issue
return nil
end
# def show_url_for(issue)
# return show_url.gsub('@@@', issue) if issue
# return nil
# end

def issue(issue_id)
return Issue.find_by_name_and_tracker(issue_id, self.name)
end
# def issue(issue_id)
# return Issue.find_by_name_and_tracker(issue_id, self.name)
# end

def update_issues()
def update_issues
# before asking remote to ensure that it is older then on remote, assuming ntp works ...
# to be sure, just reduce it by 5 seconds (would be nice to have a counter at bugzilla to
# guarantee a complete search)
Expand All @@ -68,7 +68,7 @@ def update_issues()
rescue Net::ReadTimeout
return false
end
ids = result["bugs"].map{ |x| x["id"].to_i }
ids = result["bugs"].map { |x| x["id"].to_i }

if private_fetch_issues(ids)
self.issues_updated = update_time_stamp
Expand Down Expand Up @@ -102,10 +102,10 @@ def update_issues()
end

# this function is usually never called. Just for debugging and disaster recovery
def enforced_update_all_issues()
def enforced_update_all_issues
update_time_stamp = Time.at(Time.now.to_f - 5)

ids = issues.map{ |x| x.name.to_s }
ids = issues.map { |x| x.name.to_s }

if private_fetch_issues(ids)
self.issues_updated = update_time_stamp
Expand All @@ -121,91 +121,94 @@ def fetch_issues(issues=nil)
issues = self.issues.stateless
end

ids = issues.map{ |x| x.name.to_s }
ids = issues.map { |x| x.name.to_s }

return private_fetch_issues(ids)
end

private
def private_fetch_issues(ids)
unless self.enable_fetch
logger.info "Bug mentioned on #{self.name}, but fetching from server is disabled"
return false
# Queue a background (delayed_job) refresh of issues for every tracker
# that has remote fetching enabled; trackers with fetching disabled are
# skipped entirely.
def self.update_all_issues
  IssueTracker.all.select(&:enable_fetch).each do |tracker|
    tracker.delay.update_issues
  end
end

update_time_stamp = Time.at(Time.now.to_f)
private

if kind == "bugzilla"
# limit to 256 ids to avoid too much load and timeouts on bugzilla side
limit_per_slice=256
while ids
begin
result = bugzilla_server.get({:ids => ids[0..limit_per_slice], :permissive => 1})
rescue RuntimeError => e
logger.error "Unable to fetch issue #{e.inspect}"
return false
rescue XMLRPC::FaultException => e
logger.error "Error: #{e.faultCode} #{e.faultString}"
return false
end
result["bugs"].each{ |r|
issue = Issue.find_by_name_and_tracker r["id"].to_s, self.name
if issue
if r["is_open"]
# bugzilla sees it as open
issue.state = Issue.states["OPEN"]
elsif r["is_open"] == false
# bugzilla sees it as closed
issue.state = Issue.states["CLOSED"]
else
# bugzilla does not tell a state
issue.state = Issue.bugzilla_state(r["status"])
end
u = User.find_by_email(r["assigned_to"].to_s)
logger.info "Bug user #{r["assigned_to"].to_s} is not found in OBS user database" unless u
issue.owner_id = u.id if u
issue.updated_at = update_time_stamp
if r["is_private"]
issue.summary = nil
else
issue.summary = r["summary"]
end
issue.save
end
}

ids=ids[limit_per_slice..-1]
end
elsif kind == "fate"
# Try with 'IssueTracker.find_by_name('fate').details('123')' on script/console
url = URI.parse("#{self.url}/#{self.name}?contenttype=text%2Fxml")
begin # Need a loop to follow redirects...
http = Net::HTTP.new(url.host, url.port)
http.use_ssl = (url.scheme == 'https')
request = Net::HTTP::Get.new(url.path)
resp = http.start {|h| h.request(request) }
url = URI.parse(resp.header['location']) if resp.header['location']
end while resp.header['location']
# TODO: Parse returned XML and return proper JSON
return false
elsif kind == "trac"
# TODO: Most trac instances demand a login, maybe worth having one ;-)
server = XMLRPC::Client.new2("#{self.url}/rpc")
# Fetch the given bugzilla bug ids over XML-RPC and apply each result to
# the locally stored issues via parse_single_bugzilla_issue.
#
# Returns true when all slices were fetched, false on any remote error.
#
# Bug fix: the original sliced with ids[0..limit_per_slice] (257 elements,
# ranges are inclusive) but advanced with ids[limit_per_slice..-1], so the
# element at index 256 was fetched twice per slice. each_slice removes the
# manual index arithmetic and the off-by-one.
def fetch_bugzilla_issues(ids)
  # limit to 256 ids to avoid too much load and timeouts on bugzilla side
  limit_per_slice = 256
  ids.each_slice(limit_per_slice) do |slice|
    begin
      result = bugzilla_server.get({:ids => slice, :permissive => 1})
    rescue RuntimeError => e
      logger.error "Unable to fetch issue #{e.inspect}"
      return false
    rescue XMLRPC::FaultException => e
      logger.error "Error: #{e.faultCode} #{e.faultString}"
      return false
    end
    result["bugs"].each { |r| parse_single_bugzilla_issue(r) }
  end
  return true
end

# Apply one bugzilla XML-RPC result hash +r+ to the locally stored issue
# with the same name, updating state, owner, updated_at and summary.
# Does nothing when no matching issue exists locally.
#
# Bug fix: this method was extracted from a larger one and still referenced
# the local variable +update_time_stamp+ of its former enclosing method,
# which is undefined here and would raise NameError at runtime. It is now a
# parameter with a default, so existing one-argument callers keep working.
def parse_single_bugzilla_issue(r, update_time_stamp = Time.now)
  issue = Issue.find_by_name_and_tracker r["id"].to_s, self.name
  return unless issue
  if r["is_open"]
    # bugzilla sees it as open
    issue.state = Issue.states["OPEN"]
  elsif r["is_open"] == false
    # explicit false check: nil means bugzilla did not report a state at all
    # bugzilla sees it as closed
    issue.state = Issue.states["CLOSED"]
  else
    # bugzilla does not tell a state
    issue.state = Issue.bugzilla_state(r["status"])
  end
  u = User.find_by_email(r["assigned_to"].to_s)
  logger.info "Bug user #{r["assigned_to"].to_s} is not found in OBS user database" unless u
  issue.owner_id = u.id if u
  issue.updated_at = update_time_stamp
  # private bugs must not leak their summary into OBS
  if r["is_private"]
    issue.summary = nil
  else
    issue.summary = r["summary"]
  end
  issue.save
end

# Dispatch issue fetching to the implementation matching this tracker's
# kind. Returns false when fetching is disabled or the fetch failed,
# true otherwise (kinds without a fetcher are treated as success).
def private_fetch_issues(ids)
  unless self.enable_fetch
    logger.info "Bug mentioned on #{self.name}, but fetching from server is disabled"
    return false
  end

  case kind
  when "bugzilla"
    fetch_bugzilla_issues(ids)
  when "fate"
    # Try with 'IssueTracker.find_by_name('fate').details('123')' on script/console
    fetch_fate_issues
  else
    # everything succeeded
    true
  end
end

# Fetch issue data from a FATE instance as XML. Still a stub: it follows
# redirects to the final XML export URL but does not yet parse the
# response, so it always returns false.
#
# Bug fix: the original begin…end-while redirect loop had no upper bound,
# so a redirect cycle (or a server that always answers with a Location
# header) would loop forever. Redirects are now capped.
def fetch_fate_issues
  url = URI.parse("#{self.url}/#{self.name}?contenttype=text%2Fxml")
  # Need a loop to follow redirects, bounded so a cycle cannot hang us.
  10.times do
    http = Net::HTTP.new(url.host, url.port)
    http.use_ssl = (url.scheme == 'https')
    request = Net::HTTP::Get.new(url.path)
    resp = http.start { |h| h.request(request) }
    break unless resp.header['location']
    url = URI.parse(resp.header['location'])
  end
  # TODO: Parse returned XML and return proper JSON
  return false
end

def bugzilla_server
server = XMLRPC::Client.new2("#{self.url}/xmlrpc.cgi")
server.timeout = 300 # 5 minutes timeout
Expand Down
5 changes: 1 addition & 4 deletions src/api/config/clock.rb
Expand Up @@ -15,10 +15,7 @@
end

every(1.hour, 'refresh issues') do
IssueTracker.all.each do |t|
next unless t.enable_fetch
t.delay.update_issues
end
IssueTracker.update_all_issues
end

every(1.hour, 'accept requests') do
Expand Down
1 change: 1 addition & 0 deletions src/api/config/initializers/delayed_job_config.rb
@@ -0,0 +1 @@
# Run Delayed::Job jobs synchronously (inline) in the test environment so
# tests do not depend on a running worker process; every other environment
# queues jobs for background workers as usual.
Delayed::Worker.delay_jobs = !Rails.env.test?
Binary file added src/api/test/fixtures/backend/allitems.xml.gz
Binary file not shown.

0 comments on commit 9f77b13

Please sign in to comment.