Permalink
Browse files

impl reverse, handle 404 and fix status color

  • Loading branch information...
1 parent 2c1acb5 commit fa9cc87770593c894ec9f991c55e050926c24c59 @bdelbosc committed Jan 26, 2012
Showing with 97 additions and 32 deletions.
  1. +8 −4 CHANGES.rst
  2. +4 −2 README.txt
  3. +7 −4 TODO.org
  4. +7 −4 jenkviz/command.py
  5. +49 −9 jenkviz/crawl.py
  6. +4 −2 jenkviz/graphviz.py
  7. +3 −3 jenkviz/main.py
  8. +9 −2 jenkviz/model.py
  9. +5 −1 jenkviz/util.py
  10. +1 −1 setup.py
View
@@ -5,30 +5,34 @@ jenkviz CHANGES
.. contents:: Table of Contents
-jenkviz GIT master
+jenkviz git master
--------------------
:git: https://github.com/bdelbosc/jenkviz
-:Target: 0.1.2
+:Target: 0.2.0
New Features
~~~~~~~~~~~~~~
* Adding ``--explore`` options
+* Adding ``--reverse`` to crawl backward
Bug Fixes
~~~~~~~~~~
-* Handle multiple upstream build.
+* Handles multiple upstream builds.
+* Handles 404 on deleted build.
+
+* Fixes color for Failed and Aborted builds.
jenkviz 0.1.1
------------------
-:Package: http://pypi.python.org/packages/source/f/jenkviz/jenkviz-0.1.1.tar.gz
+:Package: http://pypi.python.org/packages/source/j/jenkviz/jenkviz-0.1.1.tar.gz
:github: https://github.com/bdelbosc/jenkviz/tree/0.1.1
View
@@ -22,7 +22,7 @@ DESCRIPTION
- number of builds
- Black arrows to render upstream and downstream relation
- Orange arrows to render downstream only relation
- - Build with a Blue/Yellow/Red box for Success/Unstable/Failure
+ - Build with a blue/yellow/red/gray box for Success/Unstable/Failed/Aborted
build status
Build information is stored in a local sqlite database. The
@@ -51,11 +51,13 @@ USAGE
COMMANDS
~~~~~~~~~
- crawl [--direct|--explore] [--output SVG_FILE] JENKINS_BUILD_URL
+ crawl [--direct|--reverse|--explore] [--output SVG_FILE] JENKINS_BUILD_URL
  The ``--direct`` option shows only downstream and upstream relations,
  removing downstream-only links.
+  The ``--reverse`` option crawls backward using upstream builds.
+
  The ``--explore`` option keeps downstream builds whose upstream
  build is out of the scope of the origin build (i.e. the upstream
  build is not a descendant of the root build).
View
@@ -1,15 +1,18 @@
#+TITLE: JenkViz todos
-* TODO reverse
- parse reverse
+* TODO reverse explore
+  parse reverse then crawl downstream using roots
+* TODO use rest api to find missing downstream
+ when missing downstream build number try to use the rest api
+ for instance this gives the downstream build number nuxeo-distribution-master #90
+ http://qa.nuxeo.org/jenkins/job/FT-nuxeo-master-selenium-cmf-tomcat/api/xml?depth=1&xpath=/freeStyleProject/build[action/cause/upstreamProject="nuxeo-distribution-master" and action/cause/upstreamBuild=90]/number/text()
* TODO fix bug in throughput
sounds like if elapsed > 24h there is a pb
* TODO render db stats
list slowest jobs, most frequent, slowest N ...
list hosts usage ...
-* TODO handle multiple upstream
- http://qa.nuxeo.org/jenkins/job/addons_nuxeo-social-collaboration-master/904/
* TODO find how to work around jenkins bug
https://issues.jenkins-ci.org/browse/JENKINS-6211
prevent non maven job to display correctly downstream build
-> impl the reverse mode
+ -> use rest api
View
@@ -32,16 +32,19 @@ def cmd_crawl(args, options):
os.mkdir(options.from_file)
db = open_db(options)
crawl = Crawl(db, options)
- root = crawl.crawl(args[0])
+ if options.reverse:
+ roots = crawl.reverse_crawl(args[0])
+ else:
+ roots = crawl.crawl(args[0])
close_db(db)
- stat = root.extra
+ stat = roots[0].extra
logging.info("Started: %s\n\tend: %s\n\telapsed: %s\n\tduration: %ss\n\tNb builds: %s\n\ttrhoughput: %s\n" % (
stat['start'], stat['stop'], stat['elapsed'], stat['duration'], stat['count'], stat['throughput']))
if not options.output:
- svg_file = root.getId() + ".svg"
+ svg_file = roots[0].getId() + ".svg"
else:
svg_file = options.output
- graphviz(root, svg_file)
+ graphviz(roots, svg_file)
logging.info("%s generated." % svg_file)
return 0
View
@@ -10,6 +10,7 @@
"""
import os
import re
+import logging
from urlparse import urlparse
from requests import get as http_get
from model import Build
@@ -36,7 +37,7 @@ def crawl(self, url):
self._crawl(self.path_root)
self.clean(self.root)
self.root.extra = self.stats()
- return self.root
+ return [self.root]
def _crawl(self, url):
parent = self.get_build(url)
@@ -45,15 +46,43 @@ def _crawl(self, url):
print parent
if not len(parent.get_downstream()):
return
- for url in parent.get_downstream():
+ for child_url in parent.get_downstream():
known = False
- if url in self.builds.keys():
+ if child_url in self.builds.keys():
known = True
- build = self.get_build(url)
- if not self.options.direct or parent.url in build.get_upstream():
- parent.children.append(build)
+ child = self.get_build(child_url)
+ if not self.options.direct or parent.url in child.get_upstream():
+ parent.children.append(child)
if not known:
- self._crawl(url)
+ self._crawl(child_url)
+
+ def reverse_crawl(self, url):
+ self.url = url
+ u = urlparse(url)
+ self.path_leaf = u.path
+ self.server_url = url[:-len(self.path_leaf)]
+ self.roots = []
+ self._reverse_crawl(self.path_leaf)
+ # TODO: disable cache and craw from roots
+ # for root in self.roots:
+ # self._crawl(root.url)
+ self.roots[0].extra = self.stats()
+ return self.roots
+
+ def _reverse_crawl(self, url):
+ child = self.get_build(url)
+ print child
+ if len(child.get_upstream()) == 0:
+ self.roots.append(child)
+ return
+ for parent_url in child.get_upstream():
+ known = False
+ if parent_url in self.builds.keys():
+ known = True
+ parent = self.get_build(parent_url)
+ parent.children.append(child)
+ if not known:
+ self._reverse_crawl(parent_url)
def get_build(self, url):
self.count += 1
@@ -66,6 +95,8 @@ def get_build(self, url):
# 3. fetch jenkins page
if ret is None or self.options.update:
build = self.fetch_build(url)
+ if build is None:
+ return None
# 3.1 persist build
if ret is None:
self.save_build_to_db(build)
@@ -82,7 +113,7 @@ def fetch_build(self, url):
else:
body = self.fetch_build_from_server(url)
ret = self.parse_build(url, body)
- if self.options.to_file:
+ if ret and self.options.to_file:
self.save_build_to_file(ret, body)
return ret
@@ -97,7 +128,9 @@ def update_build_to_db(self, build):
def fetch_build_from_server(self, url):
response = http_get(self.server_url + url)
- assert(response.status_code == 200)
+ if response.status_code != 200:
+ logging.error('Failure: %s%s return %s' % (self.server_url, url, response.status_code))
+ return "ERROR: %s" % response.status_code
return response.text
def fetch_build_from_file(self, url):
@@ -114,6 +147,11 @@ def save_build_to_file(self, build, body):
open(file_path, 'w+').write(body.encode('utf-8'))
def parse_build(self, url, body):
+ if body.startswith('ERROR'):
+ name = url.split('/')[-3]
+ build_number = url.split('/')[-2]
+ return Build(url, body, name, build_number, None, None, 'Unknown', [],
+ self.server_url, '', [])
name = extract_token(body, '<title>', ' ')
h1 = extract_token(body, '<h1>', '</h1>')
status = extract_token(h1, 'alt="', '"')
@@ -147,6 +185,8 @@ def stats(self):
start = stop = None
duration = 0
for build in self.builds.itervalues():
+ if not build.start_t or not build.stop_t:
+ continue
if start is None or build.start_t < start:
start = build.start_t
if stop is None or build.stop_t > stop:
View
@@ -12,8 +12,9 @@
from datetime import timedelta
-def graphviz(root, svg_file):
+def graphviz(roots, svg_file):
"""Create a fpath svg from build tree."""
+ root = roots[0]
dot_file = svg_file.replace('.svg', '.dot')
out = open(dot_file, "w+")
out.write("""digraph g {
@@ -24,7 +25,8 @@ def graphviz(root, svg_file):
root.extra['count'], root.extra['throughput']))
visited = []
- _graphviz_recurse(root, out, visited)
+ for root in roots:
+ _graphviz_recurse(root, out, visited)
out.write("}\n")
out.close()
_make_svg(dot_file, svg_file)
View
@@ -43,9 +43,9 @@ def main(argv=sys.argv):
help="Use html files in the the FROM_FILE directory instead of querying jenkins server.")
parser.add_option("--to-file", type="string",
help="Save jenkins page into the TO_FILE directory.")
- # parser.add_option("-r", "--reverse", action="store_true",
- # default=True,
- # help="Reverse crawl")
+ parser.add_option("-r", "--reverse", action="store_true",
+ default=False,
+ help="Reverse crawl")
parser.add_option("--direct", action="store_true",
default=False,
help="Display only direct upstream dependencies")
View
@@ -93,7 +93,10 @@ def __init__(self, url, host, name, build_number, start, duration, status, downs
self.trigger = trigger
self.start_t = time_to_datetime(start)
self.duration_s = duration_to_second(duration)
- self.stop_t = self.start_t + timedelta(seconds=self.duration_s)
+ if self.start_t and self.duration_s:
+ self.stop_t = self.start_t + timedelta(seconds=self.duration_s)
+ else:
+ self.stop_t = None
self.downstream = ','.join(downstream)
self.upstream = ','.join(upstream)
@@ -109,10 +112,14 @@ def getId(self):
def color(self):
if self.status == 'Success':
return "blue"
- if self.status == 'Failure':
+ if self.status == 'Failed':
return "red"
if self.status == 'Unstable':
return "gold"
+ if self.status == 'Unknown':
+ return "brown"
+ if self.status == 'Aborted':
+ return "dimgray"
return "black"
def full_url(self):
View
@@ -80,6 +80,8 @@ def __ror__(self, other):
def duration_to_second(duration):
"""Convert jenkins duration into second"""
+ if not duration:
+ return None
match = re.match('^(([0-9])+ h)? ?(([0-9]+) min)? ?(([0-9]+) sec)?$', duration)
ret = 0
if match and len(match.groups()) == 6:
@@ -93,7 +95,9 @@ def duration_to_second(duration):
def time_to_datetime(str_time):
- return datetime.strptime(str_time, '%b %d, %Y %I:%M:%S %p')
+ if str_time:
+ return datetime.strptime(str_time, '%b %d, %Y %I:%M:%S %p')
+ return None
def extract_token(text, tag_start, tag_end):
View
@@ -1,7 +1,7 @@
#!/usr/bin/env python
__author__ = "Benoit Delbosc"
__copyright__ = "Copyright (C) 2012 Nuxeo SA <http://nuxeo.com/>"
-__version__ = '0.1.1'
+__version__ = '0.2.0'
"""jenkviz package setup"""
from setuptools import setup, find_packages

0 comments on commit fa9cc87

Please sign in to comment.