
RESTful API is working!

Commit df75849631c4bf9efd1acf235dfd50c5bb0d867d (1 parent: b938c4d), committed by Niall Douglas (s [underscore] sourceforge {at} nedprod [dot] com) on Mar 5, 2012
@@ -3,10 +3,11 @@
# Created: March 2012
from libBEXML import BEXML, parserbase
-import web, urlparse, os, inspect, logging, sys, traceback
+import web, urlparse, os, inspect, logging, sys, traceback, types, collections
import omnijson as json
from mimerender import mimerender
from cgi import escape
+from uuid import UUID
DEBUG=True
@@ -29,37 +30,58 @@ def inspect_apis(routines):
def XMLise(v, indent=0):
- ret=""
+ ret=u''.rjust(indent)
+ ret+=u"<dictionary>\n"
+ indent+=3
for k in v:
x=v[k]
ret+=u''.rjust(indent)
- ret+=u'<'+unicode(k)+u'>'
+ ret+=u'<item key="'+unicode(k)+u'">'
if isinstance(x, dict):
ret+=u'\n'
ret+=XMLise(x, indent+3)
ret+=u''.rjust(indent)
elif isinstance(x, list):
- ret+=u'\n'
+ ret+=u'<list>\n'
for i in x:
- ret+=u''.rjust(indent+3)
- ret+=unicode(i)
- ret+=',\n'
- ret+=u''.rjust(indent)
+ ret+=u''.rjust(indent+6)+u'<item>'
+ ret+=escape(unicode(i))
+ ret+='</item>\n'
+ ret+=u''.rjust(indent+3)+u'</list>'
else:
- ret+=unicode(x)
- ret+=u'</'+unicode(k)+u'>\n'
+ ret+=escape(unicode(x))
+ ret+=u'</item>\n'
+ indent-=3
+ ret+=u''.rjust(indent)
+ ret+=u"</dictionary>\n"
return ret
-render_xml = lambda **args: XMLise(args)
-render_json = lambda **args: json.dumps(args)
-render_html = lambda **args: u'<html><body><pre>%s</pre></body></html>'%escape(XMLise(args))
-render_txt = lambda **args: repr(args)
+def FixupTypes(v):
+ """Specialised serialisation for certain types"""
+ if isinstance(v, UUID):
+ return v.urn[9:]
+ elif isinstance(v, dict):
+ ret={}
+ for item in v:
+ ret[item]=FixupTypes(v[item])
+ return ret
+ elif isinstance(v, list):
+ for i in xrange(0, len(v)):
+ v[i]=FixupTypes(v[i])
+ return v
+ return v
+
+render_xml = lambda **args: XMLise(FixupTypes(args))
+render_json = lambda **args: json.dumps(FixupTypes(args))
+render_html = lambda **args: u'<html><body><pre>%s</pre></body></html>'%escape(XMLise(FixupTypes(args)))
+render_txt = lambda **args: repr(FixupTypes(args))
urls = (
'/', 'index',
'/apilist', 'apilist',
'/open(.*)', 'open',
- '/parser/(.+)', 'parser'
+ '/cancel/(.+)', 'cancel',
+ '/(.+)', 'catchall',
)
app = web.application(urls, locals())
session = web.session.Session(app, web.session.DiskStore('bexmlsrv_sessions'), initializer={'uri': None})
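The new renderers above pass everything through FixupTypes first because uuid.UUID values (and containers holding them) are not directly JSON-serialisable. A minimal standalone sketch of that fix-up, using only the standard library; the name mirrors the diff but this is an illustration of the idea, not the committed module:

    from uuid import UUID, uuid4
    import json

    def fixup_types(v):
        # uuid.UUID is not JSON-serialisable; v.urn is "urn:uuid:<hex>", so
        # dropping the first nine characters leaves the plain uuid string.
        if isinstance(v, UUID):
            return v.urn[9:]
        if isinstance(v, dict):
            return dict((k, fixup_types(x)) for k, x in v.items())
        if isinstance(v, list):
            return [fixup_types(x) for x in v]
        return v

    payload = {'uuid': uuid4(), 'comments': [uuid4(), uuid4()]}
    print(json.dumps(fixup_types(payload)))   # now serialisable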
@@ -70,6 +92,8 @@ def __init__(self, session, uri, parser):
self.session=session
self.uri=uri
self.parser=parser
+ self.generators={}
+ # TODO: Find some way of poking a hook into session such that when it expires, it deletes this object
def excfilter(func):
def dec(*args, **kwargs):
@@ -117,7 +141,7 @@ class open:
txt = render_txt
)
@excfilter
- def GET(self):
+ def GET(self, api):
query=urlparse.parse_qs(web.ctx.env['QUERY_STRING'])
if 'uri' not in query:
raise AssertionError, "Need the uri parameter"
@@ -133,9 +157,36 @@ def GET(self):
raise AssertionError, "Path '"+up.netloc+"' not found under the current working directory"
session.uri=uri
sessionToParserInfo[session.session_id]=ParserInfo(session, uri, BEXML(session.uri))
+ return {'version': '0.01', 'result': 'OK'}
+
+class cancel:
+ @mimerender(
+ default = 'html',
+ html = render_html,
+ xml = render_xml,
+ json = render_json,
+ txt = render_txt
+ )
+ @excfilter
+ def GET(self, api):
+ query=urlparse.parse_qs(web.ctx.env['QUERY_STRING'])
+ if session.uri is None:
+ raise AssertionError, "Need to call /open to open a parsing session first"
+ if api not in parserAPIs:
+ raise AssertionError, "API '"+api+"' not in allowed APIs: "+repr(parserAPIs)
+ reloaded=False
+ if session.session_id in sessionToParserInfo:
+ pi=sessionToParserInfo[session.session_id]
+ else:
+ raise AssertionError, "Cannot cancel in an invalid session"
+ method=getattr(pi.parser, api)
+ if method not in pi.generators:
+ raise AssertionError, "Cannot cancel an API not in operation"
+ else:
+ del pi.generators[method]
return {'result': 'OK'}
-class parser:
+class catchall:
@mimerender(
default = 'html',
html = render_html,
@@ -156,15 +207,27 @@ def GET(self, api):
else:
sessionToParserInfo[session.session_id]=pi=ParserInfo(session, uri, BEXML(session.uri))
reloaded=True
- method=parserAPIs[api].__get__(pi.parser, type(pi.parser))
- methodspec=inspect.getargspec(method)
- mandpars=[x for x in methodspec.args if x is not 'self']
- if methodspec.defaults is not None:
- mandpars=mandpars[:-len(methodspec.defaults)]
- for parameter in mandpars:
- if parameter not in query:
- raise AssertionError, "Mandatory parameter '"+parameter+"' is missing"
- return {'reloaded' : reloaded, 'result': method(**query)}
+ method=getattr(pi.parser, api)
+ if method not in pi.generators:
+ methodspec=inspect.getargspec(method)
+ mandpars=[x for x in methodspec.args if x is not 'self']
+ if methodspec.defaults is not None:
+ mandpars=mandpars[:-len(methodspec.defaults)]
+ for parameter in mandpars:
+ if parameter not in query:
+ raise AssertionError, "Mandatory parameter '"+parameter+"' is missing"
+ # urlparse allows multiple values per parameter, we only want the first
+ query[parameter]=query[parameter][0]
+ result=method(**query)
+ if isinstance(result, types.GeneratorType):
+ pi.generators[method]=result
+ if method in pi.generators:
+ try:
+ result=pi.generators[method].next()
+ except StopIteration:
+ del pi.generators[method]
+ result=None
+ return {'reloaded' : reloaded, 'result': result}
if __name__ == "__main__":
app.run()
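The rewritten catchall handler turns any coroutine (generator) returned by a parser method into a "one item per request" API: the generator is cached per session in pi.generators, each subsequent GET to the same path advances it by one step, and a result of None tells the client the iteration has finished. A self-contained sketch of that pattern with hypothetical names and no web.py involved:

    import types

    class FakeParser(object):
        """Hypothetical stand-in for a libBEXML parser."""
        def parseIssues(self):
            for n in (1, 2, 3):
                yield {'issue': n}

    generators = {}   # per-session cache, playing the role of ParserInfo.generators above

    def step(parser, api):
        method = getattr(parser, api)
        if method not in generators:
            result = method()
            if isinstance(result, types.GeneratorType):
                generators[method] = result
        if method in generators:
            try:
                result = next(generators[method])
            except StopIteration:
                del generators[method]
                result = None   # tells the caller the iteration has finished
        return result

    p = FakeParser()
    print(step(p, 'parseIssues'))   # {'issue': 1}
    print(step(p, 'parseIssues'))   # {'issue': 2}
    print(step(p, 'parseIssues'))   # {'issue': 3}
    print(step(p, 'parseIssues'))   # None

This works across requests because two bound methods fetched with getattr on the same instance compare and hash equal, so the dictionary lookup finds the cached generator again.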
@@ -1,4 +1,4 @@
-__all__ = ['autoinit', 'bexml', 'coerce_datetime', 'comment', 'issue', 'parserbase', 'parsers', 'propertieddictionary']
+__all__ = ['bexml', 'coerce_datetime', 'comment', 'issue', 'parserbase', 'parsers', 'propertieddictionary']
# Don't modify the line above, or this line!
import automodinit
automodinit.automodinit(__name__, __file__, globals())
@@ -113,26 +113,6 @@ def isStale(self):
"""True if the file backing for this comment is newer than us"""
pass
- def match(self, commentfilter):
- """Returns true if this comment matches commentfilter"""
- if issuefilter.uuid!=nullUUID:
- if not re.match(str(commentfilter.uuid), str(self.uuid)): return False
- if issuefilter.short_name!="":
- if not re.search(commentfilter.short_name, self.short_name): return False
- if issuefilter.alt_id!="":
- if not re.search(commentfilter.alt_id, self.alt_id): return False
- if issuefilter.in_reply_to!=nullUUID:
- if not re.match(str(commentfilter.in_reply_to), str(self.in_reply_to)): return False
- if issuefilter.author!="":
- if not re.search(commentfilter.author, self.author): return False
- if issuefilter.date!=nullDatetime:
- if not re.search(str(commentfilter.date), str(self.date)): return False
- if issuefilter.content_type!="":
- if not re.search(commentfilter.content_type, self.content_type): return False
- if issuefilter.body!="":
- if not re.search(commentfilter.body, self.body): return False
- return True
-
@abstractmethod
def load(self, reload=False):
"""Loads in the comment from the backing store"""
@@ -35,7 +35,6 @@
"""
from abc import ABCMeta, abstractmethod, abstractproperty
-import re
from uuid import UUID
from datetime import datetime
@@ -119,26 +118,6 @@ def isStale(self):
"""True if the backing for this issue is newer than us"""
pass
- def match(self, issuefilter):
- """Returns true if this issue matches issuefilter"""
- if issuefilter.uuid!=nullUUID:
- if not re.match(str(issuefilter.uuid), str(self.uuid)): return False
- if issuefilter.short_name!="":
- if not re.search(issuefilter.short_name, self.short_name): return False
- if issuefilter.severity!="":
- if not re.search(issuefilter.severity, self.severity): return False
- if issuefilter.status!="":
- if not re.search(issuefilter.status, self.status): return False
- if issuefilter.reporter!="":
- if not re.search(issuefilter.reporter, self.reporter): return False
- if issuefilter.creator!="":
- if not re.search(issuefilter.creator, self.creator): return False
- if issuefilter.created!=nullDatetime:
- if not re.search(str(issuefilter.created), str(self.created)): return False
- if issuefilter.summary!="":
- if not re.search(issuefilter.summary, self.summary): return False
- return True
-
def addComment(self, comment):
"""Adds a comment to the issue"""
assert isinstance(comment, Comment)
@@ -41,6 +41,11 @@ def reload(self):
pass
@abstractmethod
- def parse(self, issuefilter=None):
+ def parseIssues(self, issuefilter=None):
"""Coroutine parsing the issues at the uri filtering out anything not matching issuefilter"""
pass
+
+ @abstractmethod
+ def parseComments(self, issue_uuid, commentfilter=None):
+ """Coroutine parsing the comments for issue_uuid filtering out anything not matching commentfilter"""
+ pass
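The single parse() coroutine is split into two abstract coroutines, parseIssues() and parseComments(issue_uuid). A rough sketch of what a concrete backend now has to provide, using a made-up in-memory subclass purely for illustration:

    class ListParser(object):
        """Hypothetical minimal backend honouring the new two-coroutine contract."""
        def __init__(self, issues):
            # issues maps an issue uuid to a list of its comments
            self.issues = issues

        def parseIssues(self, issuefilter=None):
            for issue_uuid in self.issues:
                yield issue_uuid        # a real backend yields Issue objects

        def parseComments(self, issue_uuid, commentfilter=None):
            for comment in self.issues[issue_uuid]:
                yield comment

    p = ListParser({'a1': ['first comment', 'second comment'], 'b2': []})
    for uuid in p.parseIssues():
        print("%s -> %r" % (uuid, list(p.parseComments(uuid))))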
@@ -9,6 +9,7 @@
import urllib2, os, re, codecs, logging
from urlparse import urlparse
from collections import namedtuple
+from uuid import UUID
import yaml
log=logging.getLogger(__name__)
@@ -180,7 +181,7 @@ def reload(self):
for commentuuid in issue.comments:
issue.comments[commentuuid].uuid
- def parse(self, issuefilter=None):
+ def parseIssues(self, issuefilter=None):
if len(self.__bedir)==0:
self.reload()
for bugdir in self.__bedir:
@@ -190,16 +191,29 @@ def parse(self, issuefilter=None):
# Refresh if loaded and stale
if issue.isLoaded and issue.tracksStaleness and issue.isStale:
issue.load(True)
- if issuefilter is None:
+ if issuefilter is None or issue._match(issuefilter):
yield issue
- else:
- if issuefilter.match(issue):
- yield issue
if not self.cache_in_memory:
# Replace with a fresh structure. If the caller took
# a copy of the issue, it'll live on, otherwise it'll
# get GCed
self.__loadIssueAndComments(issueuuid, os.path.dirname(issue.dirpath))
+ def parseComments(self, issue_uuid, commentfilter=None):
+ if not isinstance(issue_uuid, str):
+ issue_uuid=str(issue_uuid)
+ if len(self.__bedir)==0:
+ self.reload()
+ issue=None
+ for bugdir in self.__bedir:
+ issueuuids=self.__bedir[bugdir]
+ if issue_uuid in issueuuids:
+ issue=issueuuids[issue_uuid]
+ break
+ if issue is None:
+ raise AssertionError, "Issue uuid '"+str(issue_uuid)+"' not found"
+ for commentuuid in issue.comments:
+ yield issue.comments[commentuuid]
+
def instantiate(uri, **args):
return BEDirParser(uri, **args)
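For comparison with the HTTP route, the same coroutines can be driven directly through the library. A hedged usage sketch only: it assumes the libBEXML package is importable from the current directory, that BEXML(uri) returns a parser exposing these coroutines (as the server code above relies on), and that a Bugs Everywhere checkout exists at the path below.

    from libBEXML import BEXML

    # Path is an assumption borrowed from the test suite in this commit.
    parser = BEXML("file://tests/bugs.bugseverywhere.org")
    for issue in parser.parseIssues():
        print("%s: %s" % (issue.uuid, issue.summary))
        for comment in parser.parseComments(issue.uuid):
            print("    comment %s" % comment.uuid)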
@@ -306,7 +306,7 @@ def __repr__(self):
for key in self:
value=unicode(self[key])
for f in filters:
- f.matched=f.matched or (f.property.match(key) and f.value.match(value))
+ f.matched=f.matched or (f.property.match(key) and f.value.search(value))
matchedAnything=False
for f in filters:
if f.required and not f.matched:
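The one-line change above swaps value.match() for value.search(): re.match only anchors at the start of the string, while re.search finds the pattern anywhere, so value filters now behave like regex searches rather than prefix matches. A two-line demonstration:

    import re
    pattern = re.compile("everywhere")
    print(bool(pattern.match("bugs everywhere")))    # False: pattern is not at the start
    print(bool(pattern.search("bugs everywhere")))   # True: found anywhere in the string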
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# BEXML, a fast Bugs Everywhere parser with RESTful API and other issue tracker backends
+# (C) 2012 Niall Douglas http://www.nedproductions.biz/
+# Created: March 2012
+#
+# Deliberately written to compile in IronPython and PyPy
+
+import sys
+if sys.path[0]!='.': sys.path.insert(0, '.')
+from bexmlsrv import app
+import logging, time, unittest, omnijson as json, urllib
+
+class TestParseBErepoWithLib(unittest.TestCase):
+ def setUp(self):
+ logging.basicConfig(level=logging.WARN)
+ start=time.time()
+ end=time.time()
+ self.emptyloop=end-start
+ self.cookies={}
+
+ def request(self, api, pars=None):
+ req=api+('?'+urllib.urlencode(pars) if pars is not None else "")
+ headers={'Accept' : 'application/json'}
+ if len(self.cookies):
+ cookie=""
+ for key in self.cookies:
+ if cookie!="": cookie+='; '
+ cookie+=key+'='+self.cookies[key]
+ headers['Cookie']= cookie
+ response=app.request(req, headers=headers)
+ self.assertEqual(response.status, "200 OK", "API call to '"+req+"' failed, response was "+repr(response))
+ if 'Set-Cookie' in response.headers:
+ cookie=response.headers['Set-Cookie']
+ key, sep, value=cookie.partition('=')
+ if ';' in value: value=value[:value.find(';')]
+ self.cookies[key]=value
+ data=json.loads(response.data)
+ return (data, response)
+
+ def test(self):
+ print "API list is", self.request("/apilist")[0]
+
+ self.request("/open", { 'uri' : "file://tests/bugs.bugseverywhere.org"})
+
+ print("\nIssues in the bugs everywhere repository:")
+ start=time.time()
+ self.request("/reload")
+ end=time.time()
+ print("Loading the bugs everywhere repository took %f secs" % (end-start-self.emptyloop))
+
+ start=time.time()
+ issues=comments=0
+ while True:
+ data=self.request("/parseIssues")[0]
+ issue=data['result']
+ if issue is None: break
+ issues+=1
+ #print " "+issue['uuid']+": "+issue['summary']
+ while True:
+ data=self.request("/parseComments", {'issue_uuid': issue['uuid']})[0]
+ comment=data['result']
+ if comment is None: break
+ comments+=1
+ end=time.time()
+ print("Reading %d issues and %d comments from the bugs everywhere repository for the first time took %f secs" % (issues, comments, end-start-self.emptyloop))
+
+if __name__=="__main__":
+ unittest.main()
+