Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Newer
Older
100755 90 lines (70 sloc) 2.463 kB
ef0440b @rnewson Python variant of couchdb external hook.
authored
1 #!/usr/bin/python
2
416a059 @davisp Refactored the external hook a bit.
davisp authored
3 import httplib
4 import optparse as op
5 import sys
6 import traceback
9e37cdb @rnewson url escape query parameters.
authored
7 import urllib
7217c19 @rnewson simplify Python script.
authored
8
ef0440b @rnewson Python variant of couchdb external hook.
authored
# Prefer the stdlib json module (Python 2.6+); fall back to the
# simplejson package on older interpreters. Catch only ImportError so
# genuine errors (e.g. KeyboardInterrupt) are not silently swallowed.
try:
    import json
except ImportError:
    import simplejson as json
13
416a059 @davisp Refactored the external hook a bit.
davisp authored
# optparse usage banner; "%prog" expands to the script's name.
__usage__ = "%prog [OPTIONS]"
def options():
    """Return the optparse options accepted by this script."""
    host_option = op.make_option(
        '-u', '--url', dest='url', default="127.0.0.1",
        help="Host of the CouchDB-Lucene indexer. [%default]")
    port_option = op.make_option(
        '-p', '--port', dest='port', type='int', default=5985,
        help="Port of the CouchDB-Lucene indexer. [%default]")
    return [host_option, port_option]
25
def main():
    """Run the CouchDB external-hook loop.

    Reads one JSON request per line from stdin (CouchDB's external
    protocol), proxies each to a couchdb-lucene indexer over HTTP, and
    writes one JSON response per line to stdout.
    """
    parser = op.OptionParser(usage=__usage__, option_list=options())
    opts, args = parser.parse_args()

    if args:
        parser.error("Unrecognized arguments: %s" % ' '.join(args))

    res = httplib.HTTPConnection(opts.url, opts.port)
    for req in requests():
        try:
            resp = respond(res, req)
        # NOTE: was "except Exception, e:" — Py2-only syntax that bound
        # an exception object which was never used; the traceback module
        # formats the active exception directly.
        except Exception:
            body = traceback.format_exc()
            resp = mkresp(500, body, {"Content-Type": "text/plain"})
            # The connection may be left in an unusable state after an
            # error, so reconnect before handling the next request.
            res = httplib.HTTPConnection(opts.url, opts.port)

        # One JSON document per line, flushed immediately, as CouchDB's
        # external protocol requires.
        sys.stdout.write(json.dumps(resp))
        sys.stdout.write("\n")
        sys.stdout.flush()
45
ef0440b @rnewson Python variant of couchdb external hook.
authored
def requests():
    """Yield one decoded JSON request per line read from stdin.

    Stops when stdin reaches EOF (readline returns an empty string).
    """
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        yield json.loads(line)
51
416a059 @davisp Refactored the external hook a bit.
davisp authored
def respond(res, req):
    """Translate one CouchDB external request into a couchdb-lucene GET.

    res -- an httplib.HTTPConnection to the couchdb-lucene indexer.
    req -- the decoded external-protocol request dict; "path" is the
           split request path, "query" the query parameters, "headers"
           the client's request headers.

    Returns a response dict (see mkresp) suitable for serializing back
    to CouchDB. Raises on connection errors; the caller reconnects.
    """
    path = req.get("path", [])

    # Expected: /db_name/_fti/docid/index_name
    if len(path) != 4:
        body = "\n".join([
            "Invalid path: %s" % '/'.join([''] + path),
            "Paths should be: /db_name/_fti/docid/index_name?q=...",
            "'docid' is from the '_design/docid' that defines index_name"
        ])
        return mkresp(400, body, {"Content-Type": "text/plain"})

    # Rewrite to couchdb-lucene's /search/db/docid/index form, with the
    # original query string URL-escaped and re-attached.
    path = '/'.join(['', 'search', path[0], path[2], path[3]])
    path = '?'.join([path, urllib.urlencode(req["query"])])

    # Forward only the cache-relevant request headers. (Was indexing
    # req["headers"][h] a second time with a list default for the dict;
    # iterate the items directly instead.)
    req_headers = {}
    for h, v in req.get("headers", {}).items():
        if h.lower() in ["accept", "if-none-match"]:
            req_headers[h] = v

    res.request("GET", path, headers=req_headers)
    resp = res.getresponse()

    # Pass back only content-type and etag so CouchDB's cache handling
    # (304 on If-None-Match) keeps working. (Was re-fetching each value
    # via resp.getheader(h, []) with a bogus [] default; the value is
    # already in hand from getheaders().)
    resp_headers = {}
    for h, v in resp.getheaders():
        if h.lower() in ["content-type", "etag"]:
            resp_headers[h] = v

    return mkresp(resp.status, resp.read(), resp_headers)
416a059 @davisp Refactored the external hook a bit.
davisp authored
80
def mkresp(code, body, headers=None):
    """Build a response dict for CouchDB's external protocol.

    The "headers" key is included only when headers is explicitly
    supplied (non-None), so cached/minimal responses stay minimal.
    """
    response = {"code": code, "body": body}
    if headers is None:
        return response
    response["headers"] = headers
    return response
ef0440b @rnewson Python variant of couchdb external hook.
authored
86
# Script entry point: run the external-hook loop when executed directly.
if __name__ == "__main__":
    main()
416a059 @davisp Refactored the external hook a bit.
davisp authored
89
Something went wrong with that request. Please try again.