Skip to content
This repository

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse code

gevent/util.py: refactor and fixes

  • Loading branch information...
commit 66361d24ff91ce3bbbf6a71cbe0ec021b3e4c293 1 parent e3bfad2
Trey Smith trey0 authored

Showing 1 changed file with 81 additions and 38 deletions. Show diff stats Hide diff stats

  1. +81 38 geocamUtil/gevent/util.py
119 geocamUtil/gevent/util.py
@@ -3,58 +3,101 @@
3 3 import os
4 4 import fcntl
5 5 import errno
  6 +import re
6 7
7 8 import gevent
8 9 from gevent import socket
9 10 from gevent.queue import Queue
10 11
11 12
12   -def copyFileToQueue(f, q):
class LineParser(object):
    """
    Incremental line splitter.  Feed text in with write(); each complete
    line (terminator included) is passed to *handler*.  If
    *maxLineLength* is not None, long runs are broken up so that every
    emitted line has length at most *maxLineLength*.  Call flush() at
    EOF to emit any trailing partial line.
    """

    def __init__(self, handler, maxLineLength=None):
        self._buf = ''  # text received but not yet emitted as lines
        self._handler = handler
        self._maxLineLength = maxLineLength

    def write(self, text):
        """Append *text* to the buffer and emit any complete lines."""
        self._buf += text
        while 1:
            # NOTE(review): a '\r\n' pair split across two write() calls
            # is emitted as two separate line breaks -- confirm callers
            # can tolerate that.
            m = re.search(r'\r\n|\n|\r', self._buf)
            ind = m.end() if m else None
            if self._maxLineLength is not None:
                if ind is None:
                    # no terminator yet; force a break once the buffer
                    # reaches the limit
                    if len(self._buf) >= self._maxLineLength:
                        ind = self._maxLineLength
                elif ind > self._maxLineLength:
                    # bug fix: a terminated line longer than the limit
                    # used to be emitted whole, violating the
                    # maxLineLength contract; break it up instead
                    ind = self._maxLineLength
            if ind is None:
                break
            self._handler(self._buf[:ind])
            self._buf = self._buf[ind:]

    def flush(self):
        """Emit any buffered partial line (call this at EOF)."""
        if self._buf:
            self._handler(self._buf)
            self._buf = ''
  39 +
  40 +
# Sentinel returned by safeRead() at end of file; compare with "is".
END_OF_FILE = ('__EOF__',)


def safeRead(fd, chunkSize):
    """
    Read up to *chunkSize* bytes from file descriptor *fd*.

    Returns an empty string (not an error) if the non-blocking read
    would block, and the END_OF_FILE sentinel at end of file.
    """
    try:
        # bug fix: the original ignored chunkSize and always read 1024
        chunk = os.read(fd, chunkSize)
    except OSError as ex:
        # EAGAIN just means "no data yet" on a non-blocking fd
        if ex.errno == errno.EAGAIN:
            return b''
        raise
    if not chunk:
        return END_OF_FILE
    return chunk
  54 +
  55 +
def setNonBlocking(fd):
    """Put file descriptor *fd* into non-blocking mode."""
    currentFlags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
    fcntl.fcntl(fd, fcntl.F_SETFL, currentFlags | os.O_NONBLOCK)
  60 +
  61 +
def copyFileToQueue(f, q, maxLineLength=None):
    """
    Given a file or file descriptor *f* (probably the output pipe from a
    subprocess), asynchronously copy lines from the file into gevent
    Queue *q* until EOF is reached. If *maxLineLength* is not None, break
    up long lines to have length at most that long.
    """
    haveFileObject = isinstance(f, file)
    fd = f.fileno() if haveFileObject else f

    try:
        lineParser = LineParser(q.put, maxLineLength)

        setNonBlocking(fd)
        while True:
            chunk = safeRead(fd, 1024)
            if chunk is END_OF_FILE:
                break
            if chunk:
                lineParser.write(chunk)
            try:
                # poll with a short sleep rather than blocking on the fd
                gevent.sleep(0.1)
                # socket.wait_read(fd) # hangs. why?
            except socket.timeout:
                pass

        # emit any trailing partial line
        lineParser.flush()

    finally:
        # close whichever handle we were given
        if haveFileObject:
            f.close()
        else:
            os.close(fd)
        # StopIteration marks end-of-stream for consumers of the queue
        q.put(StopIteration)
def queueFromFile(f, maxLineLength=None):
    """
    Return a gevent Queue that a spawned greenlet fills with lines
    copied from file *f* (see copyFileToQueue), ending with
    StopIteration at EOF.
    """
    resultQueue = Queue()
    gevent.spawn(copyFileToQueue, f, resultQueue, maxLineLength)
    return resultQueue

0 comments on commit 66361d2

Please sign in to comment.
Something went wrong with that request. Please try again.