Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.
Download ZIP
Browse files

Merge pull request #23 from willkg/celery-2.5-update

Celery 2.5 update
  • Loading branch information...
commit 22bfbe94b69b23f11a943d78d092f1e6e457533c 2 parents 7040835 + 275892b
@willkg willkg authored
Showing with 16,097 additions and 7,348 deletions.
  1. +6 −5 bin/camqadm
  2. +6 −5 bin/celerybeat
  3. +6 −5 bin/celeryctl
  4. +6 −5 bin/celeryd
  5. +6 −5 bin/celeryd-multi
  6. +6 −5 bin/celeryev
  7. +6 −5 bin/djcelerymon
  8. +1 −0  lib/python/amqplib/client_0_8/__init__.py
  9. +11 −1 lib/python/amqplib/client_0_8/abstract_channel.py
  10. +3 −8 lib/python/amqplib/client_0_8/basic_message.py
  11. +94 −2 lib/python/amqplib/client_0_8/channel.py
  12. +17 −1 lib/python/amqplib/client_0_8/connection.py
  13. +24 −7 lib/python/amqplib/client_0_8/method_framing.py
  14. +63 −52 lib/python/amqplib/client_0_8/serialization.py
  15. +81 −20 lib/python/amqplib/client_0_8/transport.py
  16. +61 −46 lib/python/anyjson/__init__.py
  17. +27 −1 lib/python/celery/__init__.py
  18. +210 −0 lib/python/celery/abstract.py
  19. +30 −0 lib/python/celery/actors.py
  20. +294 −0 lib/python/celery/app/__init__.py
  21. +55 −0 lib/python/celery/app/abstract.py
  22. +356 −0 lib/python/celery/app/amqp.py
  23. +38 −0 lib/python/celery/app/annotations.py
  24. +392 −0 lib/python/celery/app/base.py
  25. +255 −0 lib/python/celery/app/defaults.py
  26. +733 −0 lib/python/celery/app/task/__init__.py
  27. +44 −48 lib/python/celery/apps/beat.py
  28. +215 −176 lib/python/celery/apps/worker.py
  29. +34 −44 lib/python/celery/backends/__init__.py
  30. +186 −168 lib/python/celery/backends/amqp.py
  31. +197 −39 lib/python/celery/backends/base.py
  32. +82 −36 lib/python/celery/backends/cache.py
  33. +115 −114 lib/python/celery/backends/cassandra.py
  34. +57 −24 lib/python/celery/backends/database.py
  35. +90 −21 lib/python/celery/backends/mongodb.py
  36. +27 −118 lib/python/celery/backends/pyredis.py
  37. +116 −0 lib/python/celery/backends/redis.py
  38. +22 −12 lib/python/celery/backends/tyrant.py
  39. +241 −164 lib/python/celery/beat.py
  40. +208 −21 lib/python/celery/bin/base.py
  41. +42 −34 lib/python/celery/bin/camqadm.py
  42. +47 −19 lib/python/celery/bin/celerybeat.py
  43. +282 −79 lib/python/celery/bin/celeryctl.py
  44. +58 −40 lib/python/celery/bin/celeryd.py
  45. +42 −55 lib/python/celery/bin/celeryd_detach.py
  46. +88 −38 lib/python/celery/bin/celeryd_multi.py
  47. +92 −54 lib/python/celery/bin/celeryev.py
  48. +16 −0 lib/python/celery/concurrency/__init__.py
  49. +114 −0 lib/python/celery/concurrency/base.py
  50. +135 −0 lib/python/celery/concurrency/eventlet.py
  51. +123 −0 lib/python/celery/concurrency/gevent.py
  52. +78 −110 lib/python/celery/concurrency/processes/__init__.py
  53. +108 −0 lib/python/celery/concurrency/processes/_win.py
  54. +186 −0 lib/python/celery/concurrency/processes/forking.py
  55. +357 −115 lib/python/celery/concurrency/processes/pool.py
  56. +13 −0 lib/python/celery/concurrency/processes/process.py
  57. +21 −0 lib/python/celery/concurrency/solo.py
  58. +27 −49 lib/python/celery/concurrency/threads.py
  59. +96 −294 lib/python/celery/conf.py
  60. +11 −7 lib/python/celery/contrib/abortable.py
  61. +178 −36 lib/python/celery/contrib/batches.py
  62. +47 −0 lib/python/celery/contrib/bundles.py
  63. +96 −0 lib/python/celery/contrib/migrate.py
  64. +156 −0 lib/python/celery/contrib/rdb.py
  65. +444 −168 lib/python/celery/datastructures.py
  66. +15 −12 lib/python/celery/db/a805d4bd.py
  67. +7 −6 lib/python/celery/db/dfd042c7.py
  68. +18 −11 lib/python/celery/db/models.py
  69. +11 −6 lib/python/celery/db/session.py
  70. +27 −70 lib/python/celery/decorators.py
  71. +169 −66 lib/python/celery/events/__init__.py
  72. +142 −54 lib/python/celery/events/cursesmon.py
  73. +23 −9 lib/python/celery/events/dumper.py
  74. +57 −33 lib/python/celery/events/snapshot.py
  75. +118 −75 lib/python/celery/events/state.py
  76. +42 −18 lib/python/celery/exceptions.py
  77. +3 −200 lib/python/celery/execute/__init__.py
  78. +238 −100 lib/python/celery/execute/trace.py
  79. +24 −20 lib/python/celery/loaders/__init__.py
  80. +23 −0 lib/python/celery/loaders/app.py
  81. +165 −39 lib/python/celery/loaders/base.py
  82. +51 −36 lib/python/celery/loaders/default.py
  83. +153 −0 lib/python/celery/local.py
  84. +228 −164 lib/python/celery/log.py
  85. +0 −16 lib/python/celery/management/commands/celeryd.py
  86. +13 −316 lib/python/celery/messaging.py
  87. +0 −56 lib/python/celery/models.py
  88. +413 −143 lib/python/celery/platforms.py
  89. +23 −32 lib/python/celery/registry.py
  90. +330 −225 lib/python/celery/result.py
  91. +45 −39 lib/python/celery/routes.py
  92. +106 −77 lib/python/celery/schedules.py
  93. +79 −0 lib/python/celery/security/__init__.py
  94. +90 −0 lib/python/celery/security/certificate.py
  95. +29 −0 lib/python/celery/security/key.py
  96. +88 −0 lib/python/celery/security/serialization.py
  97. +32 −3 lib/python/celery/signals.py
  98. +13 −4 lib/python/celery/states.py
  99. +66 −53 lib/python/celery/task/__init__.py
  100. +21 −576 lib/python/celery/task/base.py
  101. +0 −68 lib/python/celery/task/builtins.py
  102. +68 −0 lib/python/celery/task/chords.py
  103. +177 −142 lib/python/celery/task/control.py
  104. +49 −32 lib/python/celery/task/http.py
  105. +9 −1 lib/python/celery/task/schedules.py
  106. +86 −142 lib/python/celery/task/sets.py
  107. +62 −16 lib/python/celery/tests/__init__.py
  108. +2 −2 lib/python/celery/tests/compat.py
  109. +25 −6 lib/python/celery/tests/config.py
  110. +21 −16 lib/python/celery/tests/functional/case.py
  111. +3 −1 lib/python/celery/tests/functional/tasks.py
  112. +325 −0 lib/python/celery/tests/test_app/__init__.py
  113. +27 −0 lib/python/celery/tests/test_app/test_actors.py
  114. +53 −0 lib/python/celery/tests/test_app/test_annotations.py
  115. +103 −0 lib/python/celery/tests/test_app/test_app_amqp.py
  116. +43 −0 lib/python/celery/tests/test_app/test_app_defaults.py
  117. +93 −28 lib/python/celery/tests/{ → test_app}/test_beat.py
  118. +3 −2 lib/python/celery/tests/{ → test_app}/test_celery.py
  119. +245 −0 lib/python/celery/tests/test_app/test_loaders.py
  120. +41 −90 lib/python/celery/tests/{ → test_app}/test_log.py
  121. +58 −32 lib/python/celery/tests/{ → test_app}/test_routes.py
  122. +18 −10 lib/python/celery/tests/test_backends/__init__.py
  123. +0 −60 lib/python/celery/tests/test_backends/disabled_amqp.py
  124. +299 −0 lib/python/celery/tests/test_backends/test_amqp.py
  125. +161 −44 lib/python/celery/tests/test_backends/test_base.py
  126. +170 −71 lib/python/celery/tests/test_backends/test_cache.py
  127. +73 −28 lib/python/celery/tests/test_backends/test_database.py
  128. +280 −0 lib/python/celery/tests/test_backends/test_mongodb.py
  129. +21 −0 lib/python/celery/tests/test_backends/test_pyredis_compat.py
  130. +27 −67 lib/python/celery/tests/test_backends/test_redis.py
  131. +165 −0 lib/python/celery/tests/test_backends/test_redis_unit.py
  132. +8 −6 lib/python/celery/tests/test_backends/test_tyrant.py
  133. +91 −0 lib/python/celery/tests/test_bin/__init__.py
  134. +0 −49 lib/python/celery/tests/test_bin/test_base.py
  135. +117 −23 lib/python/celery/tests/test_bin/test_celerybeat.py
  136. +343 −81 lib/python/celery/tests/test_bin/test_celeryd.py
  137. +63 −0 lib/python/celery/tests/test_bin/test_celeryev.py
  138. 0  lib/python/celery/{management/commands → tests/test_compat}/__init__.py
  139. +33 −0 lib/python/celery/tests/test_compat/test_decorators.py
  140. +19 −0 lib/python/celery/tests/test_compat/test_messaging.py
  141. +71 −0 lib/python/celery/tests/test_concurrency/__init__.py
  142. +37 −0 lib/python/celery/tests/test_concurrency/test_concurrency_eventlet.py
  143. +53 −0 lib/python/celery/tests/test_concurrency/test_concurrency_gevent.py
  144. +88 −52 lib/python/celery/tests/{ → test_concurrency}/test_concurrency_processes.py
  145. +24 −0 lib/python/celery/tests/test_concurrency/test_concurrency_solo.py
  146. +20 −9 lib/python/celery/tests/{ → test_concurrency}/test_pool.py
  147. +0 −151 lib/python/celery/tests/test_datastructures.py
  148. +0 −74 lib/python/celery/tests/test_events.py
  149. +191 −0 lib/python/celery/tests/test_events/__init__.py
  150. +72 −0 lib/python/celery/tests/test_events/test_events_cursesmon.py
  151. +124 −0 lib/python/celery/tests/test_events/test_events_snapshot.py
  152. +100 −48 lib/python/celery/tests/{ → test_events}/test_events_state.py
  153. +0 −122 lib/python/celery/tests/test_loaders.py
  154. +0 −15 lib/python/celery/tests/test_messaging.py
  155. +0 −306 lib/python/celery/tests/test_result.py
  156. +133 −0 lib/python/celery/tests/test_security/__init__.py
  157. +14 −0 lib/python/celery/tests/test_security/case.py
  158. +44 −0 lib/python/celery/tests/test_security/test_certificate.py
  159. +26 −0 lib/python/celery/tests/test_security/test_key.py
  160. +55 −0 lib/python/celery/tests/test_security/test_serialization.py
  161. +0 −21 lib/python/celery/tests/test_serialization.py
  162. 0  lib/python/celery/{management → tests/test_slow}/__init__.py
  163. +28 −23 lib/python/celery/tests/{ → test_slow}/test_buckets.py
  164. +277 −209 lib/python/celery/tests/{test_task.py → test_task/__init__.py}
  165. +105 −0 lib/python/celery/tests/test_task/test_chord.py
  166. +159 −0 lib/python/celery/tests/test_task/test_context.py
  167. +93 −0 lib/python/celery/tests/test_task/test_execute_trace.py
  168. +15 −6 lib/python/celery/tests/{ → test_task}/test_registry.py
  169. +444 −0 lib/python/celery/tests/test_task/test_result.py
  170. +5 −2 lib/python/celery/tests/{ → test_task}/test_states.py
  171. +3 −2 lib/python/celery/tests/{ → test_task}/test_task_abortable.py
  172. +10 −0 lib/python/celery/tests/test_task/test_task_builtins.py
  173. +176 −0 lib/python/celery/tests/test_task/test_task_control.py
  174. +41 −59 lib/python/celery/tests/{ → test_task}/test_task_http.py
  175. +29 −45 lib/python/celery/tests/{ → test_task}/test_task_sets.py
  176. +0 −29 lib/python/celery/tests/test_task_builtins.py
  177. +0 −159 lib/python/celery/tests/test_task_control.py
  178. +0 −68 lib/python/celery/tests/test_timer2.py
  179. +0 −216 lib/python/celery/tests/test_utils.py
  180. +139 −0 lib/python/celery/tests/test_utils/__init__.py
Sorry, we could not display the entire diff because too many files (369) changed.
View
11 bin/camqadm
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','camqadm'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','camqadm'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'camqadm')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'camqadm')()
+ )
View
11 bin/celerybeat
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','celerybeat'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','celerybeat'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'celerybeat')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'celerybeat')()
+ )
View
11 bin/celeryctl
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','celeryctl'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','celeryctl'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'celeryctl')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'celeryctl')()
+ )
View
11 bin/celeryd
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','celeryd'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','celeryd'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'celeryd')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'celeryd')()
+ )
View
11 bin/celeryd-multi
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','celeryd-multi'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','celeryd-multi'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'celeryd-multi')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'celeryd-multi')()
+ )
View
11 bin/celeryev
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.1.4','console_scripts','celeryev'
-__requires__ = 'celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'celery==2.5.5','console_scripts','celeryev'
+__requires__ = 'celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('celery==2.1.4', 'console_scripts', 'celeryev')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('celery==2.5.5', 'console_scripts', 'celeryev')()
+ )
View
11 bin/djcelerymon
@@ -1,9 +1,10 @@
#!/usr/bin/env python
-# EASY-INSTALL-ENTRY-SCRIPT: 'django-celery==2.1.4','console_scripts','djcelerymon'
-__requires__ = 'django-celery==2.1.4'
+# EASY-INSTALL-ENTRY-SCRIPT: 'django-celery==2.5.5','console_scripts','djcelerymon'
+__requires__ = 'django-celery==2.5.5'
import sys
from pkg_resources import load_entry_point
-sys.exit(
- load_entry_point('django-celery==2.1.4', 'console_scripts', 'djcelerymon')()
-)
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('django-celery==2.5.5', 'console_scripts', 'djcelerymon')()
+ )
View
1  lib/python/amqplib/client_0_8/__init__.py
@@ -22,6 +22,7 @@
# Pull in the public items from the various sub-modules
#
from basic_message import *
+from channel import *
from connection import *
from exceptions import *
View
12 lib/python/amqplib/client_0_8/abstract_channel.py
@@ -20,6 +20,12 @@
from serialization import AMQPWriter
+try:
+ bytes
+except NameError:
+ # Python 2.5 and lower
+ bytes = str
+
__all__ = [
'AbstractChannel',
]
@@ -58,7 +64,7 @@ def __exit__(self, type, value, traceback):
self.close()
- def _send_method(self, method_sig, args='', content=None):
+ def _send_method(self, method_sig, args=bytes(), content=None):
"""
Send a method for our channel.
@@ -88,6 +94,10 @@ def wait(self, allowed_methods=None):
method_sig, args, content = self.connection._wait_method(
self.channel_id, allowed_methods)
+ return self.dispatch_method(method_sig, args, content)
+
+
+ def dispatch_method(self, method_sig, args, content):
if content \
and self.auto_decode \
and hasattr(content, 'content_encoding'):
View
11 lib/python/amqplib/client_0_8/basic_message.py
@@ -115,14 +115,8 @@ def __init__(self, body='', children=None, **properties):
application_headers={'foo': 7})
"""
- if isinstance(body, unicode):
- if properties.get('content_encoding', None) is None:
- properties['content_encoding'] = 'UTF-8'
- self.body = body.encode(properties['content_encoding'])
- else:
- self.body = body
-
super(Message, self).__init__(**properties)
+ self.body = body
def __eq__(self, other):
@@ -134,4 +128,5 @@ def __eq__(self, other):
which isn't compared.
"""
- return super(Message, self).__eq__(other) and (self.body == other.body)
+ return super(Message, self).__eq__(other) \
+ and hasattr(other, 'body') and (self.body == other.body)
View
96 lib/python/amqplib/client_0_8/channel.py
@@ -61,8 +61,8 @@ def __init__(self, connection, channel_id=None, auto_decode=True):
whether the library should attempt to decode the body
of Messages to a Unicode string if there's a 'content_encoding'
property for the message. If there's no 'content_encoding'
- property, or the decode raises an Exception, the plain string
- is left as the message body.
+ property, or the decode raises an Exception, the message body
+ is left as plain bytes.
"""
if channel_id is None:
@@ -1104,6 +1104,93 @@ def _queue_bind_ok(self, args):
pass
+ def queue_unbind(self, queue, exchange, routing_key='',
+ nowait=False, arguments=None, ticket=None):
+ """
+ NOTE::::This is not part of AMQP 0-8, but RabbitMQ supports this as
+ an extension
+
+ unbind a queue from an exchange
+
+ This method unbinds a queue from an exchange.
+
+ RULE:
+
+ If a unbind fails, the server MUST raise a connection exception.
+
+ PARAMETERS:
+ queue: shortstr
+
+ Specifies the name of the queue to unbind.
+
+ RULE:
+
+ The client MUST either specify a queue name or have
+ previously declared a queue on the same channel
+
+ RULE:
+
+ The client MUST NOT attempt to unbind a queue that
+ does not exist.
+
+ exchange: shortstr
+
+ The name of the exchange to unbind from.
+
+ RULE:
+
+ The client MUST NOT attempt to unbind a queue from an
+ exchange that does not exist.
+
+ RULE:
+
+ The server MUST accept a blank exchange name to mean
+ the default exchange.
+
+ routing_key: shortstr
+
+ routing key of binding
+
+ Specifies the routing key of the binding to unbind.
+
+ arguments: table
+
+ arguments of binding
+
+ Specifies the arguments of the binding to unbind.
+
+ """
+ if arguments is None:
+ arguments = {}
+
+ args = AMQPWriter()
+ if ticket is not None:
+ args.write_short(ticket)
+ else:
+ args.write_short(self.default_ticket)
+ args.write_shortstr(queue)
+ args.write_shortstr(exchange)
+ args.write_shortstr(routing_key)
+ #args.write_bit(nowait)
+ args.write_table(arguments)
+ self._send_method((50, 50), args)
+
+ if not nowait:
+ return self.wait(allowed_methods=[
+ (50, 51), # Channel.queue_unbind_ok
+ ])
+
+
+ def _queue_unbind_ok(self, args):
+ """
+ confirm unbind successful
+
+ This method confirms that the unbind was successful.
+
+ """
+ pass
+
+
def queue_declare(self, queue='', passive=False, durable=False,
exclusive=False, auto_delete=True, nowait=False,
arguments=None, ticket=None):
@@ -2589,6 +2676,7 @@ def _tx_select_ok(self, args):
(50, 21): _queue_bind_ok,
(50, 31): _queue_purge_ok,
(50, 41): _queue_delete_ok,
+ (50, 51): _queue_unbind_ok,
(60, 11): _basic_qos_ok,
(60, 21): _basic_consume_ok,
(60, 31): _basic_cancel_ok,
@@ -2600,3 +2688,7 @@ def _tx_select_ok(self, args):
(90, 21): _tx_commit_ok,
(90, 31): _tx_rollback_ok,
}
+
+ _IMMEDIATE_METHODS = [
+ (60, 50), # basic_return
+ ]
View
18 lib/python/amqplib/client_0_8/connection.py
@@ -36,7 +36,7 @@
#
LIBRARY_PROPERTIES = {
'library': 'Python amqplib',
- 'library_version': '0.6.1',
+ 'library_version': '1.0.2',
}
AMQP_LOGGER = logging.getLogger('amqplib')
@@ -84,6 +84,10 @@ def __init__(self,
If login_response is not specified, one is built up for you from
userid and password if they are present.
+ The 'ssl' parameter may be simply True/False, or for Python >= 2.6
+ a dictionary of options to pass to ssl.wrap_socket() such as
+ requiring certain certificates.
+
"""
if (login_response is None) \
and (userid is not None) \
@@ -204,6 +208,15 @@ def _wait_method(self, channel_id, allowed_methods):
return method_sig, args, content
#
+ # Certain methods like basic_return should be dispatched
+ # immediately rather than being queued, even if they're not
+ # one of the 'allowed_methods' we're looking for.
+ #
+ if (channel != 0) and (method_sig in Channel._IMMEDIATE_METHODS):
+ self.channels[channel].dispatch_method(method_sig, args, content)
+ continue
+
+ #
# Not the channel and/or method we were looking for. Queue
# this method for later
#
@@ -824,3 +837,6 @@ def _x_tune_ok(self, channel_max, frame_max, heartbeat):
(10, 60): _close,
(10, 61): _close_ok,
}
+
+
+ _IMMEDIATE_METHODS = []
View
31 lib/python/amqplib/client_0_8/method_framing.py
@@ -22,6 +22,12 @@
from struct import pack, unpack
try:
+ bytes
+except NameError:
+ # Python 2.5 and lower
+ bytes = str
+
+try:
from collections import defaultdict
except:
class defaultdict(dict):
@@ -88,7 +94,7 @@ def add_payload(self, payload):
self.body_received += len(payload)
if self.body_received == self.body_size:
- self.msg.body = ''.join(self.body_parts)
+ self.msg.body = bytes().join(self.body_parts)
self.complete = True
@@ -230,15 +236,26 @@ def __init__(self, dest, frame_max):
def write_method(self, channel, method_sig, args, content=None):
payload = pack('>HH', method_sig[0], method_sig[1]) + args
+ if content:
+ # do this early, so we can raise an exception if there's a
+ # problem with the content properties, before sending the
+ # first frame
+ body = content.body
+ if isinstance(body, unicode):
+ coding = content.properties.get('content_encoding', None)
+ if coding is None:
+ coding = content.properties['content_encoding'] = 'UTF-8'
+
+ body = body.encode(coding)
+ properties = content._serialize_properties()
+
self.dest.write_frame(1, channel, payload)
if content:
- body = content.body
- payload = pack('>HHQ', method_sig[0], 0, len(body)) + \
- content._serialize_properties()
+ payload = pack('>HHQ', method_sig[0], 0, len(body)) + properties
self.dest.write_frame(2, channel, payload)
- while body:
- payload, body = body[:self.frame_max - 8], body[self.frame_max -8:]
- self.dest.write_frame(3, channel, payload)
+ chunk_size = self.frame_max - 8
+ for i in xrange(0, len(body), chunk_size):
+ self.dest.write_frame(3, channel, body[i:i+chunk_size])
View
115 lib/python/amqplib/client_0_8/serialization.py
@@ -21,38 +21,34 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
import string
+import sys
from datetime import datetime
from decimal import Decimal
from struct import pack, unpack
from time import mktime
-try:
- from cStringIO import StringIO
-except:
- from StringIO import StringIO
-
-
-DUMP_CHARS = string.letters + string.digits + string.punctuation
-
-def _hexdump(s):
- """
- Present just for debugging help.
-
- """
- while s:
- x, s = s[:16], s[16:]
+IS_PY3K = sys.version_info[0] >= 3
- hex = ['%02x' % ord(ch) for ch in x]
- hex = ' '.join(hex).ljust(50)
+if IS_PY3K:
+ def byte(n):
+ return bytes([n])
+else:
+ byte = chr
- char_dump = []
- for ch in x:
- if ch in DUMP_CHARS:
- char_dump.append(ch)
- else:
- char_dump.append('.')
+try:
+ from io import BytesIO
+except:
+ # Python 2.5 and lower
+ try:
+ from cStringIO import StringIO as BytesIO
+ except:
+ from StringIO import StringIO as BytesIO
- print hex + ''.join(char_dump)
+try:
+ bytes
+except NameError:
+ # Python 2.5 and lower
+ bytes = str
class AMQPReader(object):
@@ -66,8 +62,8 @@ def __init__(self, source):
a plain (non-unicode) string.
"""
- if isinstance(source, str):
- self.input = StringIO(source)
+ if isinstance(source, bytes):
+ self.input = BytesIO(source)
elif hasattr(source, 'read'):
self.input = source
else:
@@ -141,8 +137,10 @@ def read_longlong(self):
def read_shortstr(self):
"""
- Read a utf-8 encoded string that's stored in up to
- 255 bytes. Return it decoded as a Python unicode object.
+ Read a short string that's stored in up to 255 bytes.
+
+ The encoding isn't specified in the AMQP spec, so
+ assume it's utf-8
"""
self.bitcount = self.bits = 0
@@ -152,14 +150,15 @@ def read_shortstr(self):
def read_longstr(self):
"""
- Read a string that's up to 2**32 bytes, the encoding
- isn't specified in the AMQP spec, so just return it as
- a plain Python string.
+ Read a string that's up to 2**32 bytes.
+
+ The encoding isn't specified in the AMQP spec, so
+ assume it's utf-8
"""
self.bitcount = self.bits = 0
slen = unpack('>I', self.input.read(4))[0]
- return self.input.read(slen)
+ return self.input.read(slen).decode('utf-8')
def read_table(self):
@@ -173,19 +172,21 @@ def read_table(self):
result = {}
while table_data.input.tell() < tlen:
name = table_data.read_shortstr()
- ftype = table_data.input.read(1)
- if ftype == 'S':
+ ftype = ord(table_data.input.read(1))
+ if ftype == 83: # 'S'
val = table_data.read_longstr()
- elif ftype == 'I':
+ elif ftype == 73: # 'I'
val = unpack('>i', table_data.input.read(4))[0]
- elif ftype == 'D':
+ elif ftype == 68: # 'D'
d = table_data.read_octet()
n = unpack('>i', table_data.input.read(4))[0]
val = Decimal(n) / Decimal(10 ** d)
- elif ftype == 'T':
+ elif ftype == 84: # 'T'
val = table_data.read_timestamp()
- elif ftype == 'F':
+ elif ftype == 70: # 'F'
val = table_data.read_table() # recurse
+ else:
+ raise ValueError('Unknown table item type: %s' % repr(ftype))
result[name] = val
return result
@@ -208,12 +209,12 @@ class AMQPWriter(object):
def __init__(self, dest=None):
"""
dest may be a file-type object (with a write() method). If None
- then a StringIO is created, and the contents can be accessed with
+ then a BytesIO is created, and the contents can be accessed with
this class's getvalue() method.
"""
if dest is None:
- self.out = StringIO()
+ self.out = BytesIO()
else:
self.out = dest
@@ -249,7 +250,7 @@ def flush(self):
def getvalue(self):
"""
- Get what's been encoded so far if we're working with a StringIO.
+ Get what's been encoded so far if we're working with a BytesIO.
"""
self._flushbits()
@@ -258,7 +259,8 @@ def getvalue(self):
def write(self, s):
"""
- Write a plain Python string, with no special encoding.
+ Write a plain Python string with no special encoding in Python 2.x,
+ or bytes in Python 3.x
"""
self._flushbits()
@@ -327,8 +329,9 @@ def write_longlong(self, n):
def write_shortstr(self, s):
"""
- Write a string up to 255 bytes long after encoding. If passed
- a unicode string, encode as UTF-8.
+ Write a string up to 255 bytes long (after any encoding).
+
+ If passed a unicode string, encode with UTF-8.
"""
self._flushbits()
@@ -342,8 +345,9 @@ def write_shortstr(self, s):
def write_longstr(self, s):
"""
- Write a string up to 2**32 bytes long after encoding. If passed
- a unicode string, encode as UTF-8.
+ Write a string up to 2**32 bytes long after encoding.
+
+ If passed a unicode string, encode as UTF-8.
"""
self._flushbits()
@@ -367,13 +371,13 @@ def write_table(self, d):
if isinstance(v, basestring):
if isinstance(v, unicode):
v = v.encode('utf-8')
- table_data.write('S')
+ table_data.write(byte(83)) # 'S'
table_data.write_longstr(v)
elif isinstance(v, (int, long)):
- table_data.write('I')
+ table_data.write(byte(73)) # 'I'
table_data.write(pack('>i', v))
elif isinstance(v, Decimal):
- table_data.write('D')
+ table_data.write(byte(68)) # 'D'
sign, digits, exponent = v.as_tuple()
v = 0
for d in digits:
@@ -383,12 +387,14 @@ def write_table(self, d):
table_data.write_octet(-exponent)
table_data.write(pack('>i', v))
elif isinstance(v, datetime):
- table_data.write('T')
+ table_data.write(byte(84)) # 'T'
table_data.write_timestamp(v)
## FIXME: timezone ?
elif isinstance(v, dict):
- table_data.write('F')
+ table_data.write(byte(70)) # 'F'
table_data.write_table(v)
+ else:
+ raise ValueError('%s not serializable in AMQP' % repr(v))
table_data = table_data.getvalue()
self.write_long(len(table_data))
self.out.write(table_data)
@@ -434,7 +440,8 @@ def __eq__(self, other):
content object.
"""
- return (self.properties == other.properties)
+ return hasattr(other, 'properties') \
+ and (self.properties == other.properties)
def __getattr__(self, name):
@@ -444,6 +451,10 @@ def __getattr__(self, name):
dictionary.
"""
+ if name == '__setstate__':
+ # Allows pickling/unpickling to work
+ raise AttributeError('__setstate__')
+
if name in self.properties:
return self.properties[name]
View
101 lib/python/amqplib/client_0_8/transport.py
@@ -20,6 +20,7 @@
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
+import re
import socket
#
@@ -31,13 +32,21 @@
except:
HAVE_PY26_SSL = False
+try:
+ bytes
+except:
+ # Python 2.5 and lower
+ bytes = str
+
from struct import pack, unpack
AMQP_PORT = 5672
# Yes, Advanced Message Queuing Protocol Protocol is redundant
-AMQP_PROTOCOL_HEADER = 'AMQP\x01\x01\x09\x01'
+AMQP_PROTOCOL_HEADER = 'AMQP\x01\x01\x09\x01'.encode('latin_1')
+# Match things like: [fe80::1]:5432, from RFC 2732
+IPV6_LITERAL = re.compile(r'\[([\.0-9a-f:]+)\](?::(\d+))?')
class _AbstractTransport(object):
"""
@@ -45,21 +54,39 @@ class _AbstractTransport(object):
"""
def __init__(self, host, connect_timeout):
- if ':' in host:
- host, port = host.split(':', 1)
- port = int(port)
+ msg = 'socket.getaddrinfo() for %s returned an empty list' % host
+ port = AMQP_PORT
+
+ m = IPV6_LITERAL.match(host)
+ if m:
+ host = m.group(1)
+ if m.group(2):
+ port = int(m.group(2))
else:
- port = AMQP_PORT
+ if ':' in host:
+ host, port = host.rsplit(':', 1)
+ port = int(port)
+
+ self.sock = None
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM, socket.SOL_TCP):
+ af, socktype, proto, canonname, sa = res
+ try:
+ self.sock = socket.socket(af, socktype, proto)
+ self.sock.settimeout(connect_timeout)
+ self.sock.connect(sa)
+ except socket.error, msg:
+ self.sock.close()
+ self.sock = None
+ continue
+ break
+
+ if not self.sock:
+ # Didn't connect, return the most recent error message
+ raise socket.error, msg
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.settimeout(connect_timeout)
-
- try:
- self.sock.connect((host, port))
- except socket.error:
- self.sock.close()
- raise
self.sock.settimeout(None)
+ self.sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+ self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self._setup_transport()
@@ -87,6 +114,14 @@ def _setup_transport(self):
pass
+ def _shutdown_transport(self):
+ """
+ Do any preliminary work in shutting down the connection.
+
+ """
+ pass
+
+
def _write(self, s):
"""
Completely write a string to the peer.
@@ -97,6 +132,11 @@ def _write(self, s):
def close(self):
if self.sock is not None:
+ self._shutdown_transport()
+ # Call shutdown first to make sure that pending messages
+ # reach the AMQP broker if the program exits after
+ # calling this method.
+ self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
self.sock = None
@@ -108,11 +148,11 @@ def read_frame(self):
"""
frame_type, channel, size = unpack('>BHI', self._read(7))
payload = self._read(size)
- ch = self._read(1)
- if ch == '\xce':
+ ch = ord(self._read(1))
+ if ch == 206: # '\xce'
return frame_type, channel, payload
else:
- raise Exception('Framing Error, received 0x%02x while expecting 0xce' % ord(ch))
+ raise Exception('Framing Error, received 0x%02x while expecting 0xce' % ch)
def write_frame(self, frame_type, channel, payload):
@@ -130,6 +170,15 @@ class SSLTransport(_AbstractTransport):
Transport that works over SSL
"""
+ def __init__(self, host, connect_timeout, ssl):
+ if isinstance(ssl, dict):
+ self.sslopts = ssl
+
+ self.sslobj = None
+
+ super(SSLTransport, self).__init__(host, connect_timeout)
+
+
def _setup_transport(self):
"""
Wrap the socket in an SSL object, either the
@@ -138,12 +187,25 @@ def _setup_transport(self):
"""
if HAVE_PY26_SSL:
- self.sslobj = ssl.wrap_socket(self.sock)
+ if hasattr(self, 'sslopts'):
+ self.sslobj = ssl.wrap_socket(self.sock, **self.sslopts)
+ else:
+ self.sslobj = ssl.wrap_socket(self.sock)
self.sslobj.do_handshake()
else:
self.sslobj = socket.ssl(self.sock)
+ def _shutdown_transport(self):
+ """
+ Unwrap a Python 2.6 SSL socket, so we can call shutdown()
+
+ """
+ if HAVE_PY26_SSL and (self.sslobj is not None):
+ self.sock = self.sslobj.unwrap()
+ self.sslobj = None
+
+
def _read(self, n):
"""
It seems that SSL Objects read() method may not supply as much
@@ -175,7 +237,6 @@ def _write(self, s):
s = s[n:]
-
class TCPTransport(_AbstractTransport):
"""
Transport that deals directly with TCP socket.
@@ -188,7 +249,7 @@ def _setup_transport(self):
"""
self._write = self.sock.sendall
- self._read_buffer = ''
+ self._read_buffer = bytes()
def _read(self, n):
@@ -215,6 +276,6 @@ def create_transport(host, connect_timeout, ssl=False):
"""
if ssl:
- return SSLTransport(host, connect_timeout)
+ return SSLTransport(host, connect_timeout, ssl)
else:
return TCPTransport(host, connect_timeout)
View
107 lib/python/anyjson/__init__.py
@@ -3,47 +3,44 @@
import sys
-__version__ = "0.3"
-__author__ = "Rune Halvorsen <runefh@gmail.com>"
+VERSION = (0, 3, 3)
+__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
+__author__ = "Rune Halvorsen"
+__contact__ = "runefh@gmail.com"
__homepage__ = "http://bitbucket.org/runeh/anyjson/"
__docformat__ = "restructuredtext"
-implementation = None
-
-"""
-.. function:: serialize(obj)
-
- Serialize the object to JSON.
-
-.. function:: deserialize(str)
-
- Deserialize JSON-encoded object to a Python object.
-
-.. function:: force_implementation(name)
-
- Load a specific json module. This is useful for testing and not much else
-
-.. attribute:: implementation
+# -eof meta-
- The json implementation object. This is probably not useful to you,
- except to get the name of the implementation in use. The name is
- available through `implementation.name`.
-
-.. data:: _modules
+#: The json implementation object. This is probably not useful to you,
+#: except to get the name of the implementation in use. The name is
+#: available through ``implementation.name``.
+implementation = None
- List of known json modules, and the names of their serialize/unserialize
- methods, as well as the exception they throw. Exception can be either
- an exception class or a string.
-"""
-_modules = [("cjson", "encode", "EncodeError", "decode", "DecodeError"),
- ("yajl", "dumps", TypeError, "loads", ValueError),
- ("jsonlib2", "write", "WriteError", "read", "ReadError"),
- ("jsonlib", "write", "WriteError", "read", "ReadError"),
- ("simplejson", "dumps", TypeError, "loads", ValueError),
- ("json", "dumps", TypeError, "loads", ValueError),
- ("django.utils.simplejson", "dumps", TypeError, "loads",
- ValueError)]
-_fields = ("modname", "encoder", "encerror", "decoder", "decerror")
+# json.loads does not support buffer() objects,
+# so we load() and StringIO instead, and it won't copy.
+if sys.version_info[0] == 3:
+ from io import StringIO
+else:
+ try:
+ from cStringIO import StringIO # noqa
+ except ImportError:
+ from StringIO import StringIO # noqa
+
+#: List of known json modules, and the names of their loads/dumps
+#: methods, as well as the exceptions they throw. Exception can be either
+#: an exception class or a string.
+_modules = [("yajl", "dumps", TypeError, "loads", ValueError, "load"),
+ ("jsonlib2", "write", "WriteError", "read", "ReadError", None),
+ ("jsonlib", "write", "WriteError", "read", "ReadError", None),
+ ("simplejson", "dumps", TypeError, "loads", ValueError, "load"),
+ ("json", "dumps", TypeError, "loads", ValueError, "load"),
+ ("django.utils.simplejson", "dumps", TypeError, "loads", ValueError, "load"),
+ ("cjson", "encode", "EncodeError", "decode", "DecodeError", None)
+ ]
+
+_fields = ("modname", "encoder", "encerror",
+ "decoder", "decerror", "filedecoder")
class _JsonImplementation(object):
@@ -52,12 +49,18 @@ class _JsonImplementation(object):
def __init__(self, modspec):
modinfo = dict(zip(_fields, modspec))
+ if modinfo["modname"] == "cjson":
+ import warnings
+ warnings.warn("cjson is deprecated! See http://pypi.python.org/pypi/python-cjson/1.0.5", DeprecationWarning)
+
# No try block. We want importerror to end up at caller
module = self._attempt_load(modinfo["modname"])
self.implementation = modinfo["modname"]
self._encode = getattr(module, modinfo["encoder"])
self._decode = getattr(module, modinfo["decoder"])
+ fdec = modinfo["filedecoder"]
+ self._filedecode = fdec and getattr(module, fdec)
self._encode_error = modinfo["encerror"]
self._decode_error = modinfo["decerror"]
@@ -68,7 +71,7 @@ def __init__(self, modspec):
self.name = modinfo["modname"]
- def __str__(self):
+ def __repr__(self):
return "<_JsonImplementation instance using %s>" % self.name
def _attempt_load(self, modname):
@@ -77,21 +80,26 @@ def _attempt_load(self, modname):
__import__(modname)
return sys.modules[modname]
- def serialize(self, data):
+ def dumps(self, data):
"""Serialize the datastructure to json. Returns a string. Raises
TypeError if the object could not be serialized."""
try:
return self._encode(data)
except self._encode_error, exc:
- raise TypeError(*exc.args)
+ raise TypeError, TypeError(*exc.args), sys.exc_info()[2]
+ serialize = dumps
- def deserialize(self, s):
+ def loads(self, s):
"""deserialize the string to python data types. Raises
- ValueError if the string vould not be parsed."""
+ ValueError if the string could not be parsed."""
+ # uses StringIO to support buffer objects.
try:
+ if self._filedecode and not isinstance(s, basestring):
+ return self._filedecode(StringIO(s))
return self._decode(s)
except self._decode_error, exc:
- raise ValueError(*exc.args)
+ raise ValueError, ValueError(*exc.args), sys.exc_info()[2]
+ deserialize = loads
def force_implementation(modname):
@@ -121,7 +129,14 @@ def force_implementation(modname):
else:
raise ImportError("No supported JSON module found")
- serialize = lambda value: implementation.serialize(value)
- deserialize = lambda value: implementation.deserialize(value)
- dumps = serialize
- loads = deserialize
+
+ def loads(value):
+ """Serialize the object to JSON."""
+ return implementation.loads(value)
+ deserialize = loads # compat
+
+
+ def dumps(value):
+ """Deserialize JSON-encoded object to a Python object."""
+ return implementation.dumps(value)
+ serialize = dumps
View
28 lib/python/celery/__init__.py
@@ -1,9 +1,35 @@
+# -*- coding: utf-8 -*-
"""Distributed Task Queue"""
+# :copyright: (c) 2009 - 2012 by Ask Solem.
+# :license: BSD, see LICENSE for more details.
-VERSION = (2, 1, 4)
+from __future__ import absolute_import
+VERSION = (2, 5, 5)
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "ask@celeryproject.org"
__homepage__ = "http://celeryproject.org"
__docformat__ = "restructuredtext"
+
+# -eof meta-
+
+import sys
+
+if sys.version_info < (2, 5):
+ raise Exception(
+ "Python 2.4 is not supported by this version. "
+ "Please use Celery versions 2.1.x or earlier.")
+
+from .local import Proxy
+
+
+def Celery(*args, **kwargs):
+ from .app import App
+ return App(*args, **kwargs)
+
+
+def _get_current_app():
+ from .app import current_app
+ return current_app()
+current_app = Proxy(_get_current_app)
View
210 lib/python/celery/abstract.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+"""
+ celery.abstract
+ ~~~~~~~~~~~~~~~
+
+ Implements components and boot-steps.
+
+ :copyright: (c) 2009 - 2012 by Ask Solem.
+ :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+from collections import defaultdict
+from importlib import import_module
+
+from .datastructures import DependencyGraph
+from .utils import instantiate
+
+
+class Namespace(object):
+ """A namespace containing components.
+
+ Every component must belong to a namespace.
+
+ When component classes are created they are added to the
+ mapping of unclaimed components. The components will be
+ claimed when the namespace they belong to is created.
+
+ :keyword name: Set the name of this namespace.
+ :keyword app: Set the Celery app for this namespace.
+
+ """
+ name = None
+ _unclaimed = defaultdict(dict)
+ _started_count = 0
+
+ def __init__(self, name=None, app=None, logger=None):
+ self.app = app
+ self.name = name or self.name
+ self.logger = logger or self.app.log.get_default_logger()
+ self.services = []
+
+ def modules(self):
+ """Subclasses can override this to return a
+ list of modules to import before components are claimed."""
+ return []
+
+ def load_modules(self):
+ """Will load the component modules this namespace depends on."""
+ for m in self.modules():
+ self.import_module(m)
+
+ def apply(self, parent, **kwargs):
+ """Apply the components in this namespace to an object.
+
+ This will apply the ``__init__`` and ``include`` methods
+ of each component with the object as argument.
+
+ For ``StartStopComponents`` the services created
+ will also be added to the objects ``components`` attribute.
+
+ """
+ self._debug("Loading modules.")
+ self.load_modules()
+ self._debug("Claiming components.")
+ self.components = self._claim()
+ self._debug("Building boot step graph.")
+ self.boot_steps = [self.bind_component(name, parent, **kwargs)
+ for name in self._finalize_boot_steps()]
+ self._debug("New boot order: %r", [c.name for c in self.boot_steps])
+
+ for component in self.boot_steps:
+ component.include(parent)
+ return self
+
+ def bind_component(self, name, parent, **kwargs):
+ """Bind component to parent object and this namespace."""
+ comp = self[name](parent, **kwargs)
+ comp.namespace = self
+ return comp
+
+ def import_module(self, module):
+ return import_module(module)
+
+ def __getitem__(self, name):
+ return self.components[name]
+
+ def _find_last(self):
+ for C in self.components.itervalues():
+ if C.last:
+ return C
+
+ def _finalize_boot_steps(self):
+ G = self.graph = DependencyGraph((C.name, C.requires)
+ for C in self.components.itervalues())
+ last = self._find_last()
+ if last:
+ for obj in G:
+ if obj != last.name:
+ G.add_edge(last.name, obj)
+ return G.topsort()
+
+ def _claim(self):
+ return self._unclaimed[self.name]
+
+ def _debug(self, msg, *args):
+ return self.logger.debug("[%s] " + msg,
+ *(self.name.capitalize(), ) + args)
+
+
+class ComponentType(type):
+ """Metaclass for components."""
+
+ def __new__(cls, name, bases, attrs):
+ abstract = attrs.pop("abstract", False)
+ if not abstract:
+ try:
+ cname = attrs["name"]
+ except KeyError:
+ raise NotImplementedError("Components must be named")
+ namespace = attrs.get("namespace", None)
+ if not namespace:
+ attrs["namespace"], _, attrs["name"] = cname.partition('.')
+ cls = super(ComponentType, cls).__new__(cls, name, bases, attrs)
+ if not abstract:
+ Namespace._unclaimed[cls.namespace][cls.name] = cls
+ return cls
+
+
+class Component(object):
+ """A component.
+
+ The :meth:`__init__` method is called when the component
+ is bound to a parent object, and can as such be used
+ to initialize attributes in the parent object at
+ parent instantiation-time.
+
+ """
+ __metaclass__ = ComponentType
+
+ #: The name of the component, or the namespace
+ #: and the name of the component separated by dot.
+ name = None
+
+ #: List of component names this component depends on.
+ #: Note that the dependencies must be in the same namespace.
+ requires = ()
+
+ #: can be used to specify the namespace,
+ #: if the name does not include it.
+ namespace = None
+
+ #: if set the component will not be registered,
+ #: but can be used as a component base class.
+ abstract = True
+
+ #: Optional obj created by the :meth:`create` method.
+ #: This is used by StartStopComponents to keep the
+ #: original service object.
+ obj = None
+
+ #: This flag is reserved for the workers Consumer,
+ #: since it is required to always be started last.
+ #: There can only be one object marked with last
+ #: in every namespace.
+ last = False
+
+ #: This provides the default for :meth:`include_if`.
+ enabled = True
+
+ def __init__(self, parent, **kwargs):
+ pass
+
+ def create(self, parent):
+ """Create the component."""
+ pass
+
+ def include_if(self, parent):
+ """An optional predicate that decided whether this
+ component should be created."""
+ return self.enabled
+
+ def instantiate(self, qualname, *args, **kwargs):
+ return instantiate(qualname, *args, **kwargs)
+
+ def include(self, parent):
+ if self.include_if(parent):
+ self.obj = self.create(parent)
+ return True
+
+
+class StartStopComponent(Component):
+ abstract = True
+ terminable = False
+
+ def start(self):
+ return self.obj.start()
+
+ def stop(self):
+ return self.obj.stop()
+
+ def terminate(self):
+ if self.terminable:
+ return self.obj.terminate()
+ return self.obj.stop()
+
+ def include(self, parent):
+ if super(StartStopComponent, self).include(parent):
+ parent.components.append(self.obj)
View
30 lib/python/celery/actors.py
@@ -0,0 +1,30 @@
+from __future__ import absolute_import
+
+from celery.app import app_or_default
+
+import cl
+import cl.presence
+
+
+def construct(cls, instance, connection=None, *args, **kwargs):
+ app = instance.app = app_or_default(kwargs.pop("app", None))
+ super(cls, instance).__init__(connection or app.broker_connection(),
+ *args, **kwargs)
+
+
+class Actor(cl.Actor):
+
+ def __init__(self, *args, **kwargs):
+ construct(Actor, self, *args, **kwargs)
+
+
+class Agent(cl.Agent):
+
+ def __init__(self, *args, **kwargs):
+ construct(Agent, self, *args, **kwargs)
+
+
+class AwareAgent(cl.presence.AwareAgent):
+
+ def __init__(self, *args, **kwargs):
+ construct(AwareAgent, self, *args, **kwargs)
View
294 lib/python/celery/app/__init__.py
@@ -0,0 +1,294 @@
+# -*- coding: utf-8 -*-
+"""
+ celery.app
+ ~~~~~~~~~~
+
+ Celery Application.
+
+ :copyright: (c) 2009 - 2012 by Ask Solem.
+ :license: BSD, see LICENSE for more details.
+
+"""
+
+from __future__ import absolute_import
+
+import os
+import threading
+
+from .. import registry
+from ..utils import cached_property, instantiate
+
+from .annotations import (
+ _first_match, _first_match_any,
+ prepare as prepare_annotations
+)
+from .base import BaseApp
+
+
+class _TLS(threading.local):
+ #: Apps with the :attr:`~celery.app.base.BaseApp.set_as_current` attribute
+ #: sets this, so it will always contain the last instantiated app,
+ #: and is the default app returned by :func:`app_or_default`.
+ current_app = None
+
+ #: The currently executing task.
+ current_task = None
+_tls = _TLS()
+
+
+class AppPickler(object):
+ """Default application pickler/unpickler."""
+
+ def __call__(self, cls, *args):
+ kwargs = self.build_kwargs(*args)
+ app = self.construct(cls, **kwargs)
+ self.prepare(app, **kwargs)
+ return app
+
+ def prepare(self, app, **kwargs):
+ app.conf.update(kwargs["changes"])
+
+ def build_kwargs(self, *args):
+ return self.build_standard_kwargs(*args)
+
+ def build_standard_kwargs(self, main, changes, loader, backend, amqp,
+ events, log, control, accept_magic_kwargs):
+ return dict(main=main, loader=loader, backend=backend, amqp=amqp,
+ changes=changes, events=events, log=log, control=control,
+ set_as_current=False,
+ accept_magic_kwargs=accept_magic_kwargs)
+
+ def construct(self, cls, **kwargs):
+ return cls(**kwargs)
+
+
+def _unpickle_app(cls, pickler, *args):
+ return pickler()(cls, *args)
+
+
+class App(BaseApp):
+ """Celery Application.
+
+ :param main: Name of the main module if running as `__main__`.
+ :keyword loader: The loader class, or the name of the loader class to use.
+ Default is :class:`celery.loaders.app.AppLoader`.
+ :keyword backend: The result store backend class, or the name of the
+ backend class to use. Default is the value of the
+ :setting:`CELERY_RESULT_BACKEND` setting.
+ :keyword amqp: AMQP object or class name.
+ :keyword events: Events object or class name.
+ :keyword log: Log object or class name.
+ :keyword control: Control object or class name.
+ :keyword set_as_current: Make this the global current app.
+
+ """
+ Pickler = AppPickler
+
+ def set_current(self):
+ """Make this the current app for this thread."""
+ _tls.current_app = self
+
+ def on_init(self):
+ if self.set_as_current:
+ self.set_current()
+
+ def create_task_cls(self):
+ """Creates a base task class using default configuration
+ taken from this app."""
+ conf = self.conf
+
+ from .task import BaseTask
+
+ class Task(BaseTask):
+ abstract = True
+ app = self
+ backend = self.backend
+ exchange_type = conf.CELERY_DEFAULT_EXCHANGE_TYPE
+ delivery_mode = conf.CELERY_DEFAULT_DELIVERY_MODE
+ send_error_emails = conf.CELERY_SEND_TASK_ERROR_EMAILS
+ error_whitelist = conf.CELERY_TASK_ERROR_WHITELIST
+ serializer = conf.CELERY_TASK_SERIALIZER
+ rate_limit = conf.CELERY_DEFAULT_RATE_LIMIT
+ track_started = conf.CELERY_TRACK_STARTED
+ acks_late = conf.CELERY_ACKS_LATE
+ ignore_result = conf.CELERY_IGNORE_RESULT
+ store_errors_even_if_ignored = \
+ conf.CELERY_STORE_ERRORS_EVEN_IF_IGNORED
+ accept_magic_kwargs = self.accept_magic_kwargs
+ Task.__doc__ = BaseTask.__doc__
+
+ return Task
+
+ def Worker(self, **kwargs):
+ """Create new :class:`~celery.apps.worker.Worker` instance."""
+ return instantiate("celery.apps.worker:Worker", app=self, **kwargs)
+
+ def WorkController(self, **kwargs):
+ return instantiate("celery.worker:WorkController", app=self, **kwargs)
+
+ def Beat(self, **kwargs):
+ """Create new :class:`~celery.apps.beat.Beat` instance."""
+ return instantiate("celery.apps.beat:Beat", app=self, **kwargs)
+
+ def TaskSet(self, *args, **kwargs):
+ """Create new :class:`~celery.task.sets.TaskSet`."""
+ return instantiate("celery.task.sets:TaskSet",
+ app=self, *args, **kwargs)
+
+ def worker_main(self, argv=None):
+ """Run :program:`celeryd` using `argv`. Uses :data:`sys.argv`
+ if `argv` is not specified."""
+ return instantiate("celery.bin.celeryd:WorkerCommand", app=self) \
+ .execute_from_commandline(argv)
+
+ def task(self, *args, **options):
+ """Decorator to create a task class out of any callable.
+
+ **Examples:**
+
+ .. code-block:: python
+
+ @task()
+ def refresh_feed(url):
+ return Feed.objects.get(url=url).refresh()
+
+ with setting extra options and using retry.
+
+ .. code-block:: python
+
+ from celery.task import current
+
+ @task(exchange="feeds")
+ def refresh_feed(url):
+ try:
+ return Feed.objects.get(url=url).refresh()
+ except socket.error, exc:
+ current.retry(exc=exc)
+
+ Calling the resulting task::
+
+ >>> refresh_feed("http://example.com/rss") # Regular
+ <Feed: http://example.com/rss>
+ >>> refresh_feed.delay("http://example.com/rss") # Async
+ <AsyncResult: 8998d0f4-da0b-4669-ba03-d5ab5ac6ad5d>
+
+ """
+
+ def inner_create_task_cls(**options):
+
+ def _create_task_cls(fun):
+ base = options.pop("base", None) or self.Task
+
+ T = type(fun.__name__, (base, ), dict({
+ "app": self,
+ "accept_magic_kwargs": False,
+ "run": staticmethod(fun),
+ "__doc__": fun.__doc__,
+ "__module__": fun.__module__}, **options))()
+ return registry.tasks[T.name] # global instance.
+
+ return _create_task_cls
+
+ if len(args) == 1 and callable(args[0]):
+ return inner_create_task_cls(**options)(*args)
+ return inner_create_task_cls(**options)
+
+ def annotate_task(self, task):
+ if self.annotations:
+ match = _first_match(self.annotations, task)
+ for attr, value in (match or {}).iteritems():
+ setattr(task, attr, value)
+ match_any = _first_match_any(self.annotations)
+ for attr, value in (match_any or {}).iteritems():
+ setattr(task, attr, value)
+
+ @cached_property
+ def Task(self):
+ """Default Task base class for this application."""
+ return self.create_task_cls()
+
+ @cached_property
+ def annotations(self):
+ return prepare_annotations(self.conf.CELERY_ANNOTATIONS)
+
+ def __repr__(self):
+ return "<Celery: %s:0x%x>" % (self.main or "__main__", id(self), )
+
+ def __reduce__(self):
+ # Reduce only pickles the configuration changes,
+ # so the default configuration doesn't have to be passed
+ # between processes.
+ return (_unpickle_app, (self.__class__, self.Pickler)
+ + self.__reduce_args__())
+
+ def __reduce_args__(self):
+ return (self.main,
+ self.conf.changes,
+ self.loader_cls,
+ self.backend_cls,
+ self.amqp_cls,
+ self.events_cls,
+ self.log_cls,
+ self.control_cls,
+ self.accept_magic_kwargs)
+
+
+#: The "default" loader is the default loader used by old applications.
+default_loader = os.environ.get("CELERY_LOADER") or "default"
+
+#: Global fallback app instance.
+default_app = App("default", loader=default_loader,
+ set_as_current=False,
+ accept_magic_kwargs=True)
+
+
+def current_app():
+ return getattr(_tls, "current_app", None) or default_app
+
+
+def current_task():
+ return getattr(_tls, "current_task", None)
+
+
+def _app_or_default(app=None):
+ """Returns the app provided or the default app if none.
+
+ The environment variable :envvar:`CELERY_TRACE_APP` is used to
+ trace app leaks. When enabled an exception is raised if there
+ is no active app.
+
+ """
+ if app is None:
+ return getattr(_tls, "current_app", None) or default_app
+ return app
+
+
+def _app_or_default_trace(app=None): # pragma: no cover
+ from traceback import print_stack
+ from multiprocessing import current_process
+ if app is None:
+ if getattr(_tls, "current_app", None):
+ print("-- RETURNING TO CURRENT APP --") # noqa+
+ print_stack()
+ return _tls.current_app
+ if current_process()._name == "MainProcess":
+ raise Exception("DEFAULT APP")
+ print("-- RETURNING TO DEFAULT APP --") # noqa+
+ print_stack()
+ return default_app
+ return app
+
+
+def enable_trace():
+ global app_or_default
+ app_or_default = _app_or_default_trace
+
+
+def disable_trace():
+ global app_or_default
+ app_or_default = _app_or_default
+
+
+app_or_default = _app_or_default
+if os.environ.get("CELERY_TRACE_APP"): # pragma: no cover
+ enable_trace()
View
55 lib/python/celery/app/abstract.py
@@ -0,0 +1,55 @@
+from __future__ import absolute_import
+
+
+class from_config(object):
+
+ def __init__(self, key=None):
+ self.key = key
+
+ def get_key(self, attr):
+ return attr if self.key is None else self.key
+
+
+class _configurated(type):
+
+ def __new__(cls, name, bases, attrs):
+ attrs["__confopts__"] = dict((attr, spec.get_key(attr))
+ for attr, spec in attrs.iteritems()
+ if isinstance(spec, from_config))
+ inherit_from = attrs.get("inherit_confopts", ())
+ for subcls in bases:
+ try:
+ attrs["__confopts__"].update(subcls.__confopts__)
+ except AttributeError:
+ pass
+ for subcls in inherit_from:
+ attrs["__confopts__"].update(subcls.__confopts__)
+ attrs = dict((k, v if not isinstance(v, from_config) else None)
+ for k, v in attrs.iteritems())
+ return super(_configurated, cls).__new__(cls, name, bases, attrs)
+
+
+class configurated(object):
+ __metaclass__ = _configurated
+
+ def setup_defaults(self, kwargs, namespace="celery"):
+ confopts = self.__confopts__
+ app, find = self.app, self.app.conf.find_value_for_key
+
+ for attr, keyname in confopts.iteritems():
+ try:
+ value = kwargs[attr]
+ except KeyError:
+ value = find(keyname, namespace)
+ else:
+ if value is None:
+ value = find(keyname, namespace)
+ setattr(self, attr, value)
+
+ for attr_name, attr_value in kwargs.iteritems():
+ if attr_name not in confopts and attr_value is not None:
+ setattr(self, attr_name, attr_value)
+
+ def confopts_as_dict(self):
+ return dict((key, getattr(self, key))
+ for key in self.__confopts__.iterkeys())
View
356 lib/python/celery/app/amqp.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+"""
+ celery.app.amqp
+ ~~~~~~~~~~~~~~~
+
+ AMQP related functionality.
+
+ :copyright: (c) 2009 - 2012 by Ask Solem.
+ :license: BSD, see LICENSE for more details.
+
+"""
+from __future__ import absolute_import
+
+from datetime import timedelta
+
+from kombu import BrokerConnection, Exchange
+from kombu import compat as messaging
+from kombu.pools import ProducerPool
+
+from .. import routes as _routes
+from .. import signals
+from ..utils import cached_property, textindent, uuid
+
+#: List of known options to a Kombu producers send method.
+#: Used to extract the message related options out of any `dict`.
+MSG_OPTIONS = ("mandatory", "priority", "immediate", "routing_key",
+ "serializer", "delivery_mode", "compression")
+
+#: Human readable queue declaration.
+QUEUE_FORMAT = """
+. %(name)s exchange:%(exchange)s (%(exchange_type)s) \
+binding:%(binding_key)s
+"""
+
+#: Set of exchange names that have already been declared.
+_exchanges_declared = set()
+
+#: Set of queue names that have already been declared.
+_queues_declared = set()
+
+
+def extract_msg_options(options, keep=MSG_OPTIONS):
+ """Extracts known options to `basic_publish` from a dict,
+ and returns a new dict."""
+ return dict((name, options.get(name)) for name in keep)
+
+
+class Queues(dict):
+ """Queue name⇒ declaration mapping.
+
+ Celery will consult this mapping to find the options
+ for any queue by name.
+
+ :param queues: Initial mapping.
+
+ """
+ #: If set, this is a subset of queues to consume from.
+ #: The rest of the queues are then used for routing only.
+ _consume_from = None
+
+ def __init__(self, queues):
+ dict.__init__(self)
+ for queue_name, options in (queues or {}).items():
+ self.add(queue_name, **options)
+
+ def add(self, queue, exchange=None, routing_key=None,
+ exchange_type="direct", **options):
+ """Add new queue.
+
+ :param queue: Name of the queue.
+ :keyword exchange: Name of the exchange.
+ :keyword routing_key: Binding key.
+ :keyword exchange_type: Type of exchange.
+ :keyword \*\*options: Additional declaration options.
+
+ """
+ q = self[queue] = self.options(exchange, routing_key,
+ exchange_type, **options)
+ return q
+
+ def options(self, exchange, routing_key,
+ exchange_type="direct", **options):
+ """Creates new option mapping for queue, with required
+ keys present."""
+ return dict(options, routing_key=routing_key,
+ binding_key=routing_key,
+ exchange=exchange,
+ exchange_type=exchange_type)
+
+ def format(self, indent=0, indent_first=True):
+ """Format routing table into string for log dumps."""
+ active = self.consume_from
+ if not active:
+ return ""
+ info = [QUEUE_FORMAT.strip() % dict(
+ name=(name + ":").ljust(12), **config)
+ for name, config in sorted(active.iteritems())]
+ if indent_first:
+ return textindent("\n".join(info), indent)
+ return info[0] + "\n" + textindent("\n".join(info[1:]), indent)
+
+ def select_subset(self, wanted, create_missing=True):
+ """Select subset of the currently defined queues.
+
+ Does not return anything: queues not in `wanted` will
+ be discarded in-place.
+
+ :param wanted: List of wanted queue names.
+ :keyword create_missing: By default any unknown queues will be
+ added automatically, but if disabled
+ the occurrence of unknown queues
+ in `wanted` will raise :exc:`KeyError`.
+
+ """
+ acc = {}
+ for queue in wanted:
+ try:
+ options = self[queue]
+ except KeyError:
+ if not create_missing:
+ raise
+ options = self.options(queue, queue)
+ acc[queue] = options
+ self._consume_from = acc
+ self.update(acc)
+
+ @property
+ def consume_from(self):
+ if self._consume_from is not None:
+ return self._consume_from
+ return self
+
+ @classmethod
+ def with_defaults(cls, queues, default_exchange, default_exchange_type):
+ """Alternate constructor that adds default exchange and
+ exchange type information to queues that do not have any."""
+ if queues is None:
+ queues = {}
+ for opts in queues.values():
+ opts.setdefault("exchange", default_exchange),
+ opts.setdefault("exchange_type", default_exchange_type)
+ opts.setdefault("binding_key", default_exchange)
+ opts.setdefault("routing_key", opts.get("binding_key"))
+ return cls(queues)
+
+
+class TaskPublisher(messaging.Publisher):
+ auto_declare = False
+ retry = False
+ retry_policy = None
+
+ def __init__(self, *args, **kwargs):
+ self.app = kwargs.pop("app")
+ self.retry = kwargs.pop("retry", self.retry)
+ self.retry_policy = kwargs.pop("retry_policy",
+ self.retry_policy or {})
+ self.utc = kwargs.pop("enable_utc", False)
+ super(TaskPublisher, self).__init__(*args, **kwargs)
+
+ def declare(self):
+ if self.exchange.name and \
+ self.exchange.name not in _exchanges_declared:
+ super(TaskPublisher, self).declare()
+ _exchanges_declared.add(self.exchange.name)
+
+ def _declare_queue(self, name, retry=False, retry_policy={}):
+ options = self.app.amqp.queues[name]
+ queue = messaging.entry_to_queue(name, **options)(self.channel)
+ if retry:
+ self.connection.ensure(queue, queue.declare, **retry_policy)()
+ else:
+ queue.declare()
+ return queue
+
+ def _declare_exchange(self, name, type, retry=False, retry_policy={}):
+ ex = Exchange(name, type=type, durable=self.durable,
+ auto_delete=self.auto_delete)(self.channel)
+ if retry:
+ return self.connection.ensure(ex, ex.declare, **retry_policy)
+ return ex.declare()
+
+ def delay_task(self, task_name, task_args=None, task_kwargs=None,
+ countdown=None, eta=None, task_id=None, taskset_id=None,
+ expires=None, exchange=None, exchange_type=None,
+ event_dispatcher=None, retry=None, retry_policy=None,
+ queue=None, now=None, retries=0, chord=None, **kwargs):
+ """Send task message."""
+
+ connection = self.connection
+ _retry_policy = self.retry_policy
+ if retry_policy: # merge default and custom policy
+ _retry_policy = dict(_retry_policy, **retry_policy)
+
+ # declare entities
+ if queue and queue not in _queues_declared:
+ entity = self._declare_queue(queue, retry, _retry_policy)
+ _exchanges_declared.add(entity.exchange.name)
+ _queues_declared.add(entity.name)
+ if exchange and exchange not in _exchanges_declared:
+ self._declare_exchange(exchange,
+ exchange_type or self.exchange_type, retry, _retry_policy)
+ _exchanges_declared.add(exchange)
+
+ task_id = task_id or uuid()
+ task_args = task_args or []
+ task_kwargs = task_kwargs or {}
+ if not isinstance(task_args, (list, tuple)):
+ raise ValueError("task args must be a list or tuple")
+ if not isinstance(task_kwargs, dict):
+ raise ValueError("task kwargs must be a dictionary")
+ if countdown: # Convert countdown to ETA.
+ now = now or self.app.now()
+ eta = now + timedelta(seconds=countdown)
+ if isinstance(expires, (int, float)):
+ now = now or self.app.now()
+ expires = now + timedelta(seconds=expires)
+ eta = eta and eta.isoformat()
+ expires = expires and expires.isoformat()
+
+ body = {"task": task_name,
+ "id": task_id,
+ "args": task_args or [],
+ "kwargs": task_kwargs or {},
+ "retries": retries or 0,
+ "eta": eta,
+ "expires": expires,
+ "utc": self.utc}
+ if taskset_id:
+ body["taskset"] = taskset_id
+ if chord:
+ body["chord"] = chord
+
+ do_retry = retry if retry is not None else self.retry
+ send = self.send
+ if do_retry:
+ send = connection.ensure(self, self.send, **_retry_policy)
+ send(body, exchange=exchange, **extract_msg_options(kwargs))
+ signals.task_sent.send(sender=task_name, **body)
+ if event_dispatcher:
+ event_dispatcher.send("task-sent", uuid=task_id,
+ name=task_name,
+ args=repr(task_args),
+ kwargs=repr(task_kwargs),
+ retries=retries,
+ eta=eta,
+ expires=expires)
+ return task_id
+
+ def __exit__(self, *exc_info):
+ try:
+ self.release()
+ except AttributeError:
+ self.close()
+
+
+class PublisherPool(ProducerPool):
+
+ def