Skip to content
This repository
Browse code

move IPython.zmq.parallel to IPython.parallel

  • Loading branch information...
commit a6a0636afdd52c42d3505f411b3f519e507d7b12 1 parent b5b9a12
Min RK minrk authored

Showing 78 changed files with 535 additions and 237 deletions. Show diff stats Hide diff stats

  1. +3 3 IPython/config/default/ipclusterz_config.py
  2. +4 4 IPython/config/default/ipcontrollerz_config.py
  3. +1 1  IPython/external/ssh/tunnel.py
  4. +2 1  IPython/{zmq → }/parallel/__init__.py
  5. 0  IPython/{zmq → }/parallel/asyncresult.py
  6. +3 18 IPython/{zmq → }/parallel/client.py
  7. 0  IPython/{zmq → }/parallel/clusterdir.py
  8. 0  IPython/{zmq → }/parallel/controller.py
  9. +1 1  IPython/{zmq → }/parallel/dependency.py
  10. 0  IPython/{zmq → }/parallel/dictdb.py
  11. 0  IPython/{zmq → }/parallel/engine.py
  12. 0  IPython/{zmq → }/parallel/entry_point.py
  13. 0  IPython/{zmq → }/parallel/error.py
  14. +4 4 IPython/{zmq → }/parallel/factory.py
  15. 0  IPython/{zmq → }/parallel/heartmonitor.py
  16. +3 3 IPython/{zmq → }/parallel/hub.py
  17. +1 1  IPython/{zmq → }/parallel/ipcluster.py
  18. +3 3 IPython/{zmq → }/parallel/ipclusterapp.py
  19. +6 6 IPython/{zmq → }/parallel/ipcontrollerapp.py
  20. +5 5 IPython/{zmq → }/parallel/ipengineapp.py
  21. +1 1  IPython/{zmq → }/parallel/iploggerapp.py
  22. 0  IPython/{zmq → }/parallel/kernelstarter.py
  23. +3 3 IPython/{zmq → }/parallel/launcher.py
  24. 0  IPython/{zmq → }/parallel/logwatcher.py
  25. 0  IPython/{zmq → }/parallel/map.py
  26. 0  IPython/{zmq → }/parallel/mongodb.py
  27. 0  IPython/{zmq → }/parallel/remotefunction.py
  28. 0  IPython/{zmq → }/parallel/remotenamespace.py
  29. 0  IPython/{zmq → }/parallel/scheduler.py
  30. 0  IPython/{zmq → }/parallel/scripts/__init__.py
  31. +1 1  IPython/{zmq → }/parallel/scripts/ipclusterz
  32. +1 1  IPython/{zmq → }/parallel/scripts/ipcontrollerz
  33. +1 1  IPython/{zmq → }/parallel/scripts/ipenginez
  34. +1 1  IPython/{zmq → }/parallel/scripts/iploggerz
  35. +2 1  IPython/{zmq → }/parallel/sqlitedb.py
  36. +1 1  IPython/{zmq → }/parallel/streamkernel.py
  37. 0  IPython/{zmq → }/parallel/streamsession.py
  38. +1 1  IPython/{zmq → }/parallel/taskthread.py
  39. +1 1  IPython/{zmq → }/parallel/tests/__init__.py
  40. +5 5 IPython/{zmq → }/parallel/tests/clienttest.py
  41. +2 2 IPython/{zmq → }/parallel/tests/test_asyncresult.py
  42. +13 4 IPython/{zmq → }/parallel/tests/test_client.py
  43. +3 3 IPython/{zmq → }/parallel/tests/test_dependency.py
  44. +1 1  IPython/{zmq → }/parallel/tests/test_newserialized.py
  45. +1 1  IPython/{zmq → }/parallel/tests/test_streamsession.py
  46. +9 9 IPython/{zmq → }/parallel/tests/test_view.py
  47. 0  IPython/{zmq → }/parallel/util.py
  48. +1 1  IPython/{zmq → }/parallel/view.py
  49. +316 0 IPython/parallel/winhpcjob.py
  50. +1 1  IPython/utils/pickleutil.py
  51. +2 2 docs/examples/newparallel/dagdeps.py
  52. +1 3 docs/examples/newparallel/demo/dependencies.py
  53. +1 1  docs/examples/newparallel/demo/map.py
  54. +2 2 docs/examples/newparallel/demo/throughput.py
  55. +1 1  docs/examples/newparallel/demo/views.py
  56. +2 2 docs/examples/newparallel/fetchparse.py
  57. +2 2 docs/examples/newparallel/helloworld.py
  58. +1 1  docs/examples/newparallel/interengine/communicator.py
  59. +2 2 docs/examples/newparallel/interengine/interengine.py
  60. +2 2 docs/examples/newparallel/mcdriver.py
  61. +2 2 docs/examples/newparallel/parallelpi.py
  62. +1 1  docs/examples/newparallel/wave2D/communicator.py
  63. +1 1  docs/examples/newparallel/wave2D/parallelwave-mpi.py
  64. +1 1  docs/examples/newparallel/wave2D/parallelwave.py
  65. +1 1  docs/examples/newparallel/workflow/client.py
  66. +2 2 docs/examples/newparallel/workflow/job_wrapper.py
  67. +1 1  docs/examples/newparallel/workflow/wmanager.py
  68. +1 1  docs/source/parallelz/dag_dependencies.txt
  69. +2 2 docs/source/parallelz/parallel_demos.txt
  70. +26 42 docs/source/parallelz/parallel_details.txt
  71. +6 6 docs/source/parallelz/parallel_intro.txt
  72. +2 2 docs/source/parallelz/parallel_mpi.txt
  73. +41 38 docs/source/parallelz/parallel_multiengine.txt
  74. +6 6 docs/source/parallelz/parallel_process.txt
  75. +11 11 docs/source/parallelz/parallel_task.txt
  76. +3 3 docs/source/parallelz/parallel_winhpc.txt
  77. +4 4 setup.py
  78. +7 6 setupbase.py
6 IPython/config/default/ipclusterz_config.py
@@ -23,8 +23,8 @@
23 23 # - PBSControllerLauncher
24 24 # - SGEControllerLauncher
25 25 # - WindowsHPCControllerLauncher
26   -# c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.LocalControllerLauncher'
27   -c.Global.controller_launcher = 'IPython.zmq.parallel.launcher.PBSControllerLauncher'
  26 +# c.Global.controller_launcher = 'IPython.parallel.launcher.LocalControllerLauncher'
  27 +c.Global.controller_launcher = 'IPython.parallel.launcher.PBSControllerLauncher'
28 28
29 29 # Options are:
30 30 # - LocalEngineSetLauncher
@@ -32,7 +32,7 @@
32 32 # - PBSEngineSetLauncher
33 33 # - SGEEngineSetLauncher
34 34 # - WindowsHPCEngineSetLauncher
35   -# c.Global.engine_launcher = 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher'
  35 +# c.Global.engine_launcher = 'IPython.parallel.launcher.LocalEngineSetLauncher'
36 36
37 37 #-----------------------------------------------------------------------------
38 38 # Global configuration
8 IPython/config/default/ipcontrollerz_config.py
@@ -89,7 +89,7 @@
89 89 # Which class to use for the db backend. Currently supported are DictDB (the
90 90 # default), and MongoDB. Uncomment this line to enable MongoDB, which will
91 91 # slow-down the Hub's responsiveness, but also reduce its memory footprint.
92   -# c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB'
  92 +# c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
93 93
94 94 # The heartbeat ping frequency. This is the frequency (in ms) at which the
95 95 # Hub pings engines for heartbeats. This determines how quickly the Hub
@@ -144,11 +144,11 @@
144 144
145 145 # ----- in-memory configuration --------
146 146 # this line restores the default behavior: in-memory storage of all results.
147   -# c.HubFactory.db_class = 'IPython.zmq.parallel.dictdb.DictDB'
  147 +# c.HubFactory.db_class = 'IPython.parallel.dictdb.DictDB'
148 148
149 149 # ----- sqlite configuration --------
150 150 # use this line to activate sqlite:
151   -# c.HubFactory.db_class = 'IPython.zmq.parallel.sqlitedb.SQLiteDB'
  151 +# c.HubFactory.db_class = 'IPython.parallel.sqlitedb.SQLiteDB'
152 152
153 153 # You can specify the name of the db-file. By default, this will be located
154 154 # in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
@@ -165,7 +165,7 @@
165 165
166 166 # ----- mongodb configuration --------
167 167 # use this line to activate mongodb:
168   -# c.HubFactory.db_class = 'IPython.zmq.parallel.mongodb.MongoDB'
  168 +# c.HubFactory.db_class = 'IPython.parallel.mongodb.MongoDB'
169 169
170 170 # You can specify the args and kwargs pymongo will use when creating the Connection.
171 171 # For more information on what these options might be, see pymongo documentation.
2  IPython/external/ssh/tunnel.py
@@ -34,7 +34,7 @@
34 34 except ImportError:
35 35 pexpect = None
36 36
37   -from IPython.zmq.parallel.entry_point import select_random_ports
  37 +from IPython.parallel.entry_point import select_random_ports
38 38
39 39 #-----------------------------------------------------------------------------
40 40 # Code
3  IPython/zmq/parallel/__init__.py → IPython/parallel/__init__.py
@@ -13,12 +13,13 @@
13 13 import zmq
14 14
15 15 if zmq.__version__ < '2.1.3':
16   - raise ImportError("IPython.zmq.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__)
  16 + raise ImportError("IPython.parallel requires pyzmq/0MQ >= 2.1.3, you appear to have %s"%zmq.__version__)
17 17
18 18 from .asyncresult import *
19 19 from .client import Client
20 20 from .dependency import *
21 21 from .remotefunction import *
22 22 from .view import *
  23 +from IPython.utils.pickleutil import Reference
23 24
24 25
0  IPython/zmq/parallel/asyncresult.py → IPython/parallel/asyncresult.py
File renamed without changes
21 IPython/zmq/parallel/client.py → IPython/parallel/client.py
@@ -24,7 +24,6 @@
24 24 # from zmq.eventloop import ioloop, zmqstream
25 25
26 26 from IPython.utils.path import get_ipython_dir
27   -from IPython.utils.pickleutil import Reference
28 27 from IPython.utils.traitlets import (HasTraits, Int, Instance, CUnicode,
29 28 Dict, List, Bool, Str, Set)
30 29 from IPython.external.decorator import decorator
@@ -33,10 +32,8 @@
33 32 from . import error
34 33 from . import util
35 34 from . import streamsession as ss
36   -from .asyncresult import AsyncResult, AsyncMapResult, AsyncHubResult
  35 +from .asyncresult import AsyncResult, AsyncHubResult
37 36 from .clusterdir import ClusterDir, ClusterDirError
38   -from .dependency import Dependency, depend, require, dependent
39   -from .remotefunction import remote, parallel, ParallelFunction, RemoteFunction
40 37 from .view import DirectView, LoadBalancedView
41 38
42 39 #--------------------------------------------------------------------------
@@ -985,7 +982,7 @@ def load_balanced_view(self, targets=None):
985 982 targets: list,slice,int,etc. [default: use all engines]
986 983 The subset of engines across which to load-balance
987 984 """
988   - if targets is None:
  985 + if targets is not None:
989 986 targets = self._build_targets(targets)[1]
990 987 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
991 988
@@ -1278,16 +1275,4 @@ def purge_results(self, jobs=[], targets=[]):
1278 1275 raise self._unwrap_exception(content)
1279 1276
1280 1277
1281   -__all__ = [ 'Client',
1282   - 'depend',
1283   - 'require',
1284   - 'remote',
1285   - 'parallel',
1286   - 'RemoteFunction',
1287   - 'ParallelFunction',
1288   - 'DirectView',
1289   - 'LoadBalancedView',
1290   - 'AsyncResult',
1291   - 'AsyncMapResult',
1292   - 'Reference'
1293   - ]
  1278 +__all__ = [ 'Client' ]
0  IPython/zmq/parallel/clusterdir.py → IPython/parallel/clusterdir.py
File renamed without changes
0  IPython/zmq/parallel/controller.py → IPython/parallel/controller.py
File renamed without changes
2  IPython/zmq/parallel/dependency.py → IPython/parallel/dependency.py
@@ -67,7 +67,7 @@ def __name__(self):
67 67 @interactive
68 68 def _require(*names):
69 69 """Helper for @require decorator."""
70   - from IPython.zmq.parallel.error import UnmetDependency
  70 + from IPython.parallel.error import UnmetDependency
71 71 user_ns = globals()
72 72 for name in names:
73 73 if name in user_ns:
0  IPython/zmq/parallel/dictdb.py → IPython/parallel/dictdb.py
File renamed without changes
0  IPython/zmq/parallel/engine.py → IPython/parallel/engine.py
File renamed without changes
0  IPython/zmq/parallel/entry_point.py → IPython/parallel/entry_point.py
File renamed without changes
0  IPython/zmq/parallel/error.py → IPython/parallel/error.py
File renamed without changes
8 IPython/zmq/parallel/factory.py → IPython/parallel/factory.py
@@ -22,8 +22,8 @@
22 22 from IPython.utils.importstring import import_item
23 23 from IPython.utils.traitlets import Str,Int,Instance, CUnicode, CStr
24 24
25   -import IPython.zmq.parallel.streamsession as ss
26   -from IPython.zmq.parallel.entry_point import select_random_ports
  25 +import IPython.parallel.streamsession as ss
  26 +from IPython.parallel.entry_point import select_random_ports
27 27
28 28 #-----------------------------------------------------------------------------
29 29 # Classes
@@ -37,7 +37,7 @@ def _logname_changed(self, name, old, new):
37 37
38 38
39 39 class SessionFactory(LoggingFactory):
40   - """The Base factory from which every factory in IPython.zmq.parallel inherits"""
  40 + """The Base factory from which every factory in IPython.parallel inherits"""
41 41
42 42 packer = Str('',config=True)
43 43 unpacker = Str('',config=True)
@@ -48,7 +48,7 @@ def _ident_default(self):
48 48 exec_key = CUnicode('',config=True)
49 49 # not configurable:
50 50 context = Instance('zmq.Context', (), {})
51   - session = Instance('IPython.zmq.parallel.streamsession.StreamSession')
  51 + session = Instance('IPython.parallel.streamsession.StreamSession')
52 52 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
53 53 def _loop_default(self):
54 54 return IOLoop.instance()
0  IPython/zmq/parallel/heartmonitor.py → IPython/parallel/heartmonitor.py
File renamed without changes
6 IPython/zmq/parallel/hub.py → IPython/parallel/hub.py
@@ -136,11 +136,11 @@ def _notifier_port_default(self):
136 136
137 137 monitor_url = CStr('')
138 138
139   - db_class = CStr('IPython.zmq.parallel.dictdb.DictDB', config=True)
  139 + db_class = CStr('IPython.parallel.dictdb.DictDB', config=True)
140 140
141 141 # not configurable
142   - db = Instance('IPython.zmq.parallel.dictdb.BaseDB')
143   - heartmonitor = Instance('IPython.zmq.parallel.heartmonitor.HeartMonitor')
  142 + db = Instance('IPython.parallel.dictdb.BaseDB')
  143 + heartmonitor = Instance('IPython.parallel.heartmonitor.HeartMonitor')
144 144 subconstructors = List()
145 145 _constructed = Bool(False)
146 146
2  IPython/zmq/parallel/ipcluster.py → IPython/parallel/ipcluster.py
@@ -56,7 +56,7 @@ def strip_args(flags, args=sys.argv[1:]):
56 56
57 57 def launch_process(mod, args):
58 58 """Launch a controller or engine in a subprocess."""
59   - code = "from IPython.zmq.parallel.%s import launch_new_instance;launch_new_instance()"%mod
  59 + code = "from IPython.parallel.%s import launch_new_instance;launch_new_instance()"%mod
60 60 arguments = [ sys.executable, '-c', code ] + args
61 61 blackholew = file(os.devnull, 'w')
62 62 blackholer = file(os.devnull, 'r')
6 IPython/zmq/parallel/ipclusterapp.py → IPython/parallel/ipclusterapp.py
@@ -26,7 +26,7 @@
26 26
27 27 from IPython.external.argparse import ArgumentParser, SUPPRESS
28 28 from IPython.utils.importstring import import_item
29   -from IPython.zmq.parallel.clusterdir import (
  29 +from IPython.parallel.clusterdir import (
30 30 ApplicationWithClusterDir, ClusterDirConfigLoader,
31 31 ClusterDirError, PIDFileError
32 32 )
@@ -260,9 +260,9 @@ class IPClusterApp(ApplicationWithClusterDir):
260 260 def create_default_config(self):
261 261 super(IPClusterApp, self).create_default_config()
262 262 self.default_config.Global.controller_launcher = \
263   - 'IPython.zmq.parallel.launcher.LocalControllerLauncher'
  263 + 'IPython.parallel.launcher.LocalControllerLauncher'
264 264 self.default_config.Global.engine_launcher = \
265   - 'IPython.zmq.parallel.launcher.LocalEngineSetLauncher'
  265 + 'IPython.parallel.launcher.LocalEngineSetLauncher'
266 266 self.default_config.Global.n = 2
267 267 self.default_config.Global.delay = 2
268 268 self.default_config.Global.reset_config = False
12 IPython/zmq/parallel/ipcontrollerapp.py → IPython/parallel/ipcontrollerapp.py
@@ -30,13 +30,13 @@
30 30 from zmq.utils import jsonapi as json
31 31
32 32 from IPython.config.loader import Config
33   -from IPython.zmq.parallel import factory
34   -from IPython.zmq.parallel.controller import ControllerFactory
35   -from IPython.zmq.parallel.clusterdir import (
  33 +from IPython.parallel import factory
  34 +from IPython.parallel.controller import ControllerFactory
  35 +from IPython.parallel.clusterdir import (
36 36 ApplicationWithClusterDir,
37 37 ClusterDirConfigLoader
38 38 )
39   -from IPython.zmq.parallel.util import disambiguate_ip_address, split_url
  39 +from IPython.parallel.util import disambiguate_ip_address, split_url
40 40 # from IPython.kernel.fcutil import FCServiceFactory, FURLError
41 41 from IPython.utils.traitlets import Instance, Unicode
42 42
@@ -117,11 +117,11 @@ def _add_arguments(self):
117 117 ## Hub Config:
118 118 paa('--mongodb',
119 119 dest='HubFactory.db_class', action='store_const',
120   - const='IPython.zmq.parallel.mongodb.MongoDB',
  120 + const='IPython.parallel.mongodb.MongoDB',
121 121 help='Use MongoDB for task storage [default: in-memory]')
122 122 paa('--sqlite',
123 123 dest='HubFactory.db_class', action='store_const',
124   - const='IPython.zmq.parallel.sqlitedb.SQLiteDB',
  124 + const='IPython.parallel.sqlitedb.SQLiteDB',
125 125 help='Use SQLite3 for DB task storage [default: in-memory]')
126 126 paa('--hb',
127 127 type=int, dest='HubFactory.hb', nargs=2,
10 IPython/zmq/parallel/ipengineapp.py → IPython/parallel/ipengineapp.py
@@ -22,16 +22,16 @@
22 22 import zmq
23 23 from zmq.eventloop import ioloop
24 24
25   -from IPython.zmq.parallel.clusterdir import (
  25 +from IPython.parallel.clusterdir import (
26 26 ApplicationWithClusterDir,
27 27 ClusterDirConfigLoader
28 28 )
29 29 from IPython.zmq.log import EnginePUBHandler
30 30
31   -from IPython.zmq.parallel import factory
32   -from IPython.zmq.parallel.engine import EngineFactory
33   -from IPython.zmq.parallel.streamkernel import Kernel
34   -from IPython.zmq.parallel.util import disambiguate_url
  31 +from IPython.parallel import factory
  32 +from IPython.parallel.engine import EngineFactory
  33 +from IPython.parallel.streamkernel import Kernel
  34 +from IPython.parallel.util import disambiguate_url
35 35 from IPython.utils.importstring import import_item
36 36
37 37
2  IPython/zmq/parallel/iploggerapp.py → IPython/parallel/iploggerapp.py
@@ -20,7 +20,7 @@
20 20
21 21 import zmq
22 22
23   -from IPython.zmq.parallel.clusterdir import (
  23 +from IPython.parallel.clusterdir import (
24 24 ApplicationWithClusterDir,
25 25 ClusterDirConfigLoader
26 26 )
0  IPython/zmq/parallel/kernelstarter.py → IPython/parallel/kernelstarter.py
File renamed without changes
6 IPython/zmq/parallel/launcher.py → IPython/parallel/launcher.py
@@ -64,15 +64,15 @@ def check_output(*args, **kwargs):
64 64
65 65
66 66 ipclusterz_cmd_argv = pycmd2argv(get_ipython_module_path(
67   - 'IPython.zmq.parallel.ipclusterapp'
  67 + 'IPython.parallel.ipclusterapp'
68 68 ))
69 69
70 70 ipenginez_cmd_argv = pycmd2argv(get_ipython_module_path(
71   - 'IPython.zmq.parallel.ipengineapp'
  71 + 'IPython.parallel.ipengineapp'
72 72 ))
73 73
74 74 ipcontrollerz_cmd_argv = pycmd2argv(get_ipython_module_path(
75   - 'IPython.zmq.parallel.ipcontrollerapp'
  75 + 'IPython.parallel.ipcontrollerapp'
76 76 ))
77 77
78 78 #-----------------------------------------------------------------------------
0  IPython/zmq/parallel/logwatcher.py → IPython/parallel/logwatcher.py
File renamed without changes
0  IPython/zmq/parallel/map.py → IPython/parallel/map.py
File renamed without changes
0  IPython/zmq/parallel/mongodb.py → IPython/parallel/mongodb.py
File renamed without changes
0  IPython/zmq/parallel/remotefunction.py → IPython/parallel/remotefunction.py
File renamed without changes
0  IPython/zmq/parallel/remotenamespace.py → IPython/parallel/remotenamespace.py
File renamed without changes
0  IPython/zmq/parallel/scheduler.py → IPython/parallel/scheduler.py
File renamed without changes
0  IPython/zmq/parallel/scripts/__init__.py → IPython/parallel/scripts/__init__.py
File renamed without changes
2  IPython/zmq/parallel/scripts/ipclusterz → IPython/parallel/scripts/ipclusterz
@@ -13,6 +13,6 @@
13 13 #-----------------------------------------------------------------------------
14 14
15 15
16   -from IPython.zmq.parallel.ipclusterapp import launch_new_instance
  16 +from IPython.parallel.ipclusterapp import launch_new_instance
17 17
18 18 launch_new_instance()
2  IPython/zmq/parallel/scripts/ipcontrollerz → IPython/parallel/scripts/ipcontrollerz
@@ -13,6 +13,6 @@
13 13 #-----------------------------------------------------------------------------
14 14
15 15
16   -from IPython.zmq.parallel.ipcontrollerapp import launch_new_instance
  16 +from IPython.parallel.ipcontrollerapp import launch_new_instance
17 17
18 18 launch_new_instance()
2  IPython/zmq/parallel/scripts/ipenginez → IPython/parallel/scripts/ipenginez
@@ -13,7 +13,7 @@
13 13 #-----------------------------------------------------------------------------
14 14
15 15
16   -from IPython.zmq.parallel.ipengineapp import launch_new_instance
  16 +from IPython.parallel.ipengineapp import launch_new_instance
17 17
18 18 launch_new_instance()
19 19
2  IPython/zmq/parallel/scripts/iploggerz → IPython/parallel/scripts/iploggerz
@@ -13,7 +13,7 @@
13 13 #-----------------------------------------------------------------------------
14 14
15 15
16   -from IPython.zmq.parallel.iploggerapp import launch_new_instance
  16 +from IPython.parallel.iploggerapp import launch_new_instance
17 17
18 18 launch_new_instance()
19 19
3  IPython/zmq/parallel/sqlitedb.py → IPython/parallel/sqlitedb.py
@@ -133,7 +133,8 @@ def _init_db(self):
133 133 sqlite3.register_converter('bufs', _convert_bufs)
134 134 # connect to the db
135 135 dbfile = os.path.join(self.location, self.filename)
136   - self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES)
  136 + self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES, cached_statements=16)
  137 + # print dir(self._db)
137 138
138 139 self._db.execute("""CREATE TABLE IF NOT EXISTS %s
139 140 (msg_id text PRIMARY KEY,
2  IPython/zmq/parallel/streamkernel.py → IPython/parallel/streamkernel.py
@@ -71,7 +71,7 @@ class Kernel(SessionFactory):
71 71 control_stream = Instance(zmqstream.ZMQStream)
72 72 task_stream = Instance(zmqstream.ZMQStream)
73 73 iopub_stream = Instance(zmqstream.ZMQStream)
74   - client = Instance('IPython.zmq.parallel.client.Client')
  74 + client = Instance('IPython.parallel.client.Client')
75 75
76 76 # internals
77 77 shell_streams = List()
0  IPython/zmq/parallel/streamsession.py → IPython/parallel/streamsession.py
File renamed without changes
2  IPython/zmq/parallel/taskthread.py → IPython/parallel/taskthread.py
@@ -18,7 +18,7 @@
18 18 import zmq
19 19 from zmq.core.poll import _poll as poll
20 20 from zmq.devices import ThreadDevice
21   -from IPython.zmq.parallel import streamsession as ss
  21 +from IPython.parallel import streamsession as ss
22 22
23 23
24 24 class QueueStream(object):
2  IPython/zmq/parallel/tests/__init__.py → IPython/parallel/tests/__init__.py
@@ -15,7 +15,7 @@
15 15 import time
16 16 from subprocess import Popen, PIPE, STDOUT
17 17
18   -from IPython.zmq.parallel import client
  18 +from IPython.parallel import client
19 19
20 20 processes = []
21 21 blackhole = tempfile.TemporaryFile()
10 IPython/zmq/parallel/tests/clienttest.py → IPython/parallel/tests/clienttest.py
@@ -20,11 +20,11 @@
20 20
21 21 from IPython.external.decorator import decorator
22 22
23   -from IPython.zmq.parallel import error
24   -from IPython.zmq.parallel.client import Client
25   -from IPython.zmq.parallel.ipcluster import launch_process
26   -from IPython.zmq.parallel.entry_point import select_random_ports
27   -from IPython.zmq.parallel.tests import processes,add_engines
  23 +from IPython.parallel import error
  24 +from IPython.parallel.client import Client
  25 +from IPython.parallel.ipcluster import launch_process
  26 +from IPython.parallel.entry_point import select_random_ports
  27 +from IPython.parallel.tests import processes,add_engines
28 28
29 29 # simple tasks for use in apply tests
30 30
4 IPython/zmq/parallel/tests/test_asyncresult.py → IPython/parallel/tests/test_asyncresult.py
@@ -12,9 +12,9 @@
12 12 #-------------------------------------------------------------------------------
13 13
14 14
15   -from IPython.zmq.parallel.error import TimeoutError
  15 +from IPython.parallel.error import TimeoutError
16 16
17   -from IPython.zmq.parallel.tests import add_engines
  17 +from IPython.parallel.tests import add_engines
18 18 from .clienttest import ClusterTestCase
19 19
20 20 def setup():
17 IPython/zmq/parallel/tests/test_client.py → IPython/parallel/tests/test_client.py
@@ -16,10 +16,10 @@
16 16
17 17 import zmq
18 18
19   -from IPython.zmq.parallel import client as clientmod
20   -from IPython.zmq.parallel import error
21   -from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult
22   -from IPython.zmq.parallel.view import LoadBalancedView, DirectView
  19 +from IPython.parallel import client as clientmod
  20 +from IPython.parallel import error
  21 +from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult
  22 +from IPython.parallel.view import LoadBalancedView, DirectView
23 23
24 24 from clienttest import ClusterTestCase, segfault, wait, add_engines
25 25
@@ -61,6 +61,15 @@ def test_view_indexing(self):
61 61 self.assertEquals(v.targets, targets[-1])
62 62 self.assertRaises(TypeError, lambda : self.client[None])
63 63
  64 + def test_lbview_targets(self):
  65 + """test load_balanced_view targets"""
  66 + v = self.client.load_balanced_view()
  67 + self.assertEquals(v.targets, None)
  68 + v = self.client.load_balanced_view(-1)
  69 + self.assertEquals(v.targets, [self.client.ids[-1]])
  70 + v = self.client.load_balanced_view('all')
  71 + self.assertEquals(v.targets, self.client.ids)
  72 +
64 73 def test_targets(self):
65 74 """test various valid targets arguments"""
66 75 build = self.client._build_targets
6 IPython/zmq/parallel/tests/test_dependency.py → IPython/parallel/tests/test_dependency.py
@@ -18,10 +18,10 @@
18 18
19 19 from IPython.utils.pickleutil import can, uncan
20 20
21   -from IPython.zmq.parallel import dependency as dmod
22   -from IPython.zmq.parallel.util import interactive
  21 +from IPython.parallel import dependency as dmod
  22 +from IPython.parallel.util import interactive
23 23
24   -from IPython.zmq.parallel.tests import add_engines
  24 +from IPython.parallel.tests import add_engines
25 25 from .clienttest import ClusterTestCase
26 26
27 27 def setup():
2  IPython/zmq/parallel/tests/test_newserialized.py → IPython/parallel/tests/test_newserialized.py
@@ -16,7 +16,7 @@
16 16 from IPython.testing.parametric import parametric
17 17 from IPython.utils import newserialized as ns
18 18 from IPython.utils.pickleutil import can, uncan, CannedObject, CannedFunction
19   -from IPython.zmq.parallel.tests.clienttest import skip_without
  19 +from IPython.parallel.tests.clienttest import skip_without
20 20
21 21
22 22 class CanningTestCase(TestCase):
2  IPython/zmq/parallel/tests/test_streamsession.py → IPython/parallel/tests/test_streamsession.py
@@ -18,7 +18,7 @@
18 18 from zmq.tests import BaseZMQTestCase
19 19 from zmq.eventloop.zmqstream import ZMQStream
20 20 # from IPython.zmq.tests import SessionTestCase
21   -from IPython.zmq.parallel import streamsession as ss
  21 +from IPython.parallel import streamsession as ss
22 22
23 23 class SessionTestCase(BaseZMQTestCase):
24 24
18 IPython/zmq/parallel/tests/test_view.py → IPython/parallel/tests/test_view.py
@@ -15,13 +15,13 @@
15 15
16 16 import zmq
17 17
18   -from IPython.zmq.parallel import client as clientmod
19   -from IPython.zmq.parallel import error
20   -from IPython.zmq.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult
21   -from IPython.zmq.parallel.view import LoadBalancedView, DirectView
22   -from IPython.zmq.parallel.util import interactive
  18 +from IPython import parallel as pmod
  19 +from IPython.parallel import error
  20 +from IPython.parallel.asyncresult import AsyncResult, AsyncHubResult, AsyncMapResult
  21 +from IPython.parallel.view import LoadBalancedView, DirectView
  22 +from IPython.parallel.util import interactive
23 23
24   -from IPython.zmq.parallel.tests import add_engines
  24 +from IPython.parallel.tests import add_engines
25 25
26 26 from .clienttest import ClusterTestCase, segfault, wait, skip_without
27 27
@@ -129,7 +129,7 @@ def echo(a=10):
129 129
130 130 def test_get_result(self):
131 131 """test getting results from the Hub."""
132   - c = clientmod.Client(profile='iptest')
  132 + c = pmod.Client(profile='iptest')
133 133 # self.add_engines(1)
134 134 t = c.ids[-1]
135 135 v = c[t]
@@ -154,7 +154,7 @@ def test_run_newline(self):
154 154 """)
155 155 v = self.client[-1]
156 156 v.run(tmpfile, block=True)
157   - self.assertEquals(v.apply_sync(lambda f: f(), clientmod.Reference('g')), 5)
  157 + self.assertEquals(v.apply_sync(lambda f: f(), pmod.Reference('g')), 5)
158 158
159 159 def test_apply_tracked(self):
160 160 """test tracking for apply"""
@@ -206,7 +206,7 @@ def test_scatter_tracked(self):
206 206 def test_remote_reference(self):
207 207 v = self.client[-1]
208 208 v['a'] = 123
209   - ra = clientmod.Reference('a')
  209 + ra = pmod.Reference('a')
210 210 b = v.apply_sync(lambda x: x, ra)
211 211 self.assertEquals(b, 123)
212 212
0  IPython/zmq/parallel/util.py → IPython/parallel/util.py
File renamed without changes
2  IPython/zmq/parallel/view.py → IPython/parallel/view.py
@@ -105,7 +105,7 @@ class View(HasTraits):
105 105 history=List()
106 106 outstanding = Set()
107 107 results = Dict()
108   - client = Instance('IPython.zmq.parallel.client.Client')
  108 + client = Instance('IPython.parallel.client.Client')
109 109
110 110 _socket = Instance('zmq.Socket')
111 111 _flag_names = List(['targets', 'block', 'track'])
316 IPython/parallel/winhpcjob.py
... ... @@ -0,0 +1,316 @@
  1 +#!/usr/bin/env python
  2 +# encoding: utf-8
  3 +"""
  4 +Job and task components for writing .xml files that the Windows HPC Server
  5 +2008 can use to start jobs.
  6 +"""
  7 +
  8 +#-----------------------------------------------------------------------------
  9 +# Copyright (C) 2008-2009 The IPython Development Team
  10 +#
  11 +# Distributed under the terms of the BSD License. The full license is in
  12 +# the file COPYING, distributed as part of this software.
  13 +#-----------------------------------------------------------------------------
  14 +
  15 +#-----------------------------------------------------------------------------
  16 +# Imports
  17 +#-----------------------------------------------------------------------------
  18 +
  19 +from __future__ import with_statement
  20 +
  21 +import os
  22 +import re
  23 +import uuid
  24 +
  25 +from xml.etree import ElementTree as ET
  26 +
  27 +from IPython.config.configurable import Configurable
  28 +from IPython.utils.traitlets import (
  29 + Str, Int, List, Instance,
  30 + Enum, Bool, CStr
  31 +)
  32 +
  33 +#-----------------------------------------------------------------------------
  34 +# Job and Task classes
  35 +#-----------------------------------------------------------------------------
  36 +
  37 +
  38 +def as_str(value):
  39 + if isinstance(value, str):
  40 + return value
  41 + elif isinstance(value, bool):
  42 + if value:
  43 + return 'true'
  44 + else:
  45 + return 'false'
  46 + elif isinstance(value, (int, float)):
  47 + return repr(value)
  48 + else:
  49 + return value
  50 +
  51 +
  52 +def indent(elem, level=0):
  53 + i = "\n" + level*" "
  54 + if len(elem):
  55 + if not elem.text or not elem.text.strip():
  56 + elem.text = i + " "
  57 + if not elem.tail or not elem.tail.strip():
  58 + elem.tail = i
  59 + for elem in elem:
  60 + indent(elem, level+1)
  61 + if not elem.tail or not elem.tail.strip():
  62 + elem.tail = i
  63 + else:
  64 + if level and (not elem.tail or not elem.tail.strip()):
  65 + elem.tail = i
  66 +
  67 +
  68 +def find_username():
  69 + domain = os.environ.get('USERDOMAIN')
  70 + username = os.environ.get('USERNAME','')
  71 + if domain is None:
  72 + return username
  73 + else:
  74 + return '%s\\%s' % (domain, username)
  75 +
  76 +
  77 +class WinHPCJob(Configurable):
  78 +
  79 + job_id = Str('')
  80 + job_name = Str('MyJob', config=True)
  81 + min_cores = Int(1, config=True)
  82 + max_cores = Int(1, config=True)
  83 + min_sockets = Int(1, config=True)
  84 + max_sockets = Int(1, config=True)
  85 + min_nodes = Int(1, config=True)
  86 + max_nodes = Int(1, config=True)
  87 + unit_type = Str("Core", config=True)
  88 + auto_calculate_min = Bool(True, config=True)
  89 + auto_calculate_max = Bool(True, config=True)
  90 + run_until_canceled = Bool(False, config=True)
  91 + is_exclusive = Bool(False, config=True)
  92 + username = Str(find_username(), config=True)
  93 + job_type = Str('Batch', config=True)
  94 + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
  95 + default_value='Highest', config=True)
  96 + requested_nodes = Str('', config=True)
  97 + project = Str('IPython', config=True)
  98 + xmlns = Str('http://schemas.microsoft.com/HPCS2008/scheduler/')
  99 + version = Str("2.000")
  100 + tasks = List([])
  101 +
  102 + @property
  103 + def owner(self):
  104 + return self.username
  105 +
  106 + def _write_attr(self, root, attr, key):
  107 + s = as_str(getattr(self, attr, ''))
  108 + if s:
  109 + root.set(key, s)
  110 +
  111 + def as_element(self):
  112 +        # We have to add _A_ type prefixes to get the attribute order that
  113 +        # the MSFT XML parser expects.
  114 + root = ET.Element('Job')
  115 + self._write_attr(root, 'version', '_A_Version')
  116 + self._write_attr(root, 'job_name', '_B_Name')
  117 + self._write_attr(root, 'unit_type', '_C_UnitType')
  118 + self._write_attr(root, 'min_cores', '_D_MinCores')
  119 + self._write_attr(root, 'max_cores', '_E_MaxCores')
  120 + self._write_attr(root, 'min_sockets', '_F_MinSockets')
  121 + self._write_attr(root, 'max_sockets', '_G_MaxSockets')
  122 + self._write_attr(root, 'min_nodes', '_H_MinNodes')
  123 + self._write_attr(root, 'max_nodes', '_I_MaxNodes')
  124 + self._write_attr(root, 'run_until_canceled', '_J_RunUntilCanceled')
  125 + self._write_attr(root, 'is_exclusive', '_K_IsExclusive')
  126 + self._write_attr(root, 'username', '_L_UserName')
  127 + self._write_attr(root, 'job_type', '_M_JobType')
  128 + self._write_attr(root, 'priority', '_N_Priority')
  129 + self._write_attr(root, 'requested_nodes', '_O_RequestedNodes')
  130 + self._write_attr(root, 'auto_calculate_max', '_P_AutoCalculateMax')
  131 + self._write_attr(root, 'auto_calculate_min', '_Q_AutoCalculateMin')
  132 + self._write_attr(root, 'project', '_R_Project')
  133 + self._write_attr(root, 'owner', '_S_Owner')
  134 + self._write_attr(root, 'xmlns', '_T_xmlns')
  135 + dependencies = ET.SubElement(root, "Dependencies")
  136 + etasks = ET.SubElement(root, "Tasks")
  137 + for t in self.tasks:
  138 + etasks.append(t.as_element())
  139 + return root
  140 +
  141 + def tostring(self):
  142 + """Return the string representation of the job description XML."""
  143 + root = self.as_element()
  144 + indent(root)
  145 + txt = ET.tostring(root, encoding="utf-8")
  146 + # Now remove the tokens used to order the attributes.
  147 + txt = re.sub(r'_[A-Z]_','',txt)
  148 + txt = '<?xml version="1.0" encoding="utf-8"?>\n' + txt
  149 + return txt
  150 +
  151 + def write(self, filename):
  152 + """Write the XML job description to a file."""
  153 + txt = self.tostring()
  154 + with open(filename, 'w') as f:
  155 + f.write(txt)
  156 +
  157 + def add_task(self, task):
  158 + """Add a task to the job.
  159 +
  160 + Parameters
  161 + ----------
  162 + task : :class:`WinHPCTask`
  163 + The task object to add.
  164 + """
  165 + self.tasks.append(task)
  166 +
  167 +
  168 +class WinHPCTask(Configurable):
  169 +
  170 + task_id = Str('')
  171 + task_name = Str('')
  172 + version = Str("2.000")
  173 + min_cores = Int(1, config=True)
  174 + max_cores = Int(1, config=True)
  175 + min_sockets = Int(1, config=True)
  176 + max_sockets = Int(1, config=True)
  177 + min_nodes = Int(1, config=True)
  178 + max_nodes = Int(1, config=True)
  179 + unit_type = Str("Core", config=True)
  180 + command_line = CStr('', config=True)
  181 + work_directory = CStr('', config=True)
  182 + is_rerunnaable = Bool(True, config=True)
  183 + std_out_file_path = CStr('', config=True)
  184 + std_err_file_path = CStr('', config=True)
  185 + is_parametric = Bool(False, config=True)
  186 + environment_variables = Instance(dict, args=(), config=True)
  187 +
  188 + def _write_attr(self, root, attr, key):
  189 + s = as_str(getattr(self, attr, ''))
  190 + if s:
  191 + root.set(key, s)
  192 +
  193 + def as_element(self):
  194 + root = ET.Element('Task')
  195 + self._write_attr(root, 'version', '_A_Version')
  196 + self._write_attr(root, 'task_name', '_B_Name')
  197 + self._write_attr(root, 'min_cores', '_C_MinCores')
  198 + self._write_attr(root, 'max_cores', '_D_MaxCores')
  199 + self._write_attr(root, 'min_sockets', '_E_MinSockets')
  200 + self._write_attr(root, 'max_sockets', '_F_MaxSockets')
  201 + self._write_attr(root, 'min_nodes', '_G_MinNodes')
  202 + self._write_attr(root, 'max_nodes', '_H_MaxNodes')
  203 + self._write_attr(root, 'command_line', '_I_CommandLine')
  204 + self._write_attr(root, 'work_directory', '_J_WorkDirectory')
  205 + self._write_attr(root, 'is_rerunnaable', '_K_IsRerunnable')
  206 + self._write_attr(root, 'std_out_file_path', '_L_StdOutFilePath')
  207 + self._write_attr(root, 'std_err_file_path', '_M_StdErrFilePath')
  208 + self._write_attr(root, 'is_parametric', '_N_IsParametric')
  209 + self._write_attr(root, 'unit_type', '_O_UnitType')
  210 + root.append(self.get_env_vars())
  211 + return root
  212 +
  213 + def get_env_vars(self):
  214 + env_vars = ET.Element('EnvironmentVariables')
  215 + for k, v in self.environment_variables.iteritems():
  216 + variable = ET.SubElement(env_vars, "Variable")
  217 + name = ET.SubElement(variable, "Name")
  218 + name.text = k
  219 + value = ET.SubElement(variable, "Value")
  220 + value.text = v
  221 + return env_vars
  222 +
  223 +
  224 +
  225 +# By declaring these, we can configure the controller and engine separately!
  226 +
  227 +class IPControllerJob(WinHPCJob):
  228 + job_name = Str('IPController', config=False)
  229 + is_exclusive = Bool(False, config=True)
  230 + username = Str(find_username(), config=True)
  231 + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
  232 + default_value='Highest', config=True)
  233 + requested_nodes = Str('', config=True)
  234 + project = Str('IPython', config=True)
  235 +
  236 +
  237 +class IPEngineSetJob(WinHPCJob):
  238 + job_name = Str('IPEngineSet', config=False)
  239 + is_exclusive = Bool(False, config=True)
  240 + username = Str(find_username(), config=True)
  241 + priority = Enum(('Lowest','BelowNormal','Normal','AboveNormal','Highest'),
  242 + default_value='Highest', config=True)
  243 + requested_nodes = Str('', config=True)
  244 + project = Str('IPython', config=True)
  245 +
  246 +
  247 +class IPControllerTask(WinHPCTask):
  248 +
  249 + task_name = Str('IPController', config=True)
  250 + controller_cmd = List(['ipcontroller.exe'], config=True)
  251 + controller_args = List(['--log-to-file', '--log-level', '40'], config=True)
  252 + # I don't want these to be configurable
  253 + std_out_file_path = CStr('', config=False)
  254 + std_err_file_path = CStr('', config=False)
  255 + min_cores = Int(1, config=False)
  256 + max_cores = Int(1, config=False)
  257 + min_sockets = Int(1, config=False)
  258 + max_sockets = Int(1, config=False)
  259 + min_nodes = Int(1, config=False)
  260 + max_nodes = Int(1, config=False)
  261 + unit_type = Str("Core", config=False)
  262 + work_directory = CStr('', config=False)
  263 +
  264 + def __init__(self, config=None):
  265 + super(IPControllerTask, self).__init__(config=config)
  266 + the_uuid = uuid.uuid1()
  267 + self.std_out_file_path = os.path.join('log','ipcontroller-%s.out' % the_uuid)
  268 + self.std_err_file_path = os.path.join('log','ipcontroller-%s.err' % the_uuid)
  269 +
  270 + @property
  271 + def command_line(self):
  272 + return ' '.join(self.controller_cmd + self.controller_args)
  273 +
  274 +
  275 +class IPEngineTask(WinHPCTask):
  276 +
  277 + task_name = Str('IPEngine', config=True)
  278 + engine_cmd = List(['ipengine.exe'], config=True)
  279 + engine_args = List(['--log-to-file', '--log-level', '40'], config=True)
  280 + # I don't want these to be configurable
  281 + std_out_file_path = CStr('', config=False)
  282 + std_err_file_path = CStr('', config=False)
  283 + min_cores = Int(1, config=False)
  284 + max_cores = Int(1, config=False)
  285 + min_sockets = Int(1, config=False)
  286 + max_sockets = Int(1, config=False)
  287 + min_nodes = Int(1, config=False)
  288 + max_nodes = Int(1, config=False)
  289 + unit_type = Str("Core", config=False)
  290 + work_directory = CStr('', config=False)
  291 +
  292 + def __init__(self, config=None):
  293 + super(IPEngineTask,self).__init__(config=config)
  294 + the_uuid = uuid.uuid1()
  295 + self.std_out_file_path = os.path.join('log','ipengine-%s.out' % the_uuid)
  296 + self.std_err_file_path = os.path.join('log','ipengine-%s.err' % the_uuid)
  297 +
  298 + @property
  299 + def command_line(self):
  300 + return ' '.join(self.engine_cmd + self.engine_args)
  301 +
  302 +
  303 +# j = WinHPCJob(None)
  304 +# j.job_name = 'IPCluster'
  305 +# j.username = 'GNET\\bgranger'
  306 +# j.requested_nodes = 'GREEN'
  307 +#
  308 +# t = WinHPCTask(None)
  309 +# t.task_name = 'Controller'
  310 +# t.command_line = r"\\blue\domainusers$\bgranger\Python\Python25\Scripts\ipcontroller.exe --log-to-file -p default --log-level 10"
  311 +# t.work_directory = r"\\blue\domainusers$\bgranger\.ipython\cluster_default"
  312 +# t.std_out_file_path = 'controller-out.txt'
  313 +# t.std_err_file_path = 'controller-err.txt'
  314 +# t.environment_variables['PYTHONPATH'] = r"\\blue\domainusers$\bgranger\Python\Python25\Lib\site-packages"
  315 +# j.add_task(t)
  316 +
2  IPython/utils/pickleutil.py
@@ -93,7 +93,7 @@ def getObject(self, g=None):
93 93
94 94 def can(obj):
95 95 # import here to prevent module-level circular imports
96   - from IPython.zmq.parallel.dependency import dependent
  96 + from IPython.parallel.dependency import dependent
97 97 if isinstance(obj, dependent):
98 98 keys = ('f','df')
99 99 return CannedObject(obj, keys=keys)
4 docs/examples/newparallel/dagdeps.py
@@ -8,7 +8,7 @@
8 8 """
9 9 import networkx as nx
10 10 from random import randint, random
11   -from IPython.zmq.parallel import client as cmod
  11 +from IPython import parallel
12 12
13 13 def randomwait():
14 14 import time
@@ -87,7 +87,7 @@ def main(nodes, edges):
87 87 for node in G:
88 88 jobs[node] = randomwait
89 89
90   - client = cmod.Client()
  90 + client = parallel.Client()
91 91 view = client.load_balanced_view()
92 92 print "submitting %i tasks with %i dependencies"%(nodes,edges)
93 93 results = submit_jobs(view, G, jobs)
4 docs/examples/newparallel/demo/dependencies.py
... ... @@ -1,6 +1,4 @@
1   -from IPython.zmq.parallel import error
2   -from IPython.zmq.parallel.dependency import Dependency
3   -from IPython.zmq.parallel.client import *
  1 +from IPython.parallel import *
4 2
5 3 client = Client()
6 4
2  docs/examples/newparallel/demo/map.py
... ... @@ -1,4 +1,4 @@
1   -from IPython.zmq.parallel.client import *
  1 +from IPython.parallel import *
2 2
3 3 client = Client()
4 4 view = client[:]
4 docs/examples/newparallel/demo/throughput.py
... ... @@ -1,6 +1,6 @@
1 1 import time
2 2 import numpy as np
3   -from IPython.zmq.parallel import client as clientmod
  3 +from IPython import parallel
4 4
5 5 nlist = map(int, np.logspace(2,9,16,base=2))
6 6 nlist2 = map(int, np.logspace(2,8,15,base=2))
@@ -14,7 +14,7 @@ def echo(s=''):
14 14 return s
15 15
16 16 def time_throughput(nmessages, t=0, f=wait):
17   - client = clientmod.Client()
  17 + client = parallel.Client()
18 18 view = client[None]
19 19 # do one ping before starting timing
20 20 if f is echo:
2  docs/examples/newparallel/demo/views.py
... ... @@ -1,4 +1,4 @@
1   -from IPython.zmq.parallel.client import *
  1 +from IPython.parallel import *
2 2
3 3 client = Client()
4 4
4 docs/examples/newparallel/fetchparse.py
@@ -11,7 +11,7 @@
11 11 ipclusterz start -n 4
12 12 """
13 13 import sys
14   -from IPython.zmq.parallel import client, error
  14 +from IPython.parallel import Client, error
15 15 import time
16 16 import BeautifulSoup # this isn't necessary, but it helps throw the dependency error earlier
17 17
@@ -39,7 +39,7 @@ class DistributedSpider(object):
39 39 pollingDelay = 0.5
40 40
41 41 def __init__(self, site):
42   - self.client = client.Client()
  42 + self.client = Client()
43 43 self.view = self.client.load_balanced_view()
44 44 self.mux = self.client[:]
45 45
4 docs/examples/newparallel/helloworld.py
@@ -2,9 +2,9 @@
2 2 A Distributed Hello world
3 3 Ken Kinder <ken@kenkinder.com>
4 4 """
5   -from IPython.zmq.parallel import client
  5 +from IPython.parallel import Client
6 6
7   -rc = client.Client()
  7 +rc = Client()
8 8
9 9 def sleep_and_echo(t, msg):
10 10 import time
2  docs/examples/newparallel/interengine/communicator.py
@@ -3,7 +3,7 @@
3 3 import uuid
4 4 import zmq
5 5
6   -from IPython.zmq.parallel.util import disambiguate_url
  6 +from IPython.parallel.util import disambiguate_url
7 7
8 8 class EngineCommunicator(object):
9 9
4 docs/examples/newparallel/interengine/interengine.py
... ... @@ -1,9 +1,9 @@
1 1 import sys
2 2
3   -from IPython.zmq.parallel import client
  3 +from IPython.parallel import Client
4 4
5 5
6   -rc = client.Client()
  6 +rc = Client()
7 7 rc.block=True
8 8 view = rc[:]
9 9 view.run('communicator.py')
4 docs/examples/newparallel/mcdriver.py
@@ -7,7 +7,7 @@
7 7
8 8 import sys
9 9 import time
10   -from IPython.zmq.parallel import client
  10 +from IPython.parallel import Client
11 11 import numpy as np
12 12 from mcpricer import price_options
13 13 from matplotlib import pyplot as plt
@@ -45,7 +45,7 @@ def ask_question(text, the_type, default):
45 45
46 46 # The Client is used to setup the calculation and works with all
47 47 # engines.
48   -c = client.Client(profile=cluster_profile)
  48 +c = Client(profile=cluster_profile)
49 49
50 50 # A LoadBalancedView is an interface to the engines that provides dynamic load
51 51 # balancing at the expense of not knowing which engine will execute the code.
4 docs/examples/newparallel/parallelpi.py
@@ -16,7 +16,7 @@
16 16 of the IPython engines.
17 17 """
18 18
19   -from IPython.zmq.parallel import client
  19 +from IPython.parallel import Client
20 20 from matplotlib import pyplot as plt
21 21 import numpy as np
22 22 from pidigits import *
@@ -27,7 +27,7 @@
27 27 files = [filestring % {'i':i} for i in range(1,16)]
28 28
29 29 # Connect to the IPython cluster
30   -c = client.Client()
  30 +c = Client()
31 31 c[:].run('pidigits.py')
32 32
33 33 # the number of engines
2  docs/examples/newparallel/wave2D/communicator.py
@@ -5,7 +5,7 @@
5 5
6 6 import zmq
7 7
8   -from IPython.zmq.parallel.util import disambiguate_url
  8 +from IPython.parallel.util import disambiguate_url
9 9
10 10 class EngineCommunicator(object):
11 11 """An object that connects Engines to each other.
2  docs/examples/newparallel/wave2D/parallelwave-mpi.py