Browse files

reorganize default config files to match profiles as directories

Old files are left in place, until applications make the transition to newapp.
  • Loading branch information...
1 parent 5742cd3 commit e3bab276e977746522702da6166154940d82199b @minrk committed May 25, 2011
View
5 IPython/config/profile/README
@@ -0,0 +1,5 @@
+This is the IPython directory.
+
+For more information on configuring IPython, do:
+
+ipython config -h
View
24 IPython/config/profile/cluster/ipython_config.py
@@ -0,0 +1,24 @@
+c = get_config()
+
+# This can be used at any point in a config file to load a sub config
+# and merge it into the current one.
+load_subconfig('ipython_config.py')
+
+lines = """
+from IPython.kernel.client import *
+"""
+
+# You have to make sure that attributes that are containers already
+# exist before using them. Simply assigning a new list will override
+# all previous values.
+if hasattr(c.Global, 'exec_lines'):
+ c.Global.exec_lines.append(lines)
+else:
+ c.Global.exec_lines = [lines]
+
+# Load the parallelmagic extension to enable %result, %px, %autopx magics.
+if hasattr(c.Global, 'extensions'):
+ c.Global.extensions.append('parallelmagic')
+else:
+ c.Global.extensions = ['parallelmagic']
+
View
0 IPython/config/profile/default/__init__.py
No changes.
View
241 IPython/config/profile/default/ipcluster_config.py
@@ -0,0 +1,241 @@
+import os
+
+c = get_config()
+
+#-----------------------------------------------------------------------------
+# Select which launchers to use
+#-----------------------------------------------------------------------------
+
+# This allows you to control what method is used to start the controller
+# and engines. The following methods are currently supported:
+# - Start as a regular process on localhost.
+# - Start using mpiexec.
+# - Start using the Windows HPC Server 2008 scheduler
+# - Start using PBS/SGE
+# - Start using SSH
+
+
+# The selected launchers can be configured below.
+
+# Options are:
+# - LocalControllerLauncher
+# - MPIExecControllerLauncher
+# - PBSControllerLauncher
+# - SGEControllerLauncher
+# - WindowsHPCControllerLauncher
+# c.Global.controller_launcher = 'IPython.parallel.apps.launcher.LocalControllerLauncher'
+# c.Global.controller_launcher = 'IPython.parallel.apps.launcher.PBSControllerLauncher'
+
+# Options are:
+# - LocalEngineSetLauncher
+# - MPIExecEngineSetLauncher
+# - PBSEngineSetLauncher
+# - SGEEngineSetLauncher
+# - WindowsHPCEngineSetLauncher
+# c.Global.engine_launcher = 'IPython.parallel.apps.launcher.LocalEngineSetLauncher'
+
+#-----------------------------------------------------------------------------
+# Global configuration
+#-----------------------------------------------------------------------------
+
+# The default number of engines that will be started. This is overridden by
+# the -n command line option: "ipcluster start -n 4"
+# c.Global.n = 2
+
+# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
+# c.Global.log_to_file = False
+
+# Remove old logs from cluster_dir/log before starting.
+# c.Global.clean_logs = True
+
+# The working directory for the process. The application will use os.chdir
+# to change to this directory before starting.
+# c.Global.work_dir = os.getcwd()
+
+
+#-----------------------------------------------------------------------------
+# Local process launchers
+#-----------------------------------------------------------------------------
+
+# The command line arguments to call the controller with.
+# c.LocalControllerLauncher.controller_args = \
+# ['--log-to-file','--log-level', '40']
+
+# The working directory for the controller
+# c.LocalEngineSetLauncher.work_dir = u''
+
+# Command line argument passed to the engines.
+# c.LocalEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
+
+#-----------------------------------------------------------------------------
+# MPIExec launchers
+#-----------------------------------------------------------------------------
+
+# The mpiexec/mpirun command to use in both the controller and engines.
+# c.MPIExecLauncher.mpi_cmd = ['mpiexec']
+
+# Additional arguments to pass to the actual mpiexec command.
+# c.MPIExecLauncher.mpi_args = []
+
+# The mpiexec/mpirun command and args can be overridden if they should be different
+# for controller and engines.
+# c.MPIExecControllerLauncher.mpi_cmd = ['mpiexec']
+# c.MPIExecControllerLauncher.mpi_args = []
+# c.MPIExecEngineSetLauncher.mpi_cmd = ['mpiexec']
+# c.MPIExecEngineSetLauncher.mpi_args = []
+
+# The command line argument to call the controller with.
+# c.MPIExecControllerLauncher.controller_args = \
+# ['--log-to-file','--log-level', '40']
+
+# Command line argument passed to the engines.
+# c.MPIExecEngineSetLauncher.engine_args = ['--log-to-file','--log-level', '40']
+
+# The default number of engines to start if not given elsewhere.
+# c.MPIExecEngineSetLauncher.n = 1
+
+#-----------------------------------------------------------------------------
+# SSH launchers
+#-----------------------------------------------------------------------------
+
+# ipclusterz can be used to launch controller and engines remotely via ssh.
+# Note that currently ipclusterz does not do any file distribution, so if
+# machines are not on a shared filesystem, config and json files must be
+# distributed. For this reason, the reuse_files defaults to True on an
+# ssh-launched Controller. This flag can be overridden by the program_args
+# attribute of c.SSHControllerLauncher.
+
+# set the ssh cmd for launching remote commands. The default is ['ssh']
+# c.SSHLauncher.ssh_cmd = ['ssh']
+
+# set the ssh cmd for launching remote commands. The default is ['ssh']
+# c.SSHLauncher.ssh_args = ['tt']
+
+# Set the user and hostname for the controller
+# c.SSHControllerLauncher.hostname = 'controller.example.com'
+# c.SSHControllerLauncher.user = os.environ.get('USER','username')
+
+# Set the arguments to be passed to ipcontrollerz
+# note that remotely launched ipcontrollerz will not get the contents of
+# the local ipcontrollerz_config.py unless it resides on the *remote host*
+# in the location specified by the --cluster_dir argument.
+# c.SSHControllerLauncher.program_args = ['-r', '-ip', '0.0.0.0', '--cluster_dir', '/path/to/cd']
+
+# Set the default args passed to ipenginez for SSH launched engines
+# c.SSHEngineSetLauncher.engine_args = ['--mpi', 'mpi4py']
+
+# SSH engines are launched as a dict of locations/n-engines.
+# if a value is a tuple instead of an int, it is assumed to be of the form
+# (n, [args]), setting the arguments to passed to ipenginez on `host`.
+# otherwise, c.SSHEngineSetLauncher.engine_args will be used as the default.
+
+# In this case, there will be 3 engines at my.example.com, and
+# 2 at you@ipython.scipy.org with a special json connector location.
+# c.SSHEngineSetLauncher.engines = {'my.example.com' : 3,
+# 'you@ipython.scipy.org' : (2, ['-f', '/path/to/ipcontroller-engine.json'])
+# }
+
+#-----------------------------------------------------------------------------
+# Unix batch (PBS) schedulers launchers
+#-----------------------------------------------------------------------------
+
+# SGE and PBS are very similar. All configurables in this section called 'PBS*'
+# also exist as 'SGE*'.
+
+# The command line program to use to submit a PBS job.
+# c.PBSLauncher.submit_command = ['qsub']
+
+# The command line program to use to delete a PBS job.
+# c.PBSLauncher.delete_command = ['qdel']
+
+# The PBS queue in which the job should run
+# c.PBSLauncher.queue = 'myqueue'
+
+# A regular expression that takes the output of qsub and find the job id.
+# c.PBSLauncher.job_id_regexp = r'\d+'
+
+# If for some reason the Controller and Engines have different options above, they
+# can be set as c.PBSControllerLauncher.<option> etc.
+
+# PBS and SGE have default templates, but you can specify your own, either as strings
+# or from files, as described here:
+
+# The batch submission script used to start the controller. This is where
+# environment variables would be setup, etc. This string is interpreted using
+# the Itpl module in IPython.external. Basically, you can use ${n} for the
+# number of engines and ${cluster_dir} for the cluster_dir.
+# c.PBSControllerLauncher.batch_template = """
+# #PBS -N ipcontroller
+# #PBS -q $queue
+#
+# ipcontrollerz --cluster-dir $cluster_dir
+# """
+
+# You can also load this template from a file
+# c.PBSControllerLauncher.batch_template_file = u"/path/to/my/template.sh"
+
+# The name of the instantiated batch script that will actually be used to
+# submit the job. This will be written to the cluster directory.
+# c.PBSControllerLauncher.batch_file_name = u'pbs_controller'
+
+# The batch submission script used to start the engines. This is where
+# environment variables would be setup, etc. This string is interpreted using
+# the Itpl module in IPython.external. Basically, you can use ${n} for the
+# number of engines and ${cluster_dir} for the cluster_dir.
+# c.PBSEngineSetLauncher.batch_template = """
+# #PBS -N ipengine
+# #PBS -l nprocs=$n
+#
+# ipenginez --cluster-dir $cluster_dir
+# """
+
+# You can also load this template from a file
+# c.PBSControllerLauncher.batch_template_file = u"/path/to/my/template.sh"
+
+# The name of the instantiated batch script that will actually be used to
+# submit the job. This will be written to the cluster directory.
+# c.PBSEngineSetLauncher.batch_file_name = u'pbs_engines'
+
+
+
+#-----------------------------------------------------------------------------
+# Windows HPC Server 2008 launcher configuration
+#-----------------------------------------------------------------------------
+
+# c.IPControllerJob.job_name = 'IPController'
+# c.IPControllerJob.is_exclusive = False
+# c.IPControllerJob.username = r'USERDOMAIN\USERNAME'
+# c.IPControllerJob.priority = 'Highest'
+# c.IPControllerJob.requested_nodes = ''
+# c.IPControllerJob.project = 'MyProject'
+
+# c.IPControllerTask.task_name = 'IPController'
+# c.IPControllerTask.controller_cmd = [u'ipcontroller.exe']
+# c.IPControllerTask.controller_args = ['--log-to-file', '--log-level', '40']
+# c.IPControllerTask.environment_variables = {}
+
+# c.WindowsHPCControllerLauncher.scheduler = 'HEADNODE'
+# c.WindowsHPCControllerLauncher.job_file_name = u'ipcontroller_job.xml'
+
+
+# c.IPEngineSetJob.job_name = 'IPEngineSet'
+# c.IPEngineSetJob.is_exclusive = False
+# c.IPEngineSetJob.username = r'USERDOMAIN\USERNAME'
+# c.IPEngineSetJob.priority = 'Highest'
+# c.IPEngineSetJob.requested_nodes = ''
+# c.IPEngineSetJob.project = 'MyProject'
+
+# c.IPEngineTask.task_name = 'IPEngine'
+# c.IPEngineTask.engine_cmd = [u'ipengine.exe']
+# c.IPEngineTask.engine_args = ['--log-to-file', '--log-level', '40']
+# c.IPEngineTask.environment_variables = {}
+
+# c.WindowsHPCEngineSetLauncher.scheduler = 'HEADNODE'
+# c.WindowsHPCEngineSetLauncher.job_file_name = u'ipengineset_job.xml'
+
+
+
+
+
+
+
View
180 IPython/config/profile/default/ipcontroller_config.py
@@ -0,0 +1,180 @@
+from IPython.config.loader import Config
+
+c = get_config()
+
+#-----------------------------------------------------------------------------
+# Global configuration
+#-----------------------------------------------------------------------------
+
+# Basic Global config attributes
+
+# Start up messages are logged to stdout using the logging module.
+# These all happen before the twisted reactor is started and are
+# useful for debugging purposes. Can be (10=DEBUG,20=INFO,30=WARN,40=ERROR,50=CRITICAL)
+# and smaller is more verbose.
+# c.Global.log_level = 20
+
+# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
+# c.Global.log_to_file = False
+
+# Remove old logs from cluster_dir/log before starting.
+# c.Global.clean_logs = True
+
+# A list of Python statements that will be run before starting the
+# controller. This is provided because occasionally certain things need to
+# be imported in the controller for pickling to work.
+# c.Global.import_statements = ['import math']
+
+# Reuse the controller's JSON files. If False, JSON files are regenerated
+# each time the controller is run. If True, they will be reused, *but*, you
+# also must set the network ports by hand. If set, this will override the
+# values set for the client and engine connections below.
+# c.Global.reuse_files = True
+
+# Enable exec_key authentication on all messages. Default is True
+# c.Global.secure = True
+
+# The working directory for the process. The application will use os.chdir
+# to change to this directory before starting.
+# c.Global.work_dir = os.getcwd()
+
+# The log url for logging to an `iploggerz` application. This will override
+# log-to-file.
+# c.Global.log_url = 'tcp://127.0.0.1:20202'
+
+# The specific external IP that is used to disambiguate multi-interface URLs.
+# The default behavior is to guess from external IPs gleaned from `socket`.
+# c.Global.location = '192.168.1.123'
+
+# The ssh server remote clients should use to connect to this controller.
+# It must be a machine that can see the interface specified in client_ip.
+# The default for client_ip is localhost, in which case the sshserver must
+# be an external IP of the controller machine.
+# c.Global.sshserver = 'controller.example.com'
+
+# the url to use for registration. If set, this overrides engine-ip,
+# engine-transport client-ip,client-transport, and regport.
+# c.RegistrationFactory.url = 'tcp://*:12345'
+
+# the port to use for registration. Clients and Engines both use this
+# port for registration.
+# c.RegistrationFactory.regport = 10101
+
+#-----------------------------------------------------------------------------
+# Configure the Task Scheduler
+#-----------------------------------------------------------------------------
+
+# The routing scheme. 'pure' will use the pure-ZMQ scheduler. Any other
+# value will use a Python scheduler with various routing schemes.
+# python schemes are: lru, weighted, random, twobin. Default is 'weighted'.
+# Note that the pure ZMQ scheduler does not support many features, such as
+# dying engines, dependencies, or engine-subset load-balancing.
+# c.ControllerFactory.scheme = 'pure'
+
+# The Python scheduler can limit the number of outstanding tasks per engine
+# by using an HWM option. This allows engines with long-running tasks
+# to not steal too many tasks from other engines. The default is 0, which
+# means aggressively distribute messages, never waiting for them to finish.
+# c.TaskScheduler.hwm = 0
+
+# Whether to use Threads or Processes to start the Schedulers. Threads will
+# use less resources, but potentially reduce throughput. Default is to
+# use processes. Note that the a Python scheduler will always be in a Process.
+# c.ControllerFactory.usethreads
+
+#-----------------------------------------------------------------------------
+# Configure the Hub
+#-----------------------------------------------------------------------------
+
+# Which class to use for the db backend. Currently supported are DictDB (the
+# default), and MongoDB. Uncomment this line to enable MongoDB, which will
+# slow-down the Hub's responsiveness, but also reduce its memory footprint.
+# c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
+
+# The heartbeat ping frequency. This is the frequency (in ms) at which the
+# Hub pings engines for heartbeats. This determines how quickly the Hub
+# will react to engines coming and going. A lower number means faster response
+# time, but more network activity. The default is 100ms
+# c.HubFactory.ping = 100
+
+# HubFactory queue port pairs, to set by name: mux, iopub, control, task. Set
+# each as a tuple of length 2 of ints. The default is to find random
+# available ports
+# c.HubFactory.mux = (10102,10112)
+
+#-----------------------------------------------------------------------------
+# Configure the client connections
+#-----------------------------------------------------------------------------
+
+# Basic client connection config attributes
+
+# The network interface the controller will listen on for client connections.
+# This should be an IP address or interface on the controller. An asterisk
+# means listen on all interfaces. The transport can be any transport
+# supported by zeromq (tcp,epgm,pgm,ib,ipc):
+# c.HubFactory.client_ip = '*'
+# c.HubFactory.client_transport = 'tcp'
+
+# individual client ports to configure by name: query_port, notifier_port
+# c.HubFactory.query_port = 12345
+
+#-----------------------------------------------------------------------------
+# Configure the engine connections
+#-----------------------------------------------------------------------------
+
+# Basic config attributes for the engine connections.
+
+# The network interface the controller will listen on for engine connections.
+# This should be an IP address or interface on the controller. An asterisk
+# means listen on all interfaces. The transport can be any transport
+# supported by zeromq (tcp,epgm,pgm,ib,ipc):
+# c.HubFactory.engine_ip = '*'
+# c.HubFactory.engine_transport = 'tcp'
+
+# set the engine heartbeat ports to use:
+# c.HubFactory.hb = (10303,10313)
+
+#-----------------------------------------------------------------------------
+# Configure the TaskRecord database backend
+#-----------------------------------------------------------------------------
+
+# For memory/persistence reasons, tasks can be stored out-of-memory in a database.
+# Currently, only sqlite and mongodb are supported as backends, but the interface
+# is fairly simple, so advanced developers could write their own backend.
+
+# ----- in-memory configuration --------
+# this line restores the default behavior: in-memory storage of all results.
+# c.HubFactory.db_class = 'IPython.parallel.controller.dictdb.DictDB'
+
+# ----- sqlite configuration --------
+# use this line to activate sqlite:
+# c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'
+
+# You can specify the name of the db-file. By default, this will be located
+# in the active cluster_dir, e.g. ~/.ipython/clusterz_default/tasks.db
+# c.SQLiteDB.filename = 'tasks.db'
+
+# You can also specify the location of the db-file, if you want it to be somewhere
+# other than the cluster_dir.
+# c.SQLiteDB.location = '/scratch/'
+
+# This will specify the name of the table for the controller to use. The default
+# behavior is to use the session ID of the SessionFactory object (a uuid). Overriding
+# this will result in results persisting for multiple sessions.
+# c.SQLiteDB.table = 'results'
+
+# ----- mongodb configuration --------
+# use this line to activate mongodb:
+# c.HubFactory.db_class = 'IPython.parallel.controller.mongodb.MongoDB'
+
+# You can specify the args and kwargs pymongo will use when creating the Connection.
+# For more information on what these options might be, see pymongo documentation.
+# c.MongoDB.connection_kwargs = {}
+# c.MongoDB.connection_args = []
+
+# This will specify the name of the mongo database for the controller to use. The default
+# behavior is to use the session ID of the SessionFactory object (a uuid). Overriding
+# this will result in task results persisting through multiple sessions.
+# c.MongoDB.database = 'ipythondb'
+
+
View
85 IPython/config/profile/default/ipengine_config.py
@@ -0,0 +1,85 @@
+c = get_config()
+
+#-----------------------------------------------------------------------------
+# Global configuration
+#-----------------------------------------------------------------------------
+
+# Start up messages are logged to stdout using the logging module.
+# These all happen before the twisted reactor is started and are
+# useful for debugging purposes. Can be (10=DEBUG,20=INFO,30=WARN,40=ERROR,50=CRITICAL)
+# and smaller is more verbose.
+# c.Global.log_level = 20
+
+# Log to a file in cluster_dir/log, otherwise just log to sys.stdout.
+# c.Global.log_to_file = False
+
+# Remove old logs from cluster_dir/log before starting.
+# c.Global.clean_logs = True
+
+# A list of strings that will be executed in the users namespace on the engine
+# before it connects to the controller.
+# c.Global.exec_lines = ['import numpy']
+
+# The engine will try to connect to the controller multiple times, to allow
+# the controller time to startup and write its FURL file. These parameters
+# control the number of retries (connect_max_tries) and the initial delay
+# (connect_delay) between attempts. The actual delay between attempts gets
+# longer each time by a factor of 1.5 (delay[i] = 1.5*delay[i-1]).
+# c.Global.connect_delay = 0.1
+# c.Global.connect_max_tries = 15
+
+# By default, the engine will look for the controller's JSON file in its own
+# cluster directory. Sometimes, the JSON file will be elsewhere and this
+# attribute can be set to the full path of the JSON file.
+# c.Global.url_file = u'/path/to/my/ipcontroller-engine.json'
+
+# The working directory for the process. The application will use os.chdir
+# to change to this directory before starting.
+# c.Global.work_dir = os.getcwd()
+
+#-----------------------------------------------------------------------------
+# MPI configuration
+#-----------------------------------------------------------------------------
+
+# Upon starting the engine can be configured to call MPI_Init. This section
+# configures that.
+
+# Select which MPI section to execute to setup MPI. The value of this
+# attribute must match the name of another attribute in the MPI config
+# section (mpi4py, pytrilinos, etc.). This can also be set by the --mpi
+# command line option.
+# c.MPI.use = ''
+
+# Initialize MPI using mpi4py. To use this, set c.MPI.use = 'mpi4py' to use
+# --mpi=mpi4py at the command line.
+# c.MPI.mpi4py = """from mpi4py import MPI as mpi
+# mpi.size = mpi.COMM_WORLD.Get_size()
+# mpi.rank = mpi.COMM_WORLD.Get_rank()
+# """
+
+# Initialize MPI using pytrilinos. To use this, set c.MPI.use = 'pytrilinos'
+# to use --mpi=pytrilinos at the command line.
+# c.MPI.pytrilinos = """from PyTrilinos import Epetra
+# class SimpleStruct:
+# pass
+# mpi = SimpleStruct()
+# mpi.rank = 0
+# mpi.size = 0
+# """
+
+#-----------------------------------------------------------------------------
+# Developer level configuration attributes
+#-----------------------------------------------------------------------------
+
+# You shouldn't have to modify anything in this section. These attributes
+# are more for developers who want to change the behavior of the controller
+# at a fundamental level.
+
+# You should not have to change these attributes.
+
+# c.Global.url_file_name = u'ipcontroller-engine.furl'
+
+
+
+
View
165 IPython/config/profile/default/ipython_config.py
@@ -0,0 +1,165 @@
+# Get the config being loaded so we can set attributes on it
+c = get_config()
+
+#-----------------------------------------------------------------------------
+# Global options
+#-----------------------------------------------------------------------------
+
+# c.Global.display_banner = True
+
+# c.Global.classic = False
+
+# c.Global.nosep = True
+
+# If you still use multiple versions of IPython on the same machine,
+# set this to True to suppress warnings about old configuration files
+# c.Global.ignore_old_config = False
+
+# Set this to determine the detail of what is logged at startup.
+# The default is 30 and possible values are 0,10,20,30,40,50.
+# c.Global.log_level = 20
+
+# This should be a list of importable Python modules that have an
+# load_ipython_extension(ip) method. This method gets called when the extension
+# is loaded. You can put your extensions anywhere they can be imported
+# but we add the extensions subdir of the ipython directory to sys.path
+# during extension loading, so you can put them there as well.
+# c.Global.extensions = [
+# 'myextension'
+# ]
+
+# These lines are run in IPython in the user's namespace after extensions
+# are loaded. They can contain full IPython syntax with magics etc.
+# c.Global.exec_lines = [
+# 'import numpy',
+# 'a = 10; b = 20',
+# '1/0'
+# ]
+
+# These files are run in IPython in the user's namespace. Files with a .py
+# extension need to be pure Python. Files with a .ipy extension can have
+# custom IPython syntax (like magics, etc.).
+# These files need to be in the cwd, the ipython_dir or be absolute paths.
+# c.Global.exec_files = [
+# 'mycode.py',
+# 'fancy.ipy'
+# ]
+
+#-----------------------------------------------------------------------------
+# InteractiveShell options
+#-----------------------------------------------------------------------------
+
+# c.InteractiveShell.autocall = 1
+
+# c.TerminalInteractiveShell.autoedit_syntax = False
+
+# c.InteractiveShell.autoindent = True
+
+# c.InteractiveShell.automagic = False
+
+# c.TerminalInteractiveShell.banner1 = 'This is for overriding the default IPython banner'
+
+# c.TerminalInteractiveShell.banner2 = "This is for extra banner text"
+
+# c.InteractiveShell.cache_size = 1000
+
+# c.InteractiveShell.colors = 'LightBG'
+
+# c.InteractiveShell.color_info = True
+
+# c.TerminalInteractiveShell.confirm_exit = True
+
+# c.InteractiveShell.deep_reload = False
+
+# c.TerminalInteractiveShell.editor = 'nano'
+
+# c.InteractiveShell.logstart = True
+
+# c.InteractiveShell.logfile = u'ipython_log.py'
+
+# c.InteractiveShell.logappend = u'mylog.py'
+
+# c.InteractiveShell.object_info_string_level = 0
+
+# c.TerminalInteractiveShell.pager = 'less'
+
+# c.InteractiveShell.pdb = False
+
+# c.InteractiveShell.prompt_in1 = 'In [\#]: '
+# c.InteractiveShell.prompt_in2 = ' .\D.: '
+# c.InteractiveShell.prompt_out = 'Out[\#]: '
+# c.InteractiveShell.prompts_pad_left = True
+
+# c.InteractiveShell.quiet = False
+
+# c.InteractiveShell.history_length = 10000
+
+# Readline
+# c.InteractiveShell.readline_use = True
+
+# be careful with meta-key ('\M-<x>') bindings, because
+# they conflict with 8-bit encodings (e.g. UTF8)
+
+# c.InteractiveShell.readline_parse_and_bind = [
+# 'tab: complete',
+# '"\C-l": possible-completions',
+# 'set show-all-if-ambiguous on',
+# '"\C-o": tab-insert',
+# '"\C-r": reverse-search-history',
+# '"\C-s": forward-search-history',
+# '"\C-p": history-search-backward',
+# '"\C-n": history-search-forward',
+# '"\e[A": history-search-backward',
+# '"\e[B": history-search-forward',
+# '"\C-k": kill-line',
+# '"\C-u": unix-line-discard',
+# ]
+# c.InteractiveShell.readline_remove_delims = '-/~'
+# c.InteractiveShell.readline_merge_completions = True
+# c.InteractiveShell.readline_omit__names = 0
+
+# c.TerminalInteractiveShell.screen_length = 0
+
+# c.InteractiveShell.separate_in = '\n'
+# c.InteractiveShell.separate_out = ''
+# c.InteractiveShell.separate_out2 = ''
+
+# c.TerminalInteractiveShell.term_title = False
+
+# c.InteractiveShell.wildcards_case_sensitive = True
+
+# c.InteractiveShell.xmode = 'Context'
+
+#-----------------------------------------------------------------------------
+# Formatter and display options
+#-----------------------------------------------------------------------------
+
+# c.PlainTextFormatter.pprint = True
+
+#-----------------------------------------------------------------------------
+# PrefilterManager options
+#-----------------------------------------------------------------------------
+
+# c.PrefilterManager.multi_line_specials = True
+
+#-----------------------------------------------------------------------------
+# AliasManager options
+#-----------------------------------------------------------------------------
+
+# Do this to disable all defaults
+# c.AliasManager.default_aliases = []
+
+# c.AliasManager.user_aliases = [
+# ('foo', 'echo Hi')
+# ]
+
+#-----------------------------------------------------------------------------
+# HistoryManager options
+#-----------------------------------------------------------------------------
+
+# Enable logging output as well as input to the database.
+# c.HistoryManager.db_log_output = False
+
+# Only write to the database every n commands - this can save disk
+# access (and hence power) over the default of writing on every command.
+# c.HistoryManager.db_cache_size = 0
View
19 IPython/config/profile/math/ipython_config.py
@@ -0,0 +1,19 @@
+c = get_config()
+
+# This can be used at any point in a config file to load a sub config
+# and merge it into the current one.
+load_subconfig('ipython_config.py')
+
+lines = """
+import cmath
+from math import *
+"""
+
+# You have to make sure that attributes that are containers already
+# exist before using them. Simply assigning a new list will override
+# all previous values.
+if hasattr(c.Global, 'exec_lines'):
+ c.Global.exec_lines.append(lines)
+else:
+ c.Global.exec_lines = [lines]
+
View
22 IPython/config/profile/pylab/ipython_config.py
@@ -0,0 +1,22 @@
+c = get_config()
+
+# This can be used at any point in a config file to load a sub config
+# and merge it into the current one.
+load_subconfig('ipython_config.py')
+
+lines = """
+import matplotlib
+%gui -a wx
+matplotlib.use('wxagg')
+matplotlib.interactive(True)
+from matplotlib import pyplot as plt
+from matplotlib.pyplot import *
+"""
+
+# You have to make sure that attributes that are containers already
+# exist before using them. Simply assigning a new list will override
+# all previous values.
+if hasattr(c.Global, 'exec_lines'):
+ c.Global.exec_lines.append(lines)
+else:
+ c.Global.exec_lines = [lines]
View
29 IPython/config/profile/pysh/ipython_config.py
@@ -0,0 +1,29 @@
+c = get_config()
+
+# This can be used at any point in a config file to load a sub config
+# and merge it into the current one.
+load_subconfig('ipython_config.py')
+
+c.InteractiveShell.prompt_in1 = '\C_LightGreen\u@\h\C_LightBlue[\C_LightCyan\Y1\C_LightBlue]\C_Green|\#> '
+c.InteractiveShell.prompt_in2 = '\C_Green|\C_LightGreen\D\C_Green> '
+c.InteractiveShell.prompt_out = '<\#> '
+
+c.InteractiveShell.prompts_pad_left = True
+
+c.InteractiveShell.separate_in = ''
+c.InteractiveShell.separate_out = ''
+c.InteractiveShell.separate_out2 = ''
+
+c.PrefilterManager.multi_line_specials = True
+
+lines = """
+%rehashx
+"""
+
+# You have to make sure that attributes that are containers already
+# exist before using them. Simply assigning a new list will override
+# all previous values.
+if hasattr(c.Global, 'exec_lines'):
+ c.Global.exec_lines.append(lines)
+else:
+ c.Global.exec_lines = [lines]
View
29 IPython/config/profile/sympy/ipython_config.py
@@ -0,0 +1,29 @@
+c = get_config()
+
+# This can be used at any point in a config file to load a sub config
+# and merge it into the current one.
+load_subconfig('ipython_config.py')
+
+lines = """
+from __future__ import division
+from sympy import *
+x, y, z = symbols('xyz')
+k, m, n = symbols('kmn', integer=True)
+f, g, h = map(Function, 'fgh')
+"""
+
+# You have to make sure that attributes that are containers already
+# exist before using them. Simply assigning a new list will override
+# all previous values.
+
+if hasattr(c.Global, 'exec_lines'):
+ c.Global.exec_lines.append(lines)
+else:
+ c.Global.exec_lines = [lines]
+
+# Load the sympy_printing extension to enable nice printing of sympy expr's.
+if hasattr(c.Global, 'extensions'):
+ c.Global.extensions.append('sympy_printing')
+else:
+ c.Global.extensions = ['sympy_printing']
+
View
4 setupbase.py
@@ -150,8 +150,8 @@ def find_package_data():
# This is not enough for these things to appear in an sdist.
# We need to muck with the MANIFEST to get this to work
package_data = {
- 'IPython.config.userconfig' : ['*'],
- 'IPython.testing' : ['*.txt']
+ 'IPython.config.profile' : ['README', '*/*.py'],
+ 'IPython.testing' : ['*.txt'],
}
return package_data

0 comments on commit e3bab27

Please sign in to comment.