diff --git a/git/db/cmd/base.py b/git/db/cmd/base.py
index 5fa82dc77..63b029bd7 100644
--- a/git/db/cmd/base.py
+++ b/git/db/cmd/base.py
@@ -2,7 +2,7 @@
 :note: we could add all implementations of the basic interfaces, its more efficient though
  	to obtain them from the pure implementation"""
 from git.exc import (
-					GitCommandError, 
+					GitCommandError,
 					BadObject
 					)
 
@@ -12,7 +12,7 @@
 							)
 
 from git.util import (
-							bin_to_hex, 
+							bin_to_hex,
 							hex_to_bin,
 							isfile,
 							join_path,
@@ -31,7 +31,7 @@
 from git.refs import (
 						Reference,
 						RemoteReference,
-						SymbolicReference, 
+						SymbolicReference,
 						TagReference
 					)
 from git.objects.commit import Commit
@@ -41,7 +41,7 @@
 import sys
 
 
-__all__ = ('CmdTransportMixin', 'GitCommandMixin', 'CmdPushInfo', 'CmdFetchInfo', 
+__all__ = ('CmdTransportMixin', 'GitCommandMixin', 'CmdPushInfo', 'CmdFetchInfo',
 			'CmdRemoteProgress', 'CmdObjectDBRMixin', 'CmdHighLevelRepository')
 
 
@@ -50,13 +50,13 @@
 def touch(filename):
 	fp = open(filename, "a")
 	fp.close()
-	
-	
+
+
 def digest_process_messages(fh, progress):
 	"""Read progress messages from file-like object fh, supplying the respective
 	progress messages to the progress instance.
-	
-	:return: list(line, ...) list of lines without linebreaks that did 
+
+	:return: list(line, ...) list of lines without linebreaks that did
 		not contain progress information"""
 	line_so_far = ''
 	dropped_lines = list()
@@ -64,7 +64,7 @@ def digest_process_messages(fh, progress):
 		char = fh.read(1)
 		if not char:
 			break
-		
+
 		if char in ('\r', '\n'):
 			dropped_lines.extend(progress._parse_progress_line(line_so_far))
 			line_so_far = ''
@@ -73,7 +73,7 @@ def digest_process_messages(fh, progress):
 		# END process parsed line
 	# END while file is not done reading
 	return dropped_lines
-	
+
 def finalize_process(proc):
 	"""Wait for the process (fetch, pull or push) and handle its errors accordingly"""
 	try:
@@ -85,13 +85,13 @@ def finalize_process(proc):
 			raise
 		pass
 	# END exception handling
-	
+
 
 def get_fetch_info_from_stderr(repo, proc, progress):
 	# skip first line as it is some remote info we are not interested in
 	output = IterableList('name')
-	
-	
+
+
 	# lines which are no progress are fetch info lines
 	# this also waits for the command to finish
 	# Skip some progress lines that don't provide relevant information
@@ -107,17 +107,17 @@ def get_fetch_info_from_stderr(repo, proc, progress):
 		# END handle special messages
 		fetch_info_lines.append(line)
 	# END for each line
-	
-	# read head information 
+
+	# read head information
 	fp = open(join(repo.git_dir, 'FETCH_HEAD'),'r')
 	fetch_head_info = fp.readlines()
 	fp.close()
-	
+
 	assert len(fetch_info_lines) == len(fetch_head_info)
-	
-	output.extend(CmdFetchInfo._from_line(repo, err_line, fetch_line) 
+
+	output.extend(CmdFetchInfo._from_line(repo, err_line, fetch_line)
 					for err_line,fetch_line in zip(fetch_info_lines, fetch_head_info))
-	
+
 	finalize_process(proc)
 	return output
 
@@ -127,7 +127,7 @@ def get_push_info(repo, remotename_or_url, proc, progress):
 	# read the lines manually as it will use carriage returns between the messages
 	# to override the previous one. This is why we read the bytes manually
 	digest_process_messages(proc.stderr, progress)
-	
+
 	output = IterableList('name')
 	for line in proc.stdout.readlines():
 		try:
@@ -135,15 +135,15 @@ def get_push_info(repo, remotename_or_url, proc, progress):
 		except ValueError:
 			# if an error happens, additional info is given which we cannot parse
 			pass
-		# END exception handling 
+		# END exception handling
 	# END for each line
-	
+
 	finalize_process(proc)
 	return output
 
 def add_progress(kwargs, git, progress):
-	"""Add the --progress flag to the given kwargs dict if supported by the 
-	git command. If the actual progress in the given progress instance is not 
+	"""Add the --progress flag to the given kwargs dict if supported by the
+	git command. If no actual progress was
 	given, we do not request any progress
 	:return: possibly altered kwargs"""
 	if progress._progress is not None:
@@ -158,62 +158,62 @@ def add_progress(kwargs, git, progress):
 
 class CmdRemoteProgress(RemoteProgress):
 	"""
-	A Remote progress implementation taking a user derived progress to call the 
+	A Remote progress implementation taking a user derived progress to call the
 	respective methods on.
 	"""
 	__slots__ = ("_seen_ops", '_progress')
 	re_op_absolute = re.compile("(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
 	re_op_relative = re.compile("(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")
-	
+
 	def __init__(self, progress_instance = None):
 		self._seen_ops = list()
 		if progress_instance is None:
 			progress_instance = RemoteProgress()
 		#END assure proper instance
 		self._progress = progress_instance
-	
+
 	def _parse_progress_line(self, line):
 		"""Parse progress information from the given line as retrieved by git-push
 		or git-fetch
-		
-		Call the own update(), __call__() and line_dropped() methods according 
+
+		Call the own update(), __call__() and line_dropped() methods according
 		to the parsed result.
-		
+
 		:return: list(line, ...) list of lines that could not be processed"""
 		# handle
-		# Counting objects: 4, done. 
+		# Counting objects: 4, done.
 		# Compressing objects:	50% (1/2)	\rCompressing objects: 100% (2/2)	\rCompressing objects: 100% (2/2), done.
 		sub_lines = line.split('\r')
 		failed_lines = list()
 		for sline in sub_lines:
-			# find esacpe characters and cut them away - regex will not work with 
+			# find escape characters and cut them away - regex will not work with
 			# them as they are non-ascii. As git might expect a tty, it will send them
 			last_valid_index = None
 			for i,c in enumerate(reversed(sline)):
 				if ord(c) < 32:
 					# its a slice index
-					last_valid_index = -i-1 
+					last_valid_index = -i-1
 				# END character was non-ascii
 			# END for each character in sline
 			if last_valid_index is not None:
 				sline = sline[:last_valid_index]
 			# END cut away invalid part
 			sline = sline.rstrip()
-			
+
 			cur_count, max_count = None, None
 			match = self.re_op_relative.match(sline)
 			if match is None:
 				match = self.re_op_absolute.match(sline)
-				
+
 			if not match:
 				self._progress.line_dropped(sline)
 				failed_lines.append(sline)
 				continue
 			# END could not get match
-			
+
 			op_code = 0
 			remote, op_name, percent, cur_count, max_count, message = match.groups()
-			
+
 			# get operation id
 			if op_name == "Counting objects":
 				op_code |= self.COUNTING
@@ -227,7 +227,7 @@ def _parse_progress_line(self, line):
 				op_code |= self.RESOLVING
 			else:
 				# Note: On windows it can happen that partial lines are sent
-				# Hence we get something like "CompreReceiving objects", which is 
+				# Hence we get something like "CompreReceiving objects", which is
 				# a blend of "Compressing objects" and "Receiving objects".
 				# This can't really be prevented, so we drop the line verbosely
 				# to make sure we get informed in case the process spits out new
@@ -238,24 +238,24 @@ def _parse_progress_line(self, line):
 				# drop it
 				return failed_lines
 			#END handle opcode
-			
+
 			# figure out stage
 			if op_code not in self._seen_ops:
 				self._seen_ops.append(op_code)
 				op_code |= self.BEGIN
 			# END begin opcode
-			
+
 			if message is None:
 				message = ''
 			# END message handling
-			
+
 			message = message.strip()
 			done_token = ', done.'
 			if message.endswith(done_token):
 				op_code |= self.END
 				message = message[:-len(done_token)]
 			# END end message handling
-			
+
 			self._progress.update(op_code, cur_count, max_count, message, line)
 			self._progress(message, line)
 		# END for each sub line
@@ -266,17 +266,17 @@ class CmdPushInfo(PushInfo):
 	"""
 	Pure Python implementation of a PushInfo interface
 	"""
-	__slots__ = ('local_ref', 'remote_ref_string', 'flags', 'old_commit_binsha', 
+	__slots__ = ('local_ref', 'remote_ref_string', 'flags', 'old_commit_binsha',
 				'_remotename_or_url', 'repo', 'summary')
-	
-	_flag_map = {	'X' : PushInfo.NO_MATCH, 
+
+	_flag_map = {	'X' : PushInfo.NO_MATCH,
 					'-' : PushInfo.DELETED, '*' : 0,
-					'+' : PushInfo.FORCED_UPDATE, 
-					' ' : PushInfo.FAST_FORWARD, 
-					'=' : PushInfo.UP_TO_DATE, 
+					'+' : PushInfo.FORCED_UPDATE,
+					' ' : PushInfo.FAST_FORWARD,
+					'=' : PushInfo.UP_TO_DATE,
 					'!' : PushInfo.ERROR }
-	
-	def __init__(self, flags, local_ref, remote_ref_string, repo, remotename_or_url, old_commit_binsha=None, 
+
+	def __init__(self, flags, local_ref, remote_ref_string, repo, remotename_or_url, old_commit_binsha=None,
 					summary=''):
 		""" Initialize a new instance """
 		self.flags = flags
@@ -286,12 +286,12 @@ def __init__(self, flags, local_ref, remote_ref_string, repo, remotename_or_url,
 		self._remotename_or_url = remotename_or_url
 		self.old_commit_binsha = old_commit_binsha
 		self.summary = summary
-		
+
 	@property
 	def remote_ref(self):
 		"""
 		:return:
-			Remote Reference or TagReference in the local repository corresponding 
+			Remote Reference or TagReference in the local repository corresponding
 			to the remote_ref_string kept in this instance."""
 		# translate heads to a local remote, tags stay as they are
 		if self.remote_ref_string.startswith("refs/tags"):
@@ -305,29 +305,29 @@ def remote_ref(self):
 			return RemoteReference(self.repo, "refs/remotes/%s/%s" % (str(self._remotename_or_url), remote_ref.name))
 		else:
 			raise ValueError("Could not handle remote ref: %r" % self.remote_ref_string)
-		# END 
-		
+		# END
+
 	@classmethod
 	def _from_line(cls, repo, remotename_or_url, line):
 		"""Create a new PushInfo instance as parsed from line which is expected to be like
 			refs/heads/master:refs/heads/master 05d2687..1d0568e"""
 		control_character, from_to, summary = line.split('\t', 3)
 		flags = 0
-		
+
 		# control character handling
 		try:
 			flags |= cls._flag_map[ control_character ]
 		except KeyError:
-			raise ValueError("Control Character %r unknown as parsed from line %r" % (control_character, line)) 
+			raise ValueError("Control Character %r unknown as parsed from line %r" % (control_character, line))
 		# END handle control character
-		
+
 		# from_to handling
 		from_ref_string, to_ref_string = from_to.split(':')
 		if flags & cls.DELETED:
 			from_ref = None
 		else:
 			from_ref = Reference.from_path(repo, from_ref_string)
-		
+
 		# commit handling, could be message or commit info
 		old_commit_binsha = None
 		if summary.startswith('['):
@@ -345,7 +345,7 @@ def _from_line(cls, repo, remotename_or_url, line):
 				flags |= cls.NEW_HEAD
 			# uptodate encoded in control character
 		else:
-			# fast-forward or forced update - was encoded in control character, 
+			# fast-forward or forced update - was encoded in control character,
 			# but we parse the old and new commit
 			split_token = "..."
 			if control_character == " ":
@@ -353,26 +353,26 @@ def _from_line(cls, repo, remotename_or_url, line):
 			old_sha, new_sha = summary.split(' ')[0].split(split_token)
 			old_commit_binsha = repo.resolve(old_sha)
 		# END message handling
-		
+
 		return cls(flags, from_ref, to_ref_string, repo, remotename_or_url, old_commit_binsha, summary)
-		
+
 
 class CmdFetchInfo(FetchInfo):
 	"""
 	Pure python implementation of a FetchInfo interface
 	"""
 	__slots__ = ('ref','old_commit_binsha', 'flags', 'note')
-	
+
 	#							  %c	%-*s %-*s			  -> %s		  (%s)
 	re_fetch_result = re.compile("^\s*(.) (\[?[\w\s\.]+\]?)\s+(.+) -> ([/\w_\+\.-]+)(	 \(.*\)?$)?")
-	
-	_flag_map = {	'!' : FetchInfo.ERROR, 
-					'+' : FetchInfo.FORCED_UPDATE, 
-					'-' : FetchInfo.TAG_UPDATE, 
+
+	_flag_map = {	'!' : FetchInfo.ERROR,
+					'+' : FetchInfo.FORCED_UPDATE,
+					'-' : FetchInfo.TAG_UPDATE,
 					'*' : 0,
-					'=' : FetchInfo.HEAD_UPTODATE, 
-					' ' : FetchInfo.FAST_FORWARD } 
-	
+					'=' : FetchInfo.HEAD_UPTODATE,
+					' ' : FetchInfo.FAST_FORWARD }
+
 	def __init__(self, ref, flags, note = '', old_commit_binsha = None):
 		"""
 		Initialize a new instance
@@ -381,28 +381,28 @@ def __init__(self, ref, flags, note = '', old_commit_binsha = None):
 		self.flags = flags
 		self.note = note
 		self.old_commit_binsha = old_commit_binsha
-		
+
 	def __str__(self):
 		return self.name
-		
+
 	@property
 	def name(self):
 		""":return: Name of our remote ref"""
 		return self.ref.name
-		
+
 	@property
 	def commit(self):
 		""":return: Commit of our remote ref"""
 		return self.ref.commit
-		
+
 	@classmethod
 	def _from_line(cls, repo, line, fetch_line):
 		"""Parse information from the given line as returned by git-fetch -v
 		and return a new CmdFetchInfo object representing this information.
-		
+
 		We can handle a line as follows
 		"%c %-*s %-*s -> %s%s"
-		
+
 		Where c is either ' ', !, +, -, *, or =
 		! means error
 		+ means success forcing update
@@ -410,13 +410,13 @@ def _from_line(cls, repo, line, fetch_line):
 		* means birth of new branch or tag
 		= means the head was up to date ( and not moved )
 		' ' means a fast-forward
-		
+
 		fetch line is the corresponding line from FETCH_HEAD, like
 		acb0fa8b94ef421ad60c8507b634759a472cd56c	not-for-merge	branch '0.1.7RC' of /tmp/tmpya0vairemote_repo"""
 		match = cls.re_fetch_result.match(line)
 		if match is None:
 			raise ValueError("Failed to parse line: %r" % line)
-			
+
 		# parse lines
 		control_character, operation, local_remote_ref, remote_local_ref, note = match.groups()
 		try:
@@ -424,11 +424,11 @@ def _from_line(cls, repo, line, fetch_line):
 			ref_type_name, fetch_note = fetch_note.split(' ', 1)
 		except ValueError:	# unpack error
 			raise ValueError("Failed to parse FETCH__HEAD line: %r" % fetch_line)
-		
+
 		# handle FETCH_HEAD and figure out ref type
-		# If we do not specify a target branch like master:refs/remotes/origin/master, 
+		# If we do not specify a target branch like master:refs/remotes/origin/master,
 		# the fetch result is stored in FETCH_HEAD which destroys the rule we usually
-		# have. In that case we use a symbolic reference which is detached 
+		# have. In that case we use a symbolic reference which is detached
 		ref_type = None
 		if remote_local_ref == "FETCH_HEAD":
 			ref_type = SymbolicReference
@@ -441,10 +441,10 @@ def _from_line(cls, repo, line, fetch_line):
 		else:
 			raise TypeError("Cannot handle reference type: %r" % ref_type_name)
 		#END handle ref type
-			
+
 		# create ref instance
 		if ref_type is SymbolicReference:
-			remote_local_ref = ref_type(repo, "FETCH_HEAD") 
+			remote_local_ref = ref_type(repo, "FETCH_HEAD")
 		else:
 			# determine prefix. Tags are usually pulled into refs/tags, they may have subdirectories.
 			# It is not clear sometimes where exactly the item is, unless we have an absolute path as indicated
@@ -466,23 +466,23 @@ def _from_line(cls, repo, line, fetch_line):
 			else:
 				ref_path = join_path(ref_type._common_path_default, remote_local_ref)
 			#END obtain refpath
-			
-			# even though the path could be within the git conventions, we make 
+
+			# even though the path could be within the git conventions, we make
 			# sure we respect whatever the user wanted, and disabled path checking
 			remote_local_ref = ref_type(repo, ref_path, check_path=False)
-		# END create ref instance 
-		
-		
+		# END create ref instance
+
+
 		note = ( note and note.strip() ) or ''
-		
+
 		# parse flags from control_character
 		flags = 0
 		try:
 			flags |= cls._flag_map[control_character]
 		except KeyError:
 			raise ValueError("Control character %r unknown as parsed from line %r" % (control_character, line))
-		# END control char exception hanlding 
-		
+		# END control char exception handling
+
 		# parse operation string for more info - makes no sense for symbolic refs
 		old_commit_binsha = None
 		if isinstance(remote_local_ref, Reference):
@@ -499,27 +499,27 @@ def _from_line(cls, repo, line, fetch_line):
 				old_commit_binsha = repo.resolve(operation.split(split_token)[0])
 			# END handle refspec
 		# END reference flag handling
-		
+
 		return cls(remote_local_ref, flags, note, old_commit_binsha)
-		
+
 
 class GitCommandMixin(object):
 	"""A mixin to provide the git command object through the git property"""
-	
+
 	def __init__(self, *args, **kwargs):
 		"""Initialize this instance with the root and a git command"""
 		super(GitCommandMixin, self).__init__(*args, **kwargs)
 		self._git = Git(self.working_dir)
-	
+
 	@property
 	def git(self):
 		return self._git
-	
+
 
 class CmdObjectDBRMixin(object):
 	"""A mixing implementing object reading through a git command
 	It will create objects only in the loose object database.
-	:note: for now, we use the git command to do all the lookup, just until he 
+	:note: for now, we use the git command to do all the lookup, just until we
 		have packs and the other implementations
 	"""
 	#{ ODB Interface
@@ -528,23 +528,23 @@ class CmdObjectDBRMixin(object):
 	def info(self, sha):
 		hexsha, typename, size = self._git.get_object_header(bin_to_hex(sha))
 		return OInfo(hex_to_bin(hexsha), typename, size)
-		
+
 	def stream(self, sha):
 		"""For now, all lookup is done by git itself
-		:note: As we don't know when the stream is actually read (and if it is 
+		:note: As we don't know when the stream is actually read (and if it is
 			stored for later use) we read the data rigth away and cache it.
-			This has HUGE performance implication, both for memory as for 
+			This has HUGE performance implications, both for memory and for
 			reading/deserializing objects, but we have no other choice in order
 			to make the database behaviour consistent with other implementations !"""
-		
+
 		hexsha, typename, size, data = self._git.get_object_data(bin_to_hex(sha))
 		return OStream(hex_to_bin(hexsha), typename, size, StringIO(data))
-		
+
 	def partial_to_complete_sha_hex(self, partial_hexsha):
 		""":return: Full binary 20 byte sha from the given partial hexsha
 		:raise AmbiguousObjectName:
 		:raise BadObject:
-		:note: currently we only raise BadObject as git does not communicate 
+		:note: currently we only raise BadObject as git does not communicate
 			AmbiguousObjects separately"""
 		try:
 			hexsha, typename, size = self._git.get_object_header(partial_hexsha)
@@ -552,20 +552,20 @@ def partial_to_complete_sha_hex(self, partial_hexsha):
 		except (GitCommandError, ValueError):
 			raise BadObject(partial_hexsha)
 		# END handle exceptions
-	
+
 	#} END odb interface
-	
+
 
 class CmdTransportMixin(TransportDB):
 	"""A mixin requiring the .git property as well as repository paths
-	
+
 	It will create objects only in the loose object database.
-	:note: for now, we use the git command to do all the lookup, just until he 
+	:note: for now, we use the git command to do all the lookup, just until we
 		have packs and the other implementations
 	"""
-	
+
 	#{ Transport DB interface
-	
+
 	def push(self, url, refspecs=None, progress=None, **kwargs):
 		"""Push given refspecs using the git default implementation
 		:param url: may be a remote name or a url
@@ -575,10 +575,10 @@ def push(self, url, refspecs=None, progress=None, **kwargs):
 		progress = CmdRemoteProgress(progress)
 		proc = self._git.push(url, refspecs, porcelain=True, as_process=True, **add_progress(kwargs, self.git, progress))
 		return get_push_info(self, url, proc, progress)
-		
+
 	def pull(self, url, refspecs=None, progress=None, **kwargs):
-		"""Fetch and merge the given refspecs. 
-		If not refspecs are given, the merge will only work properly if you 
+		"""Fetch and merge the given refspecs.
+		If no refspecs are given, the merge will only work properly if you
 		have setup upstream (tracking) branches.
 		:param url: may be a remote name or a url
 		:param refspecs: see push()
@@ -586,7 +586,7 @@ def pull(self, url, refspecs=None, progress=None, **kwargs):
 		progress = CmdRemoteProgress(progress)
 		proc = self._git.pull(url, refspecs, with_extended_output=True, as_process=True, v=True, **add_progress(kwargs, self.git, progress))
 		return get_fetch_info_from_stderr(self, proc, progress)
-		
+
 	def fetch(self, url, refspecs=None, progress=None, **kwargs):
 		"""Fetch the latest changes
 		:param url: may be a remote name or a url
@@ -595,36 +595,36 @@ def fetch(self, url, refspecs=None, progress=None, **kwargs):
 		progress = CmdRemoteProgress(progress)
 		proc = self._git.fetch(url, refspecs, with_extended_output=True, as_process=True, v=True, **add_progress(kwargs, self.git, progress))
 		return get_fetch_info_from_stderr(self, proc, progress)
-		
+
 	#} end transport db interface
-	
-	
+
+
 class CmdHighLevelRepository(HighLevelRepository):
 	"""An intermediate interface carrying advanced git functionality that can be used
 	in other comound repositories which do not implement this functionality themselves.
-	
+
 	The mixin must be used with repositories compatible to the GitCommandMixin.
-	
+
 	:note: at some point, methods provided here are supposed to be provided by custom interfaces"""
 	DAEMON_EXPORT_FILE = 'git-daemon-export-ok'
-	
+
 	# precompiled regex
 	re_whitespace = re.compile(r'\s+')
 	re_hexsha_only = re.compile('^[0-9A-Fa-f]{40}$')
 	re_hexsha_shortened = re.compile('^[0-9A-Fa-f]{4,40}$')
 	re_author_committer_start = re.compile(r'^(author|committer)')
 	re_tab_full_line = re.compile(r'^\t(.*)$')
-	
+
 	#{ Configuration
 	CommitCls = Commit
 	GitCls = Git
 	#} END configuration
-	
+
 	def daemon_export():
 		def _get_daemon_export(self):
 			filename = join(self.git_dir, self.DAEMON_EXPORT_FILE)
 			return os.path.exists(filename)
-	
+
 		def _set_daemon_export(self, value):
 			filename = join(self.git_dir, self.DAEMON_EXPORT_FILE)
 			fileexists = os.path.exists(filename)
@@ -635,7 +635,7 @@ def _set_daemon_export(self, value):
 
 		return property(_get_daemon_export, _set_daemon_export,
 						doc="If True, git-daemon may export this repository")
-		
+
 	daemon_export = daemon_export()
 
 	def is_dirty(self, index=True, working_tree=True, untracked_files=False):
@@ -643,10 +643,10 @@ def is_dirty(self, index=True, working_tree=True, untracked_files=False):
 			# Bare repositories with no associated working directory are
 			# always consired to be clean.
 			return False
-		
+
 		# start from the one which is fastest to evaluate
 		default_args = ('--abbrev=40', '--full-index', '--raw')
-		if index: 
+		if index:
 			# diff index against HEAD
 			if isfile(self.index.path) and self.head.is_valid() and \
 				len(self.git.diff('HEAD', '--cached', *default_args)):
@@ -662,7 +662,7 @@ def is_dirty(self, index=True, working_tree=True, untracked_files=False):
 				return True
 		# END untracked files
 		return False
-		
+
 	@property
 	def untracked_files(self):
 		# make sure we get all files, no only untracked directores
@@ -675,7 +675,7 @@ def untracked_files(self):
 			# skip two lines
 			stream.next()
 			stream.next()
-			
+
 			for untracked_info in stream:
 				if not untracked_info.startswith("#\t"):
 					break
@@ -694,7 +694,7 @@ def blame(self, rev, file):
 			parts = self.re_whitespace.split(line, 1)
 			firstpart = parts[0]
 			if self.re_hexsha_only.search(firstpart):
-				# handles 
+				# handles
 				# 634396b2f541a9f2d58b00be1a07f0c358b999b3 1 1 7		- indicates blame-data start
 				# 634396b2f541a9f2d58b00be1a07f0c358b999b3 2 2
 				digits = parts[-1].split(" ")
@@ -708,7 +708,7 @@ def blame(self, rev, file):
 			else:
 				m = self.re_author_committer_start.search(firstpart)
 				if m:
-					# handles: 
+					# handles:
 					# author Tom Preston-Werner
 					# author-mail <tom@mojombo.com>
 					# author-time 1192271832
@@ -776,7 +776,7 @@ def init(cls, path=None, mkdir=True, **kwargs):
 
 	@classmethod
 	def _clone(cls, git, url, path, progress, **kwargs):
-		# special handling for windows for path at which the clone should be 
+		# special handling for windows for path at which the clone should be
 		# created.
 		# tilde '~' will be expanded to the HOME no matter where the ~ occours. Hence
 		# we at least give a proper error instead of letting git fail
@@ -785,9 +785,9 @@ def _clone(cls, git, url, path, progress, **kwargs):
 		if os.name == 'nt':
 			if '~' in path:
 				raise OSError("Git cannot handle the ~ character in path %r correctly" % path)
-				
-			# on windows, git will think paths like c: are relative and prepend the 
-			# current working dir ( before it fails ). We temporarily adjust the working 
+
+			# on windows, git will think paths like c: are relative and prepend the
+			# current working dir ( before it fails ). We temporarily adjust the working
 			# dir to make this actually work
 			match = re.match("(\w:[/\\\])(.*)", path)
 			if match:
@@ -797,9 +797,9 @@ def _clone(cls, git, url, path, progress, **kwargs):
 				os.chdir(drive)
 				path = rest_of_path
 				kwargs['with_keep_cwd'] = True
-			# END cwd preparation 
-		# END windows handling 
-		
+			# END cwd preparation
+		# END windows handling
+
 		try:
 			proc = git.clone(url, path, with_extended_output=True, as_process=True, v=True, **add_progress(kwargs, git, progress))
 			if progress is not None:
@@ -812,16 +812,16 @@ def _clone(cls, git, url, path, progress, **kwargs):
 				path = prev_path
 			# END reset previous working dir
 		# END bad windows handling
-		
-		# our git command could have a different working dir than our actual 
+
+		# our git command could have a different working dir than our actual
 		# environment, hence we prepend its working dir if required
 		if not os.path.isabs(path) and git.working_dir:
 			path = join(git._working_dir, path)
-			
-		# adjust remotes - there may be operating systems which use backslashes, 
+
+		# adjust remotes - there may be operating systems which use backslashes,
 		# These might be given as initial paths, but when handling the config file
 		# that contains the remote from which we were clones, git stops liking it
-		# as it will escape the backslashes. Hence we undo the escaping just to be 
+		# as it will escape the backslashes. Hence we undo the escaping just to be
 		# sure
 		repo = cls(os.path.abspath(path))
 		if repo.remotes:
@@ -833,7 +833,7 @@ def clone(self, path, progress = None, **kwargs):
 		"""
 		:param kwargs:
 			All remaining keyword arguments are given to the git-clone command
-			
+
 		For more information, see the respective method in HighLevelRepository"""
 		return self._clone(self.git, self.git_dir, path, CmdRemoteProgress(progress), **kwargs)
 
@@ -848,15 +848,15 @@ def archive(self, ostream, treeish=None, prefix=None,  **kwargs):
 		"""For all args see HighLevelRepository interface
 		:parm kwargs:
 			Additional arguments passed to git-archive
-			NOTE: Use the 'format' argument to define the kind of format. Use 
+			NOTE: Use the 'format' argument to define the kind of format. Use
 			specialized ostreams to write any format supported by python
 
 		:raise GitCommandError: in case something went wrong"""
 		if treeish is None:
 			treeish = self.head.commit
 		if prefix and 'prefix' not in kwargs:
-			kwargs['prefix'] = prefix 
+			kwargs['prefix'] = prefix
 		kwargs['output_stream'] = ostream
-		
+
 		self.git.archive(treeish, **kwargs)
 		return self
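
# --- Editorial sketch, not part of the patch ------------------------------------
# The progress plumbing touched above is easiest to see from the caller's side:
# CmdRemoteProgress wraps a user-supplied handler and forwards parsed progress to
# its update()/__call__() methods, while add_progress() only requests --progress
# when such a handler was actually given. A minimal, hypothetical usage sketch,
# assuming the RemoteProgress callback signature from git/db/interface.py and a
# repository type that mixes in CmdTransportMixin:

from git.db.interface import RemoteProgress

class PrintingProgress(RemoteProgress):
	def update(self, op_code, cur_count, max_count=None, message='', input=''):
		# op_code carries one operation bit (COUNTING, COMPRESSING, ...) plus
		# optional BEGIN/END stage bits, as produced by _parse_progress_line()
		print(op_code, cur_count, max_count, message)

	def line_dropped(self, line):
		# called for output lines that carried no parsable progress information
		print("dropped:", line)

# repo is assumed to be an instance of a CmdTransportMixin-based repository:
# repo.fetch('origin', progress=PrintingProgress())
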
diff --git a/git/db/compat.py b/git/db/compat.py
index 771a1e770..e0b1c17f2 100644
--- a/git/db/compat.py
+++ b/git/db/compat.py
@@ -7,26 +7,26 @@
 class RepoCompatibilityInterfaceNoBare(object):
 	"""Interface to install backwards compatability of the new complex repository
 	types with the previous, all in one, repository."""
-	
+
 	def rev_parse(self, *args, **kwargs):
 		return self.resolve_object(*args, **kwargs)
-		
+
 	@property
 	def odb(self):
 		"""The odb is now an integrated part of each repository"""
 		return self
-		
+
 	@property
 	def active_branch(self):
 		"""The name of the currently active branch.
 
 		:return: Head to the active branch"""
 		return self.head.reference
-		
+
 	def __repr__(self):
 		"""Return the representation of the repository, the way it used to be"""
 		return '<git.Repo "%s">' % self.git_dir
-		
+
 	@property
 	def branches(self):
 		return self.heads
@@ -35,11 +35,11 @@ def branches(self):
 class RepoCompatibilityInterface(RepoCompatibilityInterfaceNoBare):
 	"""Interface to install backwards compatability of the new complex repository
 	types with the previous, all in one, repository."""
-	
+
 	@property
 	def bare(self):
 		return self.is_bare
-		
+
 	@property
 	def refs(self):
 		return self.references
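
# --- Editorial sketch, not part of the patch ------------------------------------
# The compatibility shims above exist so that code written against the old
# all-in-one Repo keeps working on the new split repository types. A hedged
# illustration, where CompatRepo stands for an assumed concrete class that mixes
# RepoCompatibilityInterface into one of the new repository implementations:

repo = CompatRepo('/path/to/repo')   # CompatRepo is hypothetical, not in this patch
obj = repo.rev_parse('HEAD')         # forwarded to resolve_object(), returns an Object
assert repo.odb is repo              # the object database is now the repository itself
print(repo.active_branch)            # repo.head.reference under the hood
print(repo.bare, repo.branches)      # aliases for is_bare and heads
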
diff --git a/git/db/interface.py b/git/db/interface.py
index 2ff44f261..fa6d3b848 100644
--- a/git/db/interface.py
+++ b/git/db/interface.py
@@ -4,26 +4,26 @@
 # the New BSD License: http://www.opensource.org/licenses/bsd-license.php
 """Contains interfaces for basic database building blocks"""
 
-__all__ = (	'ObjectDBR', 'ObjectDBW', 'RootPathDB', 'CompoundDB', 'CachingDB', 
-			'TransportDB', 'ConfigurationMixin', 'RepositoryPathsMixin',  
-			'RefSpec', 'FetchInfo', 'PushInfo', 'ReferencesMixin', 'SubmoduleDB', 
+__all__ = (	'ObjectDBR', 'ObjectDBW', 'RootPathDB', 'CompoundDB', 'CachingDB',
+			'TransportDB', 'ConfigurationMixin', 'RepositoryPathsMixin',
+			'RefSpec', 'FetchInfo', 'PushInfo', 'ReferencesMixin', 'SubmoduleDB',
 			'IndexDB', 'HighLevelRepository')
 
 
 class ObjectDBR(object):
 	"""Defines an interface for object database lookup.
 	Objects are identified either by their 20 byte bin sha"""
-	
+
 	def __contains__(self, sha):
 		return self.has_obj(sha)
-	
-	#{ Query Interface 
+
+	#{ Query Interface
 	def has_object(self, sha):
 		"""
 		:return: True if the object identified by the given 20 bytes
 			binary sha is contained in the database"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def has_object_async(self, reader):
 		"""Return a reader yielding information about the membership of objects
 		as identified by shas
@@ -31,184 +31,184 @@ def has_object_async(self, reader):
 		:return: async.Reader yielding tuples of (sha, bool) pairs which indicate
 			whether the given sha exists in the database or not"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def info(self, sha):
 		""" :return: OInfo instance
 		:param sha: bytes binary sha
 		:raise BadObject:"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def info_async(self, reader):
 		"""Retrieve information of a multitude of objects asynchronously
 		:param reader: Channel yielding the sha's of the objects of interest
 		:return: async.Reader yielding OInfo|InvalidOInfo, in any order"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def stream(self, sha):
 		""":return: OStream instance
 		:param sha: 20 bytes binary sha
 		:raise BadObject:"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def stream_async(self, reader):
 		"""Retrieve the OStream of multiple objects
 		:param reader: see ``info``
 		:param max_threads: see ``ObjectDBW.store``
 		:return: async.Reader yielding OStream|InvalidOStream instances in any order
-		:note: depending on the system configuration, it might not be possible to 
+		:note: depending on the system configuration, it might not be possible to
 			read all OStreams at once. Instead, read them individually using reader.read(x)
 			where x is small enough."""
 		raise NotImplementedError("To be implemented in subclass")
-	
+
 	def size(self):
 		""":return: amount of objects in this database"""
 		raise NotImplementedError()
-		
+
 	def sha_iter(self):
 		"""Return iterator yielding 20 byte shas for all objects in this data base"""
 		raise NotImplementedError()
-		
+
 	def partial_to_complete_sha_hex(self, partial_hexsha):
 		"""
 		:return: 20 byte binary sha1 from the given less-than-40 byte hexsha
 		:param partial_hexsha: hexsha with less than 40 byte
-		:raise AmbiguousObjectName: If multiple objects would match the given sha 
+		:raise AmbiguousObjectName: If multiple objects would match the given sha
 		:raies BadObject: If object was not found"""
 		raise NotImplementedError()
-			
+
 	def partial_to_complete_sha(self, partial_binsha, canonical_length):
 		""":return: 20 byte sha as inferred by the given partial binary sha
-		:param partial_binsha: binary sha with less than 20 bytes 
+		:param partial_binsha: binary sha with less than 20 bytes
 		:param canonical_length: length of the corresponding canonical (hexadecimal) representation.
 			It is required as binary sha's cannot display whether the original hex sha
 			had an odd or even number of characters
-		:raise AmbiguousObjectName: 
+		:raise AmbiguousObjectName:
 		:raise BadObject: """
 	#} END query interface
-	
-	
+
+
 class ObjectDBW(object):
 	"""Defines an interface to create objects in the database"""
-	
+
 	#{ Edit Interface
 	def set_ostream(self, stream):
 		"""
 		Adjusts the stream to which all data should be sent when storing new objects
-		
+
 		:param stream: if not None, the stream to use, if None the default stream
 			will be used.
 		:return: previously installed stream, or None if there was no override
 		:raise TypeError: if the stream doesn't have the supported functionality"""
 		raise NotImplementedError("To be implemented in subclass")
-		
+
 	def ostream(self):
 		"""
 		:return: overridden output stream this instance will write to, or None
 			if it will write to the default stream"""
 		raise NotImplementedError("To be implemented in subclass")
-	
+
 	def store(self, istream):
 		"""
 		Create a new object in the database
 		:return: the input istream object with its sha set to its corresponding value
-		
-		:param istream: IStream compatible instance. If its sha is already set 
-			to a value, the object will just be stored in the our database format, 
+
+		:param istream: IStream compatible instance. If its sha is already set
+			to a value, the object will just be stored in our database format,
 			in which case the input stream is expected to be in object format ( header + contents ).
 		:raise IOError: if data could not be written"""
 		raise NotImplementedError("To be implemented in subclass")
-	
+
 	def store_async(self, reader):
 		"""
-		Create multiple new objects in the database asynchronously. The method will 
-		return right away, returning an output channel which receives the results as 
+		Create multiple new objects in the database asynchronously. The method will
+		return right away, returning an output channel which receives the results as
 		they are computed.
-		
+
 		:return: Channel yielding your IStream which served as input, in any order.
-			The IStreams sha will be set to the sha it received during the process, 
+			The IStream's sha will be set to the sha it received during the process,
 			or its error attribute will be set to the exception informing about the error.
-			
+
 		:param reader: async.Reader yielding IStream instances.
 			The same instances will be used in the output channel as were received
 			in by the Reader.
-		
-		:note:As some ODB implementations implement this operation atomic, they might 
-			abort the whole operation if one item could not be processed. Hence check how 
+
+		:note: As some ODB implementations implement this operation atomically, they might
+			abort the whole operation if one item could not be processed. Hence check how
 			many items have actually been produced."""
 		raise NotImplementedError("To be implemented in subclass")
-	
+
 	#} END edit interface
-	
+
 
 class RootPathDB(object):
 	"""Provides basic facilities to retrieve files of interest"""
-	
+
 	def __init__(self, root_path):
 		"""Initialize this instance to look for its files at the given root path
 		All subsequent operations will be relative to this path
-		:raise InvalidDBRoot: 
+		:raise InvalidDBRoot:
 		:note: The base will not perform any accessablity checking as the base
-			might not yet be accessible, but become accessible before the first 
+			might not yet be accessible, but become accessible before the first
 			access."""
 		try:
 			super(RootPathDB, self).__init__(root_path)
 		except TypeError:
 			pass
 		# END handle py 2.6
-		
+
 	#{ Interface
 	def root_path(self):
 		""":return: path at which this db operates"""
 		raise NotImplementedError()
-	
+
 	def db_path(self, rela_path):
 		"""
-		:return: the given relative path relative to our database root, allowing 
+		:return: the given relative path relative to our database root, allowing
 			to pontentially access datafiles
-		:param rela_path: if not None or '', the relative path will be appended 
+		:param rela_path: if not None or '', the relative path will be appended
 			to the database root path. Otherwise you will obtain the database root path itself"""
 		raise NotImplementedError()
 	#} END interface
-		
+
 
 class CachingDB(object):
 	"""A database which uses caches to speed-up access"""
-	
-	#{ Interface 
-	
+
+	#{ Interface
+
 	def update_cache(self, force=False):
 		"""
 		Call this method if the underlying data changed to trigger an update
 		of the internal caching structures.
-		
+
 		:param force: if True, the update must be performed. Otherwise the implementation
 			may decide not to perform an update if it thinks nothing has changed.
 		:return: True if an update was performed as something change indeed"""
-		
+
 	# END interface
 
 
 class CompoundDB(object):
 	"""A database which delegates calls to sub-databases.
 	They should usually be cached and lazy-loaded"""
-	
+
 	#{ Interface
-	
+
 	def databases(self):
 		""":return: tuple of database instances we use for lookups"""
 		raise NotImplementedError()
 
 	#} END interface
-	
-	
+
+
 class IndexDB(object):
-	"""A database which provides a flattened index to all objects in its currently 
+	"""A database which provides a flattened index to all objects in its currently
 	active tree."""
 	@property
 	def index(self):
 		""":return: IndexFile compatible instance"""
 		raise NotImplementedError()
-	
+
 
 class RefSpec(object):
 	"""A refspec is a simple container which provides information about the way
@@ -216,17 +216,17 @@ class RefSpec(object):
 	the actual objects which is done using reference names (or respective instances
 	which resolve to actual reference names)."""
 	__slots__ = ('source', 'destination', 'force')
-	
+
 	def __init__(self, source, destination, force=False):
 		"""initalize the instance with the required values
-		:param source: reference name or instance. If None, the Destination 
+		:param source: reference name or instance. If None, the Destination
 			is supposed to be deleted."""
 		self.source = source
 		self.destination = destination
 		self.force = force
 		if self.destination is None:
 			raise ValueError("Destination must be set")
-		
+
 	def __str__(self):
 		""":return: a git-style refspec"""
 		s = str(self.source)
@@ -239,69 +239,69 @@ def __str__(self):
 			p = '+'
 		#END handle force
 		res = "%s%s:%s" % (p, s, d)
-		
+
 	def delete_destination(self):
 		return self.source is None
-		
-		
+
+
 class RemoteProgress(object):
 	"""
 	Handler providing an interface to parse progress information emitted by git-push
 	and git-fetch and to dispatch callbacks allowing subclasses to react to the progress.
-	
+
 	Subclasses should derive from this type.
 	"""
 	_num_op_codes = 7
 	BEGIN, END, COUNTING, COMPRESSING, WRITING, RECEIVING, RESOLVING =  [1 << x for x in range(_num_op_codes)]
 	STAGE_MASK = BEGIN|END
 	OP_MASK = ~STAGE_MASK
-	
+
 	#{ Subclass Interface
-	
+
 	def line_dropped(self, line):
 		"""Called whenever a line could not be understood and was therefore dropped."""
 		pass
-	
+
 	def update(self, op_code, cur_count, max_count=None, message='', input=''):
 		"""Called whenever the progress changes
-		
+
 		:param op_code:
 			Integer allowing to be compared against Operation IDs and stage IDs.
-			
-			Stage IDs are BEGIN and END. BEGIN will only be set once for each Operation 
+
+			Stage IDs are BEGIN and END. BEGIN will only be set once for each Operation
 			ID as well as END. It may be that BEGIN and END are set at once in case only
 			one progress message was emitted due to the speed of the operation.
 			Between BEGIN and END, none of these flags will be set
-			
-			Operation IDs are all held within the OP_MASK. Only one Operation ID will 
+
+			Operation IDs are all held within the OP_MASK. Only one Operation ID will
 			be active per call.
 		:param cur_count: Current absolute count of items
-			
+
 		:param max_count:
-			The maximum count of items we expect. It may be None in case there is 
+			The maximum count of items we expect. It may be None in case there is
 			no maximum number of items or if it is (yet) unknown.
-		
+
 		:param message:
 			In case of the 'WRITING' operation, it contains the amount of bytes
 			transferred. It may possibly be used for other purposes as well.
-		
+
 		:param input:
 			The actual input string that was used to parse the information from.
 			This is usually a line from the output of git-fetch, but really
 			depends on the implementation
-		
+
 		You may read the contents of the current line in self._cur_line"""
 		pass
-	
+
 	def __call__(self, message, input=''):
 		"""Same as update, but with a simpler interface which only provides the
 		message of the operation.
-		:note: This method will be called in addition to the update method. It is 
+		:note: This method will be called in addition to the update method. It is
 			up to you which one you implement"""
 		pass
 	#} END subclass interface
-	
-		
+
+
 class PushInfo(object):
 	"""A type presenting information about the result of a push operation for exactly
 	one refspec
@@ -310,74 +310,74 @@ class PushInfo(object):
 	local_ref			# Reference pointing to the local reference that was pushed
 						# It is None if the ref was deleted.
 	remote_ref_string 	# path to the remote reference located on the remote side
-	remote_ref 			# Remote Reference on the local side corresponding to 
+	remote_ref 			# Remote Reference on the local side corresponding to
 						# the remote_ref_string. It can be a TagReference as well.
 	old_commit_binsha 	# binary sha to commit at which the remote_ref was standing before we pushed
 						# it to local_ref.commit. Will be None if an error was indicated
 	summary				# summary line providing human readable english text about the push
 	"""
 	__slots__ = tuple()
-	
+
 	NEW_TAG, NEW_HEAD, NO_MATCH, REJECTED, REMOTE_REJECTED, REMOTE_FAILURE, DELETED, \
 	FORCED_UPDATE, FAST_FORWARD, UP_TO_DATE, ERROR = [ 1 << x for x in range(11) ]
-		
-		
+
+
 class FetchInfo(object):
 	"""A type presenting information about the fetch operation on exactly one refspec
-	
+
 	The following members are defined:
-	ref				# name of the reference to the changed 
+	ref				# name of the reference to the changed
 					# remote head or FETCH_HEAD. Implementations can provide
 					# actual class instance which convert to a respective string
-	flags			# additional flags to be & with enumeration members, 
-					# i.e. info.flags & info.REJECTED 
+	flags			# additional flags to be & with enumeration members,
+					# i.e. info.flags & info.REJECTED
 					# is 0 if ref is FETCH_HEAD
 	note				# additional notes given by the fetch-pack implementation intended for the user
-	old_commit_binsha# if info.flags & info.FORCED_UPDATE|info.FAST_FORWARD, 
+	old_commit_binsha# if info.flags & info.FORCED_UPDATE|info.FAST_FORWARD,
 					# field is set to the previous location of ref as binary sha or None"""
 	__slots__ = tuple()
-	
+
 	NEW_TAG, NEW_HEAD, HEAD_UPTODATE, TAG_UPDATE, REJECTED, FORCED_UPDATE, \
 	FAST_FORWARD, ERROR = [ 1 << x for x in range(8) ]
 
 
 class TransportDB(object):
 	"""A database which allows to transport objects from and to different locations
-	which are specified by urls (location) and refspecs (what to transport, 
+	which are specified by urls (location) and refspecs (what to transport,
 	see http://www.kernel.org/pub/software/scm/git/docs/git-fetch.html).
-	
+
 	At the beginning of a transport operation, it will be determined which objects
 	have to be sent (either by this or by the other side).
-	
-	Afterwards a pack with the required objects is sent (or received). If there is 
+
+	Afterwards a pack with the required objects is sent (or received). If there is
 	nothing to send, the pack will be empty.
-	
+
 	As refspecs involve symbolic names for references to be handled, we require
 	RefParse functionality. How this is done is up to the actual implementation."""
 	# The following variables need to be set by the derived class
-	
+
 	#{ Interface
-	
+
 	def fetch(self, url, refspecs, progress=None, **kwargs):
 		"""Fetch the objects defined by the given refspec from the given url.
-		:param url: url identifying the source of the objects. It may also be 
+		:param url: url identifying the source of the objects. It may also be
 			a symbol from which the respective url can be resolved, like the
 			name of the remote. The implementation should allow objects as input
 			as well, these are assumed to resovle to a meaningful string though.
-		:param refspecs: iterable of reference specifiers or RefSpec instance, 
+		:param refspecs: iterable of reference specifiers or RefSpec instance,
 			identifying the references to be fetch from the remote.
 		:param progress: RemoteProgress derived instance which receives progress messages for user consumption or None
-		:param kwargs: may be used for additional parameters that the actual implementation could 
+		:param kwargs: may be used for additional parameters that the actual implementation could
 			find useful.
-		:return: List of FetchInfo compatible instances which provide information about what 
+		:return: List of FetchInfo compatible instances which provide information about what
 			was previously fetched, in the order of the input refspecs.
 		:note: even if the operation fails, one of the returned FetchInfo instances
 			may still contain errors or failures in only part of the refspecs.
-		:raise: if any issue occours during the transport or if the url is not 
+		:raise: if any issue occours during the transport or if the url is not
 			supported by the protocol.
 		"""
 		raise NotImplementedError()
-		
+
 	def push(self, url, refspecs, progress=None, **kwargs):
 		"""Transport the objects identified by the given refspec to the remote
 		at the given url.
@@ -385,104 +385,104 @@ def push(self, url, refspecs, progress=None, **kwargs):
 			see fetch() for more details
 		:param refspecs: iterable of refspecs strings or RefSpec instances
 			to identify the objects to push
-		:param progress: see fetch() 
+		:param progress: see fetch()
 		:param kwargs: additional arguments which may be provided by the caller
 			as they may be useful to the actual implementation
 		:todo: what to return ?
 		:raise: if any issue arises during transport or if the url cannot be handled"""
 		raise NotImplementedError()
-		
+
 	@property
 	def remotes(self):
 		""":return: An IterableList of Remote objects allowing to access and manipulate remotes
 		:note: Remote objects can also be used for the actual push or fetch operation"""
 		raise NotImplementedError()
-		
+
 	def remote(self, name='origin'):
 		""":return: Remote object with the given name
 		:note: it does not necessarily exist, hence this is just a more convenient way
 			to construct Remote objects"""
 		raise NotImplementedError()
-		
+
 	#}end interface
-	
-			
+
+
 	#{ Utility Methods
-		
+
 	def create_remote(self, name, url, **kwargs):
 		"""Create a new remote with the given name pointing to the given url
 		:return: Remote instance, compatible to the Remote interface"""
 		return Remote.create(self, name, url, **kwargs)
-		
+
 	def delete_remote(self, remote):
 		"""Delete the given remote.
 		:param remote: a Remote instance"""
 		return Remote.remove(self, remote)
-		
+
 	#} END utility methods
 
 
 class ReferencesMixin(object):
 	"""Database providing reference objects which in turn point to database objects
 	like Commits or Tag(Object)s.
-	
-	The returned types are compatible to the interfaces of the pure python 
+
+	The returned types are compatible to the interfaces of the pure python
 	reference implementation in GitDB.ref"""
-	
+
 	def resolve(self, name):
-		"""Resolve the given name into a binary sha. Valid names are as defined 
+		"""Resolve the given name into a binary sha. Valid names are as defined
 		in the rev-parse documentation http://www.kernel.org/pub/software/scm/git/docs/git-rev-parse.html
 		:return: binary sha matching the name
 		:raise AmbiguousObjectName:
 		:raise BadObject: """
 		raise NotImplementedError()
-		
+
 	def resolve_object(self, name):
-		"""As ``resolve()``, but returns the Objecft instance pointed to by the 
+		"""As ``resolve()``, but returns the Objecft instance pointed to by the
 		resolved binary sha
 		:return: Object instance of the correct type, e.g. shas pointing to commits
 			will be represented by a Commit object"""
 		raise NotImplementedError()
-	
+
 	@property
 	def references(self):
 		""":return: iterable list of all Reference objects representing tags, heads
-		and remote references. This is the most general method to obtain any 
+		and remote references. This is the most general method to obtain any
 		references."""
 		raise NotImplementedError()
-		
+
 	@property
 	def heads(self):
 		""":return: IterableList with HeadReference objects pointing to all
 		heads in the repository."""
 		raise NotImplementedError()
-		
+
 	@property
 	def head(self):
 		""":return: HEAD Object pointing to the current head reference"""
 		raise NotImplementedError()
-		
+
 	@property
 	def tags(self):
-		""":return: An IterableList of TagReferences or compatible items that 
+		""":return: An IterableList of TagReferences or compatible items that
 		are available in this repo"""
 		raise NotImplementedError()
 
 	#{ Utility Methods
-	
+
 	def tag(self, name):
 		""":return: Tag with the given name
 		:note: It does not necessarily exist, hence this is just a more convenient
 			way to construct TagReference objects"""
 		raise NotImplementedError()
-		
-	
+
+
 	def commit(self, rev=None):
 		"""The Commit object for the specified revision
 		:param rev: revision specifier, see git-rev-parse for viable options.
 		:return: Commit compatible object"""
 		raise NotImplementedError()
-		
+
 	def iter_trees(self, *args, **kwargs):
 		""":return: Iterator yielding Tree compatible objects
 		:note: Takes all arguments known to iter_commits method"""
@@ -491,15 +491,15 @@ def iter_trees(self, *args, **kwargs):
 	def tree(self, rev=None):
 		"""The Tree (compatible) object for the given treeish revision
 		Examples::
-	
+
 			  repo.tree(repo.heads[0])
 
 		:param rev: is a revision pointing to a Treeish ( being a commit or tree )
 		:return: ``git.Tree``
-			
+
 		:note:
 			If you need a non-root level tree, find it by iterating the root tree. Otherwise
-			it cannot know about its path relative to the repository root and subsequent 
+			it cannot know about its path relative to the repository root and subsequent
 			operations might have unexpected results."""
 		raise NotImplementedError()
 
@@ -513,80 +513,80 @@ def iter_commits(self, rev=None, paths='', **kwargs):
 		:parm paths:
 			is an optional path or a list of paths to limit the returned commits to
 			Commits that do not contain that path or the paths will not be returned.
-		
+
 		:parm kwargs:
-			Arguments to be passed to git-rev-list - common ones are 
+			Arguments to be passed to git-rev-list - common ones are
 			max_count and skip
 
-		:note: to receive only commits between two named revisions, use the 
+		:note: to receive only commits between two named revisions, use the
 			"revA..revB" revision specifier
 
 		:return: iterator yielding Commit compatible instances"""
 		raise NotImplementedError()
 
-	
+
 	#} END utility methods
-		
+
 	#{ Edit Methods
-		
+
 	def create_head(self, path, commit='HEAD', force=False, logmsg=None ):
 		"""Create a new head within the repository.
 		:param commit:  a resolvable name to the commit or a Commit or Reference instance the new head should point to
 		:param force: if True, a head will be created even though it already exists
 			Otherwise an exception will be raised.
-		:param logmsg: message to append to the reference log. If None, a default message 
+		:param logmsg: message to append to the reference log. If None, a default message
 			will be used
 		:return: newly created Head instances"""
 		raise NotImplementedError()
-		
+
 	def delete_head(self, *heads):
 		"""Delete the given heads
 		:param heads: list of Head references that are to be deleted"""
 		raise NotImplementedError()
-		
+
 	def create_tag(self, path, ref='HEAD', message=None, force=False):
 		"""Create a new tag reference.
 		:param path: name or path of the new tag.
 		:param ref: resolvable name of the reference or commit, or Commit or Reference
 			instance describing the commit the tag should point to.
-		:param message: message to be attached to the tag reference. This will 
+		:param message: message to be attached to the tag reference. This will
 			create an actual Tag object carrying the message. Otherwise a TagReference
 			will be generated.
 		:param force: if True, the Tag will be created even if another tag does already
 			exist at the given path. Otherwise an exception will be thrown
 		:return: TagReference object """
 		raise NotImplementedError()
-		
+
 	def delete_tag(self, *tags):
 		"""Delete the given tag references
 		:param tags: TagReferences to delete"""
 		raise NotImplementedError()
-		
+
 	#}END edit methods
 
 
 class RepositoryPathsMixin(object):
-	"""Represents basic functionality of a full git repository. This involves an 
+	"""Represents basic functionality of a full git repository. This involves an
 	optional working tree, a git directory with references and an object directory.
-	
-	This type collects the respective paths and verifies the provided base path 
+
+	This type collects the respective paths and verifies the provided base path
 	truly is a git repository.
-	
-	If the underlying type provides the config_reader() method, we can properly determine 
+
+	If the underlying type provides the config_reader() method, we can properly determine
 	whether this is a bare repository as well. Otherwise it will make an educated guess
 	based on the path name."""
 	#{ Subclass Interface
 	def _initialize(self, path):
-		"""initialize this instance with the given path. It may point to 
+		"""initialize this instance with the given path. It may point to
 		any location within the repositories own data, as well as the working tree.
-		
-		The implementation will move up and search for traces of a git repository, 
-		which is indicated by a child directory ending with .git or the 
+
+		The implementation will move up and search for traces of a git repository,
+		which is indicated by a child directory ending with .git or the
 		current path portion ending with .git.
-		
+
 		The paths made available for query are suitable for full git repositories
 		only. Plain object databases need to be fed the "objects" directory path.
-		
+
 		:param path: the path to initialize the repository with
 			It is a path to either the root git directory or the bare git repo::
 
@@ -594,159 +594,159 @@ def _initialize(self, path):
 			repo = Repo("/Users/mtrier/Development/git-python.git")
 			repo = Repo("~/Development/git-python.git")
 			repo = Repo("$REPOSITORIES/Development/git-python.git")
-		
+
 		:raise InvalidDBRoot:
 		"""
 		raise NotImplementedError()
 	#} end subclass interface
-	
+
 	#{ Object Interface
-	
+
 	def __eq__(self, rhs):
 		raise NotImplementedError()
-		
+
 	def __ne__(self, rhs):
 		raise NotImplementedError()
-		
+
 	def __hash__(self):
 		raise NotImplementedError()
 
 	def __repr__(self):
 		raise NotImplementedError()
-	
+
 	#} END object interface
-	
+
 	#{ Interface
-	
+
 	@property
 	def is_bare(self):
 		""":return: True if this is a bare repository
 		:note: this value is cached upon initialization"""
 		raise NotImplementedError()
-		
+
 	@property
 	def git_dir(self):
-		""":return: path to directory containing this actual git repository (which 
+		""":return: path to directory containing this actual git repository (which
 		in turn provides access to objects and references"""
 		raise NotImplementedError()
-		
+
 	@property
 	def working_tree_dir(self):
-		""":return: path to directory containing the working tree checkout of our 
+		""":return: path to directory containing the working tree checkout of our
 		git repository.
 		:raise AssertionError: If this is a bare repository"""
 		raise NotImplementedError()
-		
+
 	@property
 	def objects_dir(self):
 		""":return: path to the repository's objects directory"""
 		raise NotImplementedError()
-		
+
 	@property
 	def working_dir(self):
-		""":return: working directory of the git process or related tools, being 
+		""":return: working directory of the git process or related tools, being
 		either the working_tree_dir if available or the git_path"""
 		raise NotImplementedError()
 
 	@property
 	def description(self):
-		""":return: description text associated with this repository or set the 
+		""":return: description text associated with this repository or set the
 		description."""
 		raise NotImplementedError()
-	
+
 	#} END interface
-		
-		
+
+
 class ConfigurationMixin(object):
 	"""Interface providing configuration handler instances, which provide locked access
 	to a single git-style configuration file (ini-like format, using tabs to improve readability).
-	
+
 	Configuration readers can be initialized with multiple files at once, whose information is concatenated
-	when reading. Lower-level files overwrite values from higher level files, i.e. a repository configuration file 
+	when reading. Lower-level files overwrite values from higher level files, i.e. a repository configuration file
 	overwrites information coming from a system configuration file
-	
+
 	:note: for the 'repository' config level, a git_path() compatible type is required"""
 	config_level = ("system", "global", "repository")
-		
+
 	#{ Interface
-	
+
 	def config_reader(self, config_level=None):
 		"""
 		:return:
 			GitConfigParser allowing to read the full git configuration, but not to write it
-			
-			The configuration will include values from the system, user and repository 
+
+			The configuration will include values from the system, user and repository
 			configuration files.
-			
+
 		:param config_level:
 			For possible values, see config_writer method
-			If None, all applicable levels will be used. Specify a level in case 
-			you know which exact file you whish to read to prevent reading multiple files for 
+			If None, all applicable levels will be used. Specify a level in case
+			you know which exact file you wish to read to prevent reading multiple files for
 			instance
-		:note: On windows, system configuration cannot currently be read as the path is 
+		:note: On windows, system configuration cannot currently be read as the path is
 			unknown, instead the global path will be used."""
 		raise NotImplementedError()
-		
+
 	def config_writer(self, config_level="repository"):
 		"""
 		:return:
 			GitConfigParser allowing to write values of the specified configuration file level.
-			Config writers should be retrieved, used to change the configuration ,and written 
+			Config writers should be retrieved, used to change the configuration, and written
 			right away, as they will lock the configuration file in question and prevent others
 			from writing to it.
-			
+
 		:param config_level:
 			One of the following values
 			system = system-wide configuration file
 			global = user level configuration file
 			repository = configuration file for this repository only"""
 		raise NotImplementedError()
-	
-	
+
+
 	#} END interface
-	
-	
+
+
 class SubmoduleDB(object):
 	"""Interface providing access to git repository submodules.
 	The actual implementation is found in the Submodule object type, which is
 	currently only available in one implementation."""
-	
+
 	@property
 	def submodules(self):
 		"""
 		:return: git.IterableList(Submodule, ...) of direct submodules
 			available from the current head"""
 		raise NotImplementedError()
-		
+
 	def submodule(self, name):
-		""" :return: Submodule with the given name 
+		""" :return: Submodule with the given name
 		:raise ValueError: If no such submodule exists"""
 		raise NotImplementedError()
-		
+
 	def create_submodule(self, *args, **kwargs):
 		"""Create a new submodule
-		
-		:note: See the documentation of Submodule.add for a description of the 
+
+		:note: See the documentation of Submodule.add for a description of the
 			applicable parameters
 		:return: created submodules"""
 		raise NotImplementedError()
-		
+
 	def iter_submodules(self, *args, **kwargs):
 		"""An iterator yielding Submodule instances, see Traversable interface
 		for a description of args and kwargs
 		:return: Iterator"""
 		raise NotImplementedError()
-		
+
 	def submodule_update(self, *args, **kwargs):
-		"""Update the submodules, keeping the repository consistent as it will 
+		"""Update the submodules, keeping the repository consistent as it will
 		take the previous state into consideration. For more information, please
 		see the documentation of RootModule.update"""
 		raise NotImplementedError()
-		
-		
+
+
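A brief sketch of how client code might drive the SubmoduleDB interface above, assuming the concrete git.Repo type; the repository path, submodule name, and the commented create_submodule arguments (which follow the referenced Submodule.add documentation) are purely illustrative.

from git import Repo

repo = Repo("~/Development/git-python.git")	# illustrative path

# direct submodules of the current head
for sm in repo.submodules:
	print(sm.name)

# recursive traversal; see the Traversable interface for accepted arguments
for sm in repo.iter_submodules():
	print(sm.name)

# lookup by name raises ValueError if no such submodule exists
try:
	sm = repo.submodule("gitdb")	# illustrative name
except ValueError:
	sm = None

# add a new submodule (arguments as documented for Submodule.add), then
# bring all submodules to the state recorded in the repository
# repo.create_submodule("async", "ext/async", url="git://example.com/async.git")
repo.submodule_update()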
 class HighLevelRepository(object):
 	"""An interface combining several high-level repository functionality and properties"""
-	
+
 	@property
 	def daemon_export(self):
 		""":return: True if the repository may be published by the git-daemon"""
@@ -756,16 +756,16 @@ def is_dirty(self, index=True, working_tree=True, untracked_files=False):
 		"""
 		:return:
 			``True`` if the repository is considered dirty. By default it will react
-			like a git-status without untracked files, hence it is dirty if the 
+			like a git-status without untracked files, hence it is dirty if the
 			index or the working copy have changes."""
 		raise NotImplementedError()
-		
+
 	@property
 	def untracked_files(self):
 		"""
 		:return:
 			list(str,...)
-			
+
 		:note:
 			ignored files will not appear here, i.e. files mentioned in .gitignore.
 			Bare repositories never have untracked files"""
@@ -777,23 +777,23 @@ def blame(self, rev, file):
 		:param rev: revision specifier, see git-rev-parse for viable options.
 		:return:
 			list: [Commit, list: [<line>]]
-			A list of tuples associating a Commit object with a list of lines that 
+			A list of tuples associating a Commit object with a list of lines that
 			changed within the given commit. The Commit objects will be given in order
 			of appearance."""
 		raise NotImplementedError()
-		
+
 	@classmethod
 	def init(cls, path=None, mkdir=True):
 		"""Initialize a git repository at the given path if specified
 
 		:param path:
 			is the full path to the repo (traditionally ends with /<name>.git)
-			or None in which case the repository will be created in the current 
+			or None in which case the repository will be created in the current
 			working directory
 
 		:param mkdir:
 			if specified will create the repository directory if it doesn't
-			already exists. Creates the directory with a mode=0755. 
+			already exists. Creates the directory with a mode=0755.
 			Only effective if a path is explicitly given
 
 		:return: Instance pointing to the newly created repository with similar capabilities
@@ -807,7 +807,7 @@ def clone(self, path, progress = None):
 
 		:param progress:
 			a RemoteProgress instance or None if no progress information is required
-		
+
 		:return: ``git.Repo`` (the newly cloned repo)"""
 		raise NotImplementedError()
 
@@ -828,9 +828,9 @@ def archive(self, ostream, treeish=None, prefix=None):
 		:param prefix: is the optional prefix to prepend to each filename in the archive
 		:param kwargs:
 			Additional arguments passed to git-archive
-			NOTE: Use the 'format' argument to define the kind of format. Use 
+			NOTE: Use the 'format' argument to define the kind of format. Use
 			specialized ostreams to write any format supported by python
 		:return: self"""
 		raise NotImplementedError()
-	
-	
+
+
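To round off the interface portion, a condensed usage sketch of HighLevelRepository, again assuming the concrete git.Repo type; every path and file name below is illustrative only.

from git import Repo

# initialize a new repository, creating the directory with mode 0755 if needed
new_repo = Repo.init("/tmp/sketch_repo", mkdir=True)

repo = Repo("~/Development/git-python.git")	# illustrative existing repository

# dirty check behaves like git-status without untracked files by default
if repo.is_dirty(index=True, working_tree=True, untracked_files=False):
	print("index or working tree contains changes")
print(repo.untracked_files)	# ignored files never show up here

# blame yields (Commit, [line, ...]) pairs in order of appearance
for commit, lines in repo.blame("HEAD", "setup.py"):
	print("%s changed %d lines" % (commit, len(lines)))

# clone next to the original; progress=None discards progress information
clone = repo.clone("/tmp/git-python-clone", progress=None)

# archive the current head into a stream; 'format' is passed through to git-archive
# repo.archive(open("/tmp/git-python.tar", "wb"), treeish="HEAD", format="tar")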
diff --git a/git/db/py/base.py b/git/db/py/base.py
index d34e8b124..2100cc396 100644
--- a/git/db/py/base.py
+++ b/git/db/py/base.py
@@ -12,7 +12,7 @@
 		normpath,
 		abspath,
 		dirname,
-		LazyMixin, 
+		LazyMixin,
 		hex_to_bin,
 		bin_to_hex,
 		expandvars,
@@ -24,7 +24,7 @@
 from git.index import IndexFile
 from git.config import GitConfigParser
 from git.exc import 	(
-						BadObject, 
+						BadObject,
 						AmbiguousObjectName,
 						InvalidGitRepositoryError,
 						NoSuchPathError
@@ -37,28 +37,28 @@
 import os
 
 
-__all__ = (	'PureObjectDBR', 'PureObjectDBW', 'PureRootPathDB', 'PureCompoundDB', 
+__all__ = (	'PureObjectDBR', 'PureObjectDBW', 'PureRootPathDB', 'PureCompoundDB',
 			'PureConfigurationMixin', 'PureRepositoryPathsMixin', 'PureAlternatesFileMixin',
 			'PureIndexDB')
-                                                                        
+
 
 class PureObjectDBR(ObjectDBR):
-	
-	#{ Query Interface 
-		
+
+	#{ Query Interface
+
 	def has_object_async(self, reader):
 		task = ChannelThreadTask(reader, str(self.has_object_async), lambda sha: (sha, self.has_object(sha)))
-		return pool.add_task(task) 
-		
+		return pool.add_task(task)
+
 	def info_async(self, reader):
 		task = ChannelThreadTask(reader, str(self.info_async), self.info)
 		return pool.add_task(task)
-		
+
 	def stream_async(self, reader):
 		# base implementation just uses the stream method repeatedly
 		task = ChannelThreadTask(reader, str(self.stream_async), self.stream)
 		return pool.add_task(task)
-	
+
 	def partial_to_complete_sha_hex(self, partial_hexsha):
 		len_partial_hexsha = len(partial_hexsha)
 		if len_partial_hexsha % 2 != 0:
@@ -67,56 +67,56 @@ def partial_to_complete_sha_hex(self, partial_hexsha):
 			partial_binsha = hex_to_bin(partial_hexsha)
 		# END assure successful binary conversion
 		return self.partial_to_complete_sha(partial_binsha, len(partial_hexsha))
-	
+
 	#} END query interface
-	
-	
+
+
 class PureObjectDBW(ObjectDBW):
-	
+
 	def __init__(self, *args, **kwargs):
 		try:
 			super(PureObjectDBW, self).__init__(*args, **kwargs)
 		except TypeError:
 			pass
-		#END handle py 2.6 
+		#END handle py 2.6
 		self._ostream = None
-	
+
 	#{ Edit Interface
 	def set_ostream(self, stream):
 		cstream = self._ostream
 		self._ostream = stream
 		return cstream
-		
+
 	def ostream(self):
 		return self._ostream
-	
+
 	def store_async(self, reader):
-		task = ChannelThreadTask(reader, str(self.store_async), self.store) 
+		task = ChannelThreadTask(reader, str(self.store_async), self.store)
 		return pool.add_task(task)
-	
+
 	#} END edit interface
-	
+
 
 class PureRootPathDB(RootPathDB):
-	
+
 	def __init__(self, root_path):
 		self._root_path = root_path
 		super(PureRootPathDB, self).__init__(root_path)
-		
-		
-	#{ Interface 
+
+
+	#{ Interface
 	def root_path(self):
 		return self._root_path
-	
+
 	def db_path(self, rela_path=None):
 		if not rela_path:
 			return self._root_path
 		return join(self._root_path, rela_path)
 	#} END interface
-		
+
 
 def _databases_recursive(database, output):
-	"""Fill output list with database from db, in order. Deals with Loose, Packed 
+	"""Fill output list with database from db, in order. Deals with Loose, Packed
 	and compound databases."""
 	if isinstance(database, CompoundDB):
 		compounds = list()
@@ -127,7 +127,7 @@ def _databases_recursive(database, output):
 	else:
 		output.append(database)
 	# END handle database type
-	
+
 
 class PureCompoundDB(CompoundDB, PureObjectDBR, LazyMixin, CachingDB):
 	def _set_cache_(self, attr):
@@ -135,16 +135,16 @@ def _set_cache_(self, attr):
 			self._dbs = list()
 		else:
 			super(PureCompoundDB, self)._set_cache_(attr)
-	
-	#{ PureObjectDBR interface 
-	
+
+	#{ PureObjectDBR interface
+
 	def has_object(self, sha):
 		for db in self._dbs:
 			if db.has_object(sha):
 				return True
 		#END for each db
 		return False
-		
+
 	def info(self, sha):
 		for db in self._dbs:
 			try:
@@ -152,7 +152,7 @@ def info(self, sha):
 			except BadObject:
 				pass
 		#END for each db
-		
+
 	def stream(self, sha):
 		for db in self._dbs:
 			try:
@@ -163,14 +163,14 @@ def stream(self, sha):
 
 	def size(self):
 		return reduce(lambda x,y: x+y, (db.size() for db in self._dbs), 0)
-		
+
 	def sha_iter(self):
 		return chain(*(db.sha_iter() for db in self._dbs))
-		
+
 	#} END object DBR Interface
-	
+
 	#{ Interface
-	
+
 	def databases(self):
 		return tuple(self._dbs)
 
@@ -183,15 +183,15 @@ def update_cache(self, force=False):
 			# END if is caching db
 		# END for each database to update
 		return stat
-		
+
 	def partial_to_complete_sha_hex(self, partial_hexsha):
 		len_partial_hexsha = len(partial_hexsha)
 		if len_partial_hexsha % 2 != 0:
 			partial_binsha = hex_to_bin(partial_hexsha + "0")
 		else:
 			partial_binsha = hex_to_bin(partial_hexsha)
-		# END assure successful binary conversion 
-		
+		# END assure successful binary conversion
+
 		candidate = None
 		for db in self._dbs:
 			full_bin_sha = None
@@ -213,34 +213,34 @@ def partial_to_complete_sha_hex(self, partial_hexsha):
 		if not candidate:
 			raise BadObject(partial_binsha)
 		return candidate
-		
+
 	def partial_to_complete_sha(self, partial_binsha, hex_len):
 		"""Simple adaptor to feed into our implementation"""
 		return self.partial_to_complete_sha_hex(bin_to_hex(partial_binsha)[:hex_len])
 	#} END interface
-	
-		
+
+
 class PureRepositoryPathsMixin(RepositoryPathsMixin):
 	# slots has no effect here, its just to keep track of used attrs
 	__slots__  = ("_git_path", '_bare', '_working_tree_dir')
-	
-	#{ Configuration 
+
+	#{ Configuration
 	repo_dir = '.git'
 	objs_dir = 'objects'
 	#} END configuration
-	
+
 	#{ Subclass Interface
 	def _initialize(self, path):
 		epath = abspath(expandvars(expanduser(path or os.getcwd())))
 
 		if not exists(epath):
 			raise NoSuchPathError(epath)
-		#END check file 
+		#END check file
 
 		self._working_tree_dir = None
 		self._git_path = None
 		curpath = epath
-		
+
 		# walk up the path to find the .git dir
 		while curpath:
 			if is_git_dir(curpath):
@@ -256,7 +256,7 @@ def _initialize(self, path):
 			if not dummy:
 				break
 		# END while curpath
-		
+
 		if self._git_path is None:
 			raise InvalidGitRepositoryError(epath)
 		# END path not found
@@ -264,55 +264,55 @@ def _initialize(self, path):
 		self._bare = self._working_tree_dir is None
 		if hasattr(self, 'config_reader'):
 			try:
-				self._bare = self.config_reader("repository").getboolean('core','bare') 
+				self._bare = self.config_reader("repository").getboolean('core','bare')
 			except Exception:
 				# lets not assume the option exists, although it should
 				pass
 			#END handle exception
 		#END check bare flag
 		self._working_tree_dir = self._bare and None or self._working_tree_dir
-		
+
 	#} end subclass interface
-	
+
 	#{ Object Interface
-	
+
 	def __eq__(self, rhs):
 		if hasattr(rhs, 'git_dir'):
 			return self.git_dir == rhs.git_dir
 		return False
-		
+
 	def __ne__(self, rhs):
 		return not self.__eq__(rhs)
-		
+
 	def __hash__(self):
 		return hash(self.git_dir)
 
 	def __repr__(self):
 		return "%s(%r)" % (type(self).__name__, self.git_dir)
-	
+
 	#} END object interface
-	
+
 	#{ Interface
-	
+
 	@property
 	def is_bare(self):
 		return self._bare
-		
+
 	@property
 	def git_dir(self):
 		return self._git_path
-		
+
 	@property
 	def working_tree_dir(self):
 		if self._working_tree_dir is None:
 			raise AssertionError("Repository at %s is bare and does not have a working tree directory" % self.git_dir)
 		#END assertion
 		return dirname(self.git_dir)
-	
+
 	@property
 	def objects_dir(self):
 		return join(self.git_dir, self.objs_dir)
-	
+
 	@property
 	def working_dir(self):
 		if self.is_bare:
@@ -320,35 +320,35 @@ def working_dir(self):
 		else:
 			return self.working_tree_dir
 		#END handle bare state
-		
+
 	def _mk_description():
 		def _get_description(self):
 			filename = join(self.git_dir, 'description')
 			return file(filename).read().rstrip()
-	
+
 		def _set_description(self, descr):
 			filename = join(self.git_dir, 'description')
 			file(filename, 'w').write(descr+'\n')
-			
+
 		return property(_get_description, _set_description, "Descriptive text for the content of the repository")
 
 	description = _mk_description()
 	del(_mk_description)
-	
+
 	#} END interface
-		
-		
+
+
 class PureConfigurationMixin(ConfigurationMixin):
-	
+
 	#{ Configuration
 	system_config_file_name = "gitconfig"
 	repo_config_file_name = "config"
 	#} END
-	
+
 	def __new__(cls, *args, **kwargs):
 		"""This is just a stupid workaround for the evil py2.6 change which makes mixins quite impossible"""
 		return super(PureConfigurationMixin, cls).__new__(cls, *args, **kwargs)
-	
+
 	def __init__(self, *args, **kwargs):
 		"""Verify prereqs"""
 		try:
@@ -357,14 +357,14 @@ def __init__(self, *args, **kwargs):
 			pass
 		#END handle code-breaking change in python 2.6
 		assert hasattr(self, 'git_dir')
-	
+
 	def _path_at_level(self, level ):
-		# we do not support an absolute path of the gitconfig on windows , 
+		# we do not support an absolute path of the gitconfig on windows,
 		# use the global config instead
 		if sys.platform == "win32" and level == "system":
 			level = "global"
 		#END handle windows
-			
+
 		if level == "system":
 			return "/etc/%s" % self.system_config_file_name
 		elif level == "global":
@@ -372,11 +372,11 @@ def _path_at_level(self, level ):
 		elif level == "repository":
 			return join(self.git_dir, self.repo_config_file_name)
 		#END handle level
-		
+
 		raise ValueError("Invalid configuration level: %r" % level)
-		
+
 	#{ Interface
-	
+
 	def config_reader(self, config_level=None):
 		files = None
 		if config_level is None:
@@ -385,36 +385,36 @@ def config_reader(self, config_level=None):
 			files = [ self._path_at_level(config_level) ]
 		#END handle level
 		return GitConfigParser(files, read_only=True)
-		
+
 	def config_writer(self, config_level="repository"):
 		return GitConfigParser(self._path_at_level(config_level), read_only=False)
-	
-	
+
+
 	#} END interface
-	
-	
+
+
 class PureIndexDB(IndexDB):
 	#{ Configuration
 	IndexCls = IndexFile
 	#} END configuration
-	
+
 	@property
 	def index(self):
 		return self.IndexCls(self)
-	
-	
+
+
 class PureAlternatesFileMixin(object):
 	"""Utility able to read and write an alternates file through the alternates property
 	It needs to be part of a type with the git_dir or db_path property.
-	
+
 	The file by default is assumed to be located at the default location as imposed
 	by the standard git repository layout"""
-	
+
 	#{ Configuration
 	alternates_filepath = os.path.join('info', 'alternates')	# relative path to alternates file
-	
+
 	#} END configuration
-	
+
 	def __init__(self, *args, **kwargs):
 		try:
 			super(PureAlternatesFileMixin, self).__init__(*args, **kwargs)
@@ -422,9 +422,9 @@ def __init__(self, *args, **kwargs):
 			pass
 		#END handle py2.6 code breaking changes
 		self._alternates_path()	# throws on incompatible type
-	
-	#{ Interface 
-	
+
+	#{ Interface
+
 	def _alternates_path(self):
 		if hasattr(self, 'git_dir'):
 			return join(self.git_dir, 'objects', self.alternates_filepath)
@@ -433,7 +433,7 @@ def _alternates_path(self):
 		else:
 			raise AssertionError("This mixin requires a parent type with either the git_dir property or db_path method")
 		#END handle path
-	
+
 	def _get_alternates(self):
 		"""The list of alternates for this repo from which objects can be retrieved
 
@@ -455,14 +455,14 @@ def _set_alternates(self, alts):
 		"""Sets the alternates
 
 		:param alts:
-			is the array of string paths representing the alternates at which 
+			is the array of string paths representing the alternates at which
 			git should look for objects, i.e. /home/user/repo/.git/objects
 
 		:raise NoSuchPathError:
 		:note:
 			The method does not check for the existence of the paths in alts
 			as the caller is responsible."""
-		alternates_path = self._alternates_path() 
+		alternates_path = self._alternates_path()
 		if not alts:
 			if isfile(alternates_path):
 				os.remove(alternates_path)
@@ -472,10 +472,10 @@ def _set_alternates(self, alts):
 				f.write("\n".join(alts))
 			finally:
 				f.close()
-			# END file handling 
+			# END file handling
 		# END alts handling
 
 	alternates = property(_get_alternates, _set_alternates, doc="Retrieve a list of alternates paths or set a list paths to be used as alternates")
-	
+
 	#} END interface
-	
+
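A short sketch of the configuration levels and the alternates handling provided by the Pure* mixins above, assuming the concrete git.Repo type mixes them in; the path is illustrative and set_value is assumed from the GitConfigParser API.

from git import Repo

repo = Repo("~/Development/git-python.git")	# illustrative path

# a reader spanning all applicable levels: system, global and repository
reader = repo.config_reader()
print(reader.getboolean("core", "bare"))

# a writer is bound to exactly one level and locks that file until released
writer = repo.config_writer(config_level="repository")
writer.set_value("core", "ignorecase", "false")	# set_value assumed from GitConfigParser
del(writer)	# flush the change and release the lock on the configuration file

# alternates are read and written as a plain list of object directory paths
print(repo.alternates)
repo.alternates = []	# an empty list removes objects/info/alternates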
diff --git a/git/remote.py b/git/remote.py
index 47adedbf7..70b48e43e 100644
--- a/git/remote.py
+++ b/git/remote.py
@@ -25,31 +25,30 @@
 class PushInfo(object):
 	"""Wrapper for basic PushInfo to provide the previous interface which includes
 	resolved objects instead of plain shas
-	
+
 	old_commit	# object for the corresponding old_commit_sha"""
-	
-	
-	
+
+
 class FetchInfo(object):
-	"""Wrapper to restore the previous interface, resolving objects and wrapping 
+	"""Wrapper to restore the previous interface, resolving objects and wrapping
 	references"""
 
 
 class Remote(LazyMixin, Iterable):
 	"""Provides easy read and write access to a git remote.
-	
-	Everything not part of this interface is considered an option for the current 
+
+	Everything not part of this interface is considered an option for the current
 	remote, allowing constructs like remote.pushurl to query the pushurl.
-	
+
 	NOTE: When querying configuration, the configuration accessor will be cached
 	to speed up subsequent accesses."""
-	
+
 	__slots__ = ( "repo", "name", "_config_reader" )
 	_id_attribute_ = "name"
-	
+
 	def __init__(self, repo, name):
 		"""Initialize a remote instance
-		
+
 		:param repo: The repository we are a remote of
 		:param name: the name of the remote, i.e. 'origin'"""
 		if not hasattr(repo, 'git'):
@@ -57,30 +56,30 @@ def __init__(self, repo, name):
 			# but lets just be lazy for now
 			raise AssertionError("Require repository to provide a git command instance currently")
 		#END assert git cmd
-		
+
 		if not isinstance(repo, TransportDB):
 			raise AssertionError("Require TransportDB interface implementation")
 		#END verify interface
-		
+
 		self.repo = repo
 		self.name = name
-		
+
 		if os.name == 'nt':
 			# some oddity: on windows, python 2.5, it for some reason does not realize
 			# that it has the config_writer property, but instead calls __getattr__
 			# which will not yield the expected results. 'pinging' the members
-			# with a dir call creates the config_writer property that we require 
+			# with a dir call creates the config_writer property that we require
 			# ... bugs like these make me wonder wheter python really wants to be used
 			# for production. It doesn't happen on linux though.
 			dir(self)
 		# END windows special handling
-		
+
 	def __getattr__(self, attr):
-		"""Allows to call this instance like 
+		"""Allows calling this instance like
 		remote.special( *args, **kwargs) to call git-remote special self.name"""
 		if attr == "_config_reader":
 			return super(Remote, self).__getattr__(attr)
-		
+
 		# sometimes, probably due to a bug in python itself, we are being called
 		# even though a slot of the same name exists
 		try:
@@ -88,32 +87,32 @@ def __getattr__(self, attr):
 		except NoOptionError:
 			return super(Remote, self).__getattr__(attr)
 		# END handle exception
-	
+
 	def _config_section_name(self):
 		return 'remote "%s"' % self.name
-	
+
 	def _set_cache_(self, attr):
 		if attr == "_config_reader":
 			self._config_reader = SectionConstraint(self.repo.config_reader(), self._config_section_name())
 		else:
 			super(Remote, self)._set_cache_(attr)
-			
-	
+
+
 	def __str__(self):
-		return self.name 
-	
+		return self.name
+
 	def __repr__(self):
 		return '<git.%s "%s">' % (self.__class__.__name__, self.name)
-		
+
 	def __eq__(self, other):
 		return self.name == other.name
-		
+
 	def __ne__(self, other):
 		return not ( self == other )
-		
+
 	def __hash__(self):
 		return hash(self.name)
-	
+
 	@classmethod
 	def iter_items(cls, repo):
 		""":return: Iterator yielding Remote objects of the given repository"""
@@ -126,41 +125,41 @@ def iter_items(cls, repo):
 				raise ValueError("Remote-Section has invalid format: %r" % section)
 			yield Remote(repo, section[lbound+1:rbound])
 		# END for each configuration section
-		
+
 	@property
 	def refs(self):
 		"""
 		:return:
-			IterableList of RemoteReference objects. It is prefixed, allowing 
+			IterableList of RemoteReference objects. It is prefixed, allowing
 			you to omit the remote path portion, i.e.::
 			 remote.refs.master # yields RemoteReference('/refs/remotes/origin/master')"""
 		out_refs = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
 		out_refs.extend(RemoteReference.list_items(self.repo, remote=self.name))
 		assert out_refs, "Remote %s did not have any references" % self.name
 		return out_refs
-		
+
 	@property
 	def stale_refs(self):
 		"""
 		:return:
-			IterableList RemoteReference objects that do not have a corresponding 
-			head in the remote reference anymore as they have been deleted on the 
+			IterableList of RemoteReference objects that do not have a corresponding
+			head in the remote reference anymore as they have been deleted on the
 			remote side, but are still available locally.
-			
+
 			The IterableList is prefixed, hence the 'origin' must be omitted. See
 			'refs' property for an example."""
 		out_refs = IterableList(RemoteReference._id_attribute_, "%s/" % self.name)
 		for line in self.repo.git.remote("prune", "--dry-run", self).splitlines()[2:]:
-			# expecting 
+			# expecting
 			# * [would prune] origin/new_branch
-			token = " * [would prune] " 
+			token = " * [would prune] "
 			if not line.startswith(token):
 				raise ValueError("Could not parse git-remote prune result: %r" % line)
 			fqhn = "%s/%s" % (RemoteReference._common_path_default,line.replace(token, ""))
 			out_refs.append(RemoteReference(self.repo, fqhn))
-		# END for each line 
+		# END for each line
 		return out_refs
-	
+
 	@classmethod
 	def create(cls, repo, name, url, **kwargs):
 		"""Create a new remote to the given repository
@@ -169,30 +168,30 @@ def create(cls, repo, name, url, **kwargs):
 		:param url: URL which corresponds to the remote's name
 		:param kwargs:
 			Additional arguments to be passed to the git-remote add command
-			
+
 		:return: New Remote instance
-			
+
 		:raise GitCommandError: in case an origin with that name already exists"""
 		repo.git.remote( "add", name, url, **kwargs )
 		return cls(repo, name)
-	
+
 	# add is an alias
 	add = create
-	
+
 	@classmethod
 	def remove(cls, repo, name ):
 		"""Remove the remote with the given name"""
 		repo.git.remote("rm", name)
-		
+
 	# alias
 	rm = remove
-		
+
 	def rename(self, new_name):
 		"""Rename self to the given new_name
 		:return: self """
 		if self.name == new_name:
 			return self
-		
+
 		self.repo.git.remote("rename", self.name, new_name)
 		self.name = new_name
 		try:
@@ -201,74 +200,74 @@ def rename(self, new_name):
 			pass
 		#END handle exception
 		return self
-		
+
 	def update(self, **kwargs):
-		"""Fetch all changes for this remote, including new branches which will 
+		"""Fetch all changes for this remote, including new branches which will
 		be forced in (in case your local remote branch is not part of the new remote branches'
 		ancestry anymore).
-		
+
 		:param kwargs:
 			Additional arguments passed to git-remote update
-		
+
 		:return: self """
 		self.repo.git.remote("update", self.name)
 		return self
-	
+
 	def fetch(self, refspec=None, progress=None, **kwargs):
 		"""Fetch the latest changes for this remote
-		
+
 		:param refspec:
-			A "refspec" is used by fetch and push to describe the mapping 
-			between remote ref and local ref. They are combined with a colon in 
-			the format <src>:<dst>, preceded by an optional plus sign, +. 
-			For example: git fetch $URL refs/heads/master:refs/heads/origin means 
-			"grab the master branch head from the $URL and store it as my origin 
-			branch head". And git push $URL refs/heads/master:refs/heads/to-upstream 
-			means "publish my master branch head as to-upstream branch at $URL". 
+			A "refspec" is used by fetch and push to describe the mapping
+			between remote ref and local ref. They are combined with a colon in
+			the format <src>:<dst>, preceded by an optional plus sign, +.
+			For example: git fetch $URL refs/heads/master:refs/heads/origin means
+			"grab the master branch head from the $URL and store it as my origin
+			branch head". And git push $URL refs/heads/master:refs/heads/to-upstream
+			means "publish my master branch head as to-upstream branch at $URL".
 			See also git-push(1).
-			
+
 			Taken from the git manual
 		:param progress: See 'push' method
 		:param kwargs: Additional arguments to be passed to git-fetch
 		:return:
-			IterableList(FetchInfo, ...) list of FetchInfo instances providing detailed 
+			IterableList(FetchInfo, ...) list of FetchInfo instances providing detailed
 			information about the fetch results
-			
+
 		:note:
-			As fetch does not provide progress information to non-ttys, we cannot make 
+			As fetch does not provide progress information to non-ttys, we cannot make
 			it available here unfortunately as in the 'push' method."""
 		return self.repo.fetch(self.name, refspec, progress, **kwargs)
-		
+
 	def pull(self, refspec=None, progress=None, **kwargs):
-		"""Pull changes from the given branch, being the same as a fetch followed 
+		"""Pull changes from the given branch, being the same as a fetch followed
 		by a merge of branch with your local branch.
-		
+
 		:param refspec: see 'fetch' method
 		:param progress: see 'push' method
 		:param kwargs: Additional arguments to be passed to git-pull
 		:return: Please see 'fetch' method """
 		return self.repo.pull(self.name, refspec, progress, **kwargs)
-		
+
 	def push(self, refspec=None, progress=None, **kwargs):
 		"""Push changes from source branch in refspec to target branch in refspec.
-		
+
 		:param refspec: see 'fetch' method
 		:param progress:
-			Instance of type RemoteProgress allowing the caller to receive 
+			Instance of type RemoteProgress allowing the caller to receive
 			progress information until the method returns.
 			If None, progress information will be discarded
-		
+
 		:param kwargs: Additional arguments to be passed to git-push
 		:return:
-			IterableList(PushInfo, ...) iterable list of PushInfo instances, each 
-			one informing about an individual head which had been updated on the remote 
+			IterableList(PushInfo, ...) iterable list of PushInfo instances, each
+			one informing about an individual head which had been updated on the remote
 			side.
 			If the push contains rejected heads, these will have the PushInfo.ERROR bit set
 			in their flags.
 			If the operation fails completely, the length of the returned IterableList will
 			be null."""
 		return self.repo.push(self.name, refspec, progress, **kwargs)
-		
+
 	@property
 	def config_reader(self):
 		"""
@@ -276,20 +275,20 @@ def config_reader(self):
 			GitConfigParser compatible object able to read options for only our remote.
 			Hence you may simply type config.get("pushurl") to obtain the information
 		return self._config_reader
-	
+
 	@property
 	def config_writer(self):
 		"""
 		:return: GitConfigParser compatible object able to write options for this remote.
 		:note:
-			You can only own one writer at a time - delete it to release the 
+			You can only own one writer at a time - delete it to release the
 			configuration file and make it useable by others.
-			
-			To assure consistent results, you should only query options through the 
-			writer. Once you are done writing, you are free to use the config reader 
+
+			To assure consistent results, you should only query options through the
+			writer. Once you are done writing, you are free to use the config reader
 			once again."""
 		writer = self.repo.config_writer()
-		
+
 		# clear our cache to assure we re-read the possibly changed configuration
 		try:
 			del(self._config_reader)
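Finally, a hedged usage sketch of the Remote API documented above; the remote name, URL and refspecs are illustrative, and progress reporting is simply discarded by passing None.

from git import Repo
from git.remote import Remote, PushInfo

repo = Repo("~/Development/git-python.git")	# illustrative path

# enumerate configured remotes; the section-constrained reader exposes their options
for remote in Remote.iter_items(repo):
	print("%s -> %s" % (remote.name, remote.config_reader.get("url")))

# create a remote and fetch with an explicit refspec (URL and refspec illustrative)
mirror = Remote.create(repo, "mirror", "git://example.com/git-python.git")
for info in mirror.fetch("refs/heads/master:refs/remotes/mirror/master"):
	print(info)

# remote references are prefixed, so the remote name is omitted when accessing them
print(mirror.refs)

# push and check for rejected heads via the PushInfo.ERROR flag
for info in mirror.push("refs/heads/master:refs/heads/master", progress=None):
	if info.flags & PushInfo.ERROR:
		print("push of %s was rejected" % info)

# rename the remote and finally remove it again
mirror = mirror.rename("backup")
Remote.remove(repo, "backup")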
diff --git a/git/repo.py b/git/repo.py
index 8d5c4021e..b954fe60d 100644
--- a/git/repo.py
+++ b/git/repo.py
@@ -14,20 +14,20 @@
 
 
 class Repo(CmdCompatibilityGitDB):
-	"""Represents a git repository and allows you to query references, 
+	"""Represents a git repository and allows you to query references,
 	gather commit information, generate diffs, create and clone repositories, and query
 	the log.
-	
+
 	The following attributes are worth using:
-	
-	'working_dir' is the working directory of the git command, wich is the working tree 
+
+	'working_dir' is the working directory of the git command, which is the working tree
 	directory if available or the .git directory in case of bare repositories
-	
+
 	'working_tree_dir' is the working tree directory, but will raise AssertionError
 	if we are a bare repository.
-	
+
 	'git_dir' is the .git repository directory, which is always set."""
-	
+
 	def __init__(self, path=None, odbt = None):
 		"""Create a new Repo instance