Skip to content


Subversion checkout URL

You can clone with
Download ZIP
196 lines (155 sloc) 5.58 KB
from __future__ import unicode_literals

import os
from io import BytesIO, StringIO, UnsupportedOperation

from django.core.files.utils import FileProxyMixin
from django.utils import six
from django.utils.encoding import (
    force_bytes, force_str, python_2_unicode_compatible, smart_text,
)
@python_2_unicode_compatible
class File(FileProxyMixin):
    """
    A thin wrapper around a file-like object, adding a name, a lazily
    computed ``size``, chunked reading, and line-oriented iteration.
    """
    # Default read size for chunks(): 64 KiB.
    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10

    def __init__(self, file, name=None):
        self.file = file
        if name is None:
            # Fall back to the underlying object's name (e.g. open file path).
            name = getattr(file, 'name', None)
        self.name = name
        if hasattr(file, 'mode'):
            self.mode = file.mode

    def __str__(self):
        return smart_text(self.name or '')

    def __repr__(self):
        return force_str("<%s: %s>" % (self.__class__.__name__, self or "None"))

    def __bool__(self):
        # A File is truthy iff it has a name.
        return bool(self.name)

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def __len__(self):
        return self.size

    def _get_size_from_underlying_file(self):
        """
        Determine the size in bytes, trying (in order): an explicit ``size``
        attribute, the on-disk size of ``file.name``, and finally
        seek-to-end/tell. Raises AttributeError if none applies.
        """
        if hasattr(self.file, 'size'):
            return self.file.size
        if hasattr(self.file, 'name'):
            try:
                return os.path.getsize(self.file.name)
            except (OSError, TypeError):
                # Name is not an existing filesystem path; try seeking instead.
                pass
        if hasattr(self.file, 'tell') and hasattr(self.file, 'seek'):
            pos = self.file.tell()
            self.file.seek(0, os.SEEK_END)
            size = self.file.tell()
            # Restore the original position so callers are unaffected.
            self.file.seek(pos)
            return size
        raise AttributeError("Unable to determine the file's size.")

    def _get_size(self):
        # Cache the size after the first (potentially costly) computation.
        if hasattr(self, '_size'):
            return self._size
        self._size = self._get_size_from_underlying_file()
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_closed(self):
        return not self.file or self.file.closed
    closed = property(_get_closed)

    def chunks(self, chunk_size=None):
        """
        Read the file and yield chunks of ``chunk_size`` bytes (defaults to
        ``File.DEFAULT_CHUNK_SIZE``).
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE
        try:
            # Start from the beginning when the file supports seeking.
            self.seek(0)
        except (AttributeError, UnsupportedOperation):
            pass

        while True:
            data = self.read(chunk_size)
            if not data:
                break
            yield data

    def multiple_chunks(self, chunk_size=None):
        """
        Returns ``True`` if you can expect multiple chunks.

        NB: If a particular file representation is in memory, subclasses should
        always return ``False`` -- there's no good reason to read from memory in
        chunks if we can read it all at once.
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE
        return self.size > chunk_size

    def __iter__(self):
        # Iterate over this file-like object by newlines, handling lines that
        # are split across chunk boundaries (including a \r\n pair split
        # between two chunks).
        buffer_ = None
        for chunk in self.chunks():
            for line in chunk.splitlines(True):
                if buffer_:
                    if endswith_cr(buffer_) and not equals_lf(line):
                        # Line split after a \r newline; yield buffer_.
                        yield buffer_
                        # Continue with line.
                    else:
                        # Line either split without a newline (line
                        # continues after buffer_) or with \r\n
                        # newline (line == b'\n').
                        line = buffer_ + line
                    # buffer_ handled, clear it.
                    buffer_ = None

                # If this is the end of a \n or \r\n line, yield.
                if endswith_lf(line):
                    yield line
                else:
                    buffer_ = line

        if buffer_ is not None:
            yield buffer_

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()

    def open(self, mode=None):
        """
        Rewind an open file, or reopen a closed one from ``self.name``.
        Raises ValueError when neither is possible.
        """
        if not self.closed:
            self.seek(0)
        elif self.name and os.path.exists(self.name):
            self.file = open(self.name, mode or self.mode)
        else:
            raise ValueError("The file cannot be reopened.")

    def close(self):
        self.file.close()
@python_2_unicode_compatible
class ContentFile(File):
    """
    A File-like object that takes just raw content, rather than an actual file.
    """
    def __init__(self, content, name=None):
        if six.PY3:
            # Text content is wrapped in a StringIO, bytes in a BytesIO.
            stream_class = StringIO if isinstance(content, six.text_type) else BytesIO
        else:
            # On Python 2 everything is coerced to bytes.
            stream_class = BytesIO
            content = force_bytes(content)
        super(ContentFile, self).__init__(stream_class(content), name=name)
        self.size = len(content)

    def __str__(self):
        return 'Raw content'

    def __bool__(self):
        # Unlike File, a ContentFile is always truthy, even without a name.
        return True

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def open(self, mode=None):
        # The in-memory stream is always "open"; just rewind it.
        self.seek(0)

    def close(self):
        # Keep the in-memory content available; closing is a no-op.
        pass
def endswith_cr(line):
    """
    Return True if line (a text or byte string) ends with '\r'.
    """
    return line.endswith('\r' if isinstance(line, six.text_type) else b'\r')
def endswith_lf(line):
    """
    Return True if line (a text or byte string) ends with '\n'.
    """
    return line.endswith('\n' if isinstance(line, six.text_type) else b'\n')
def equals_lf(line):
    """
    Return True if line (a text or byte string) equals '\n'.
    """
    return line == ('\n' if isinstance(line, six.text_type) else b'\n')
Jump to Line
Something went wrong with that request. Please try again.