Skip to content
This repository has been archived by the owner on Mar 25, 2024. It is now read-only.

Commit

Permalink
initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
budurli committed May 28, 2012
0 parents commit 4f5b8fd
Show file tree
Hide file tree
Showing 10 changed files with 408 additions and 0 deletions.
Empty file added LICENSE.txt
Empty file.
9 changes: 9 additions & 0 deletions MANIFEST
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# file GENERATED by distutils, do NOT edit
README.txt
setup.py
django-google-storage/__init__.py
django-google-storage/exceptions.py
django-google-storage/file.py
django-google-storage/format.py
django-google-storage/storage.py
django-google-storage/utils.py
25 changes: 25 additions & 0 deletions README.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
=====================
django-google-storage
=====================

It's just a combination of django-storages and boto that improves your
ability to use Google Storage. It's easy and fast.

Install
=========

Steps

* pip install django-google-storage

* add 'django-google-storage' to your INSTALLED_APPS

* put 'django-google-storage.storage.GoogleStorage' in DEFAULT_FILE_STORAGE.
This setting goes in your settings.py as well.

* add GS_ACCESS_KEY_ID, GS_SECRET_ACCESS_KEY and GS_STORAGE_BUCKET_NAME
to your settings.py

* ....

* PROFIT
Empty file.
Empty file.
47 changes: 47 additions & 0 deletions django-google-storage/file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
from django.core.files.base import File

try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO



class GSBotoStorageFile(File):
    """A Django ``File`` backed by a single key in a Google Storage bucket.

    Reads lazily pull the key's contents into an in-memory buffer; writes
    mark the buffer dirty and are flushed back to the bucket on close().
    """

    def __init__(self, name, mode, storage):
        self._storage = storage
        # Keep only the bucket-relative part of the name (strip the
        # configured location prefix and any leading slash).
        self.name = name[len(storage.location):].lstrip('/')
        self._mode = mode
        self.key = storage.bucket.get_key(storage._encode_name(name))
        self._is_dirty = False
        self._file = None

    @property
    def size(self):
        # Size as reported by the remote key's metadata.
        return self.key.size

    @property
    def file(self):
        # Lazily materialise the buffer on first access.
        if self._file is not None:
            return self._file
        self._file = StringIO()
        if 'r' in self._mode:
            self._is_dirty = False
            self.key.get_contents_to_file(self._file)
            self._file.seek(0)
        return self._file

    def read(self, *args, **kwargs):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        return super(GSBotoStorageFile, self).read(*args, **kwargs)

    def write(self, *args, **kwargs):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self._is_dirty = True
        return super(GSBotoStorageFile, self).write(*args, **kwargs)

    def close(self):
        # Flush buffered writes back to the bucket before closing the key.
        if self._is_dirty:
            self.key.set_contents_from_file(self._file,
                                            headers=self._storage.headers,
                                            policy=self._storage.acl)
        self.key.close()
57 changes: 57 additions & 0 deletions django-google-storage/format.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import urllib

import boto

from boto.exception import BotoClientError


def check_lowercase_bucketname(n):
    """Return True when *n* contains no upper-case characters.

    Raises BotoClientError otherwise: sub-domain/virtual-hosting URLs
    cannot address buckets with upper-case names.
    """
    # Appending 'a' makes digit-only names (for which islower() would be
    # False) pass the check.
    if (n + 'a').islower():
        return True
    raise BotoClientError("Bucket names cannot contain upper-case " \
        "characters when using either the sub-domain or virtual " \
        "hosting calling format.")


def assert_case_insensitive(f):
    """Decorator validating the bucket-name argument of a calling-format
    method.

    When the wrapped function is called as ``f(self, server, bucket)``
    (exactly three positional args), the bucket name (``args[2]``) is run
    through check_lowercase_bucketname() before delegating; other call
    shapes are delegated untouched.
    """
    from functools import wraps

    # Fix: preserve the wrapped function's metadata (__name__, __doc__);
    # the previous wrapper hid it, which confuses debugging/introspection.
    @wraps(f)
    def wrapper(*args, **kwargs):
        if len(args) == 3:
            # Raises BotoClientError on an invalid (upper-case) name.
            check_lowercase_bucketname(args[2])
        return f(*args, **kwargs)
    return wrapper


class _CallingFormat(object):
    """Base class for assembling Google Storage URLs and auth paths.

    Subclasses override get_bucket_server() to decide how the bucket name
    appears in the host part of the URL.
    """

    def get_bucket_server(self, server, bucket):
        # Base implementation contributes no bucket-specific host.
        return ''

    def build_url_base(self, connection, protocol, server, bucket, key=''):
        """Return '<protocol>://<host><path>' for the given bucket/key."""
        host = self.build_host(server, bucket)
        path = connection.get_path(self.build_path_base(bucket, key))
        return '%s://%s%s' % (protocol, host, path)

    def build_host(self, server, bucket):
        # An empty bucket name means "address the service itself".
        if bucket == '':
            return server
        return self.get_bucket_server(server, bucket)

    def build_auth_path(self, bucket, key=''):
        """Return the path used for request signing: '/<bucket>/<key>'."""
        encoded_key = boto.utils.get_utf8_value(key)
        prefix = '' if bucket == '' else '/' + bucket
        return prefix + '/%s' % urllib.quote(encoded_key)

    def build_path_base(self, bucket, key=''):
        # The bucket does not appear in the request path for this format.
        encoded_key = boto.utils.get_utf8_value(key)
        return '/%s' % urllib.quote(encoded_key)


class SubdomainCallingFormat(_CallingFormat):
    """Calling format addressing buckets as '<bucket>.<server>' subdomains.

    DNS host names are case-insensitive, so the bucket name is validated
    to be lower-case before being used as a subdomain.
    """

    @assert_case_insensitive
    def get_bucket_server(self, server, bucket):
        return '{0}.{1}'.format(bucket, server)
227 changes: 227 additions & 0 deletions django-google-storage/storage.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,227 @@
import os

import mimetypes

from django.conf import settings
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.utils.encoding import force_unicode, smart_str

from .format import SubdomainCallingFormat
from .file import GSBotoStorageFile
from .utils import safe_join

try:
from boto.gs.connection import GSConnection
from boto.gs.key import Key
except ImportError:
raise ImproperlyConfigured("Could not load Google Storage bindings.\n"
"See http://code.google.com/p/boto/")


# All configuration is read from Django settings once, at import time.

# Credentials; when only one half is configured, GoogleStorage falls back
# to the environment (see GoogleStorage._get_access_keys).
ACCESS_KEY_NAME = getattr(settings, 'GS_ACCESS_KEY_ID', None)
SECRET_KEY_NAME = getattr(settings, 'GS_SECRET_ACCESS_KEY', None)
# Extra HTTP headers sent with every upload.
HEADERS = getattr(settings, 'GS_HEADERS', {})
# Bucket all files are stored in.
STORAGE_BUCKET_NAME = getattr(settings, 'GS_STORAGE_BUCKET_NAME', None)
# Create the bucket on first use if it does not exist yet.
AUTO_CREATE_BUCKET = getattr(settings, 'GS_AUTO_CREATE_BUCKET', False)
# Canned ACLs for new objects and for a newly created bucket.
DEFAULT_ACL = getattr(settings, 'GS_DEFAULT_ACL', 'public-read')
BUCKET_ACL = getattr(settings, 'GS_BUCKET_ACL', DEFAULT_ACL)
# Signed-URL generation: whether to sign, and expiry in seconds.
QUERYSTRING_AUTH = getattr(settings, 'GS_QUERYSTRING_AUTH', True)
QUERYSTRING_EXPIRE = getattr(settings, 'GS_QUERYSTRING_EXPIRE', 3600)
# NOTE(review): read and stored, but never passed to boto in this backend.
REDUCED_REDUNDANCY = getattr(settings, 'GS_REDUCED_REDUNDANCY', False)
# Optional key prefix ("directory") inside the bucket.
LOCATION = getattr(settings, 'GS_LOCATION', '')
# Serve files from this domain instead of a generated storage URL.
CUSTOM_DOMAIN = getattr(settings, 'GS_CUSTOM_DOMAIN', None)
CALLING_FORMAT = getattr(settings, 'GS_CALLING_FORMAT', SubdomainCallingFormat())
# Use https:// in generated URLs.
SECURE_URLS = getattr(settings, 'GS_SECURE_URLS', True)
# Charset used to encode/decode key names for boto.
FILE_NAME_CHARSET = getattr(settings, 'GS_FILE_NAME_CHARSET', 'utf-8')
# When True, saving to an existing name overwrites instead of renaming.
FILE_OVERWRITE = getattr(settings, 'GS_FILE_OVERWRITE', True)
# NOTE(review): the gzip settings below are read into the storage instance
# but no gzip handling is implemented anywhere in the visible code.
IS_GZIPPED = getattr(settings, 'GS_IS_GZIPPED', False)
PRELOAD_METADATA = getattr(settings, 'GS_PRELOAD_METADATA', False)
GZIP_CONTENT_TYPES = getattr(settings, 'GZIP_CONTENT_TYPES', (
    'text/css',
    'application/javascript',
    'application/x-javascript'
))


class GoogleStorage(Storage):
def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
reduced_redundancy=REDUCED_REDUNDANCY,
custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):

self.bucket_acl = bucket_acl
self.bucket_name = bucket
self.acl = acl
self.headers = headers
self.preload_metadata = preload_metadata
self.gzip = gzip
self.gzip_content_types = gzip_content_types
self.querystring_auth = querystring_auth
self.querystring_expire = querystring_expire
self.reduced_redundancy = reduced_redundancy
self.custom_domain = custom_domain
self.secure_urls = secure_urls
self.location = location or ''
self.location = self.location.lstrip('/')
self.file_name_charset = file_name_charset

if not access_key and not secret_key:
print u'where are no secret keys.'
access_key, secret_key = self._get_access_keys()

self.connection = GSConnection(access_key, secret_key)

@property
def bucket(self):
if not hasattr(self, '_bucket'):
self._bucket = self._get_or_create_bucket(self.bucket_name)
return self._bucket

@property
def entries(self):
if self.preload_metadata and not self._entries:
self._entries = dict((self._decode_name(entry.key), entry)
for entry in self.bucket.list())
return self._entries

def _get_access_keys(self):
print u'trying to get them.'
access_key = ACCESS_KEY_NAME
secret_key = SECRET_KEY_NAME
print u' here they are: %s, %s' % (access_key, secret_key)
if (access_key or secret_key) and (not access_key or not secret_key):
access_key = os.environ.get(ACCESS_KEY_NAME)
secret_key = os.environ.get(SECRET_KEY_NAME)

if access_key and secret_key:
# Both were provided, so use them
return access_key, secret_key

return None, None

def _get_or_create_bucket(self, name):
"""Retrieves a bucket if it exists, otherwise creates it."""
try:
return self.connection.get_bucket(name, validate=AUTO_CREATE_BUCKET)
except Exception, e:
if AUTO_CREATE_BUCKET:
bucket = self.connection.create_bucket(name)
bucket.set_acl(self.bucket_acl)
return bucket
raise ImproperlyConfigured("%s" % str(e))

def _clean_name(self, name):
# Useful for windows' paths
return os.path.normpath(name).replace('\\', '/')

def _normalize_name(self, name):
try:
return safe_join(self.location, name).lstrip('/')
except ValueError:
raise SuspiciousOperation("Attempted access to '%s' denied." % name)

def _encode_name(self, name):
return smart_str(name, encoding=self.file_name_charset)

def _decode_name(self, name):
return force_unicode(name, encoding=self.file_name_charset)

def _open(self, name, mode='rb'):
name = self._normalize_name(self._clean_name(name))
f = GSBotoStorageFile(name, mode, self)
if not f.key:
raise IOError('File does not exist: %s' % name)
return f

def _save(self, name, content):
cleaned_name = self._clean_name(name)
name = self._normalize_name(cleaned_name)
headers = self.headers.copy()
content_type = getattr(content, 'content_type', mimetypes.guess_type(name)[0] or Key.DefaultContentType)

content.name = cleaned_name
k = self.bucket.get_key(self._encode_name(name))
if not k:
k = self.bucket.new_key(self._encode_name(name))

k.set_metadata('Content-Type', content_type)
k.set_contents_from_file(content, headers=headers, policy=self.acl)
#reduced_redundancy=self.reduced_redundancy)
return cleaned_name

def delete(self, name):
name = self._normalize_name(self._clean_name(name))
self.bucket.delete_key(self._encode_name(name))

def exists(self, name):
name = self._normalize_name(self._clean_name(name))
if self.entries:
return name in self.entries
k = self.bucket.new_key(self._encode_name(name))
return k.exists()

def listdir(self, name):
name = self._normalize_name(self._clean_name(name))
dirlist = self.bucket.list(self._encode_name(name))
files = []
dirs = set()
base_parts = name.split("/") if name else []
for item in dirlist:
parts = item.name.split("/")
parts = parts[len(base_parts):]
if len(parts) == 1:
# File
files.append(parts[0])
elif len(parts) > 1:
# Directory
dirs.add(parts[0])
return list(dirs), files

def size(self, name):
name = self._normalize_name(self._clean_name(name))
if self.entries:
entry = self.entries.get(name)
if entry:
return entry.size
return 0
return self.bucket.get_key(self._encode_name(name)).size

def modified_time(self, name):
try:
from dateutil import parser, tz
except ImportError:
raise NotImplementedError()
name = self._normalize_name(self._clean_name(name))
entry = self.entries.get(name)
# only call self.bucket.get_key() if the key is not found
# in the preloaded metadata.
if entry is None:
entry = self.bucket.get_key(self._encode_name(name))
# convert to string to date
last_modified_date = parser.parse(entry.last_modified)
# if the date has no timzone, assume UTC
if last_modified_date.tzinfo == None:
last_modified_date = last_modified_date.replace(tzinfo=tz.tzutc())
# convert date to local time w/o timezone
return last_modified_date.astimezone(tz.tzlocal()).replace(tzinfo=None)

def url(self, name):
name = self._normalize_name(self._clean_name(name))
if self.custom_domain:
return "%s://%s/%s" % ('https' if self.secure_urls else 'http', self.custom_domain, name)
else:
return self.connection.generate_url(self.querystring_expire, method='GET', \
bucket=self.bucket.name, key=self._encode_name(name), query_auth=self.querystring_auth, \
force_http=not self.secure_urls)

def get_available_name(self, name):
""" Overwrite existing file with the same name. """
if FILE_OVERWRITE:
name = self._clean_name(name)
return name
return super(GoogleStorage, self).get_available_name(name)
26 changes: 26 additions & 0 deletions django-google-storage/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from django.utils.encoding import force_unicode

def safe_join(base, *paths):
    """
    A version of django.utils._os.safe_join for S3/GS paths.

    Joins one or more path components to the base path component and
    returns a normalized version of the final path. The final path must be
    located inside of the base path component, otherwise ValueError is
    raised -- paths outside the base indicate a possible security-sensitive
    operation.
    """
    from urlparse import urljoin
    base_path = force_unicode(base)
    final_path = base_path
    # Bug fix: join components one at a time. urljoin() accepts only a
    # single URL argument, so the old urljoin(base, *paths) call passed any
    # second component as the `allow_fragments` flag and silently dropped it.
    for path in paths:
        if not final_path.endswith("/"):
            final_path += "/"
        final_path = urljoin(final_path, force_unicode(path))
    # Ensure final_path starts with base_path and that the next character
    # after base_path is '/' (or nothing, in which case final_path must be
    # equal to base_path).
    base_path_len = len(base_path)
    if (not final_path.startswith(base_path)
            or final_path[base_path_len:base_path_len + 1] not in ('', '/')):
        raise ValueError('the joined path is located outside of the base path'
                         ' component')
    return final_path
Loading

0 comments on commit 4f5b8fd

Please sign in to comment.