Permalink
Switch branches/tags
Nothing to show
Find file Copy path
Fetching contributors…
Cannot retrieve contributors at this time
257 lines (192 sloc) 8.46 KB
'''
Created on Dec 17, 2013
Shows how to perform object operations.
@version: 1.0
@author: mielem@gmail.com
'''
import logging
import os
import StringIO
import tempfile
import time

import boto

# Local imports
import Buckets
# URI scheme for Google Cloud Storage.
GOOGLE_STORAGE = "gs"
# URI scheme for accessing local files.
LOCAL_FILE = "file"
def get_object_metadata(bucket_name, object_name, debug_level):
'''
Performs a HEAD Object operation to list the specified object's metadata.
@param bucket_name: The name of the bucket that contains the object.
@param object_name: The name of the object that contains the metadata.
@param debug_level: The level of debug messages to be printed.
'''
try:
# Define the bucket URI
obj_uri = bucket_name + "/" + object_name
uri = boto.storage_uri(obj_uri, GOOGLE_STORAGE, debug_level)
# Get the object key.
key = uri.get_key()
print "\n" + object_name + " meta data information\n"
print " Object size:\t%s" % key.size
print " Last mod:\t%s" % key.last_modified
print " MIME type:\t%s" % key.content_type
# Remove surrounding quotes
print " MD5:\t%s" % key.etag.strip('"\'')
except boto.exception, e:
logging.error("get_object_metadata, error occurred: %s", e)
def delete_object(bucket_name, object_name, debug_level):
'''
Performs a DELETE Object operation.
@param bucket_name: The name of the bucket that contains the object.
@param object_name: The name of the object to delete.
@param debug_level: The level of debug messages to be printed.
'''
try:
# Define the bucket URI
uri = boto.storage_uri(bucket_name, GOOGLE_STORAGE, debug_level)
# Get the specified bucket.
bucket = uri.get_bucket()
# Iterate through the objects in the bucket to delete them.
obj_deleted = False
for obj in bucket:
if obj.name == object_name:
obj.delete()
obj_deleted = True
break
if obj_deleted:
print "Deleted object: %s " % object_name
else:
print "object: %s not found" % object_name
except boto.exception, e:
logging.error("delete_object, error occurred: %s", e)
def set_object_acls(bucket_name, object_name, acl, email, debug_level):
'''
Performs a PUT Object operation to set the specified object ACLs.
@param bucket_name: The name of the bucket that contains the object.
@param object_name: The name of the object whose ACLs must be modified.
@param debug_level: The level of debug messages to be printed.
'''
try:
# Define the object URI
uri = boto.storage_uri(bucket_name + "/" + object_name, GOOGLE_STORAGE, debug_level)
# Print current ACLs.
print "\n Current ACLs:\n"
print str(uri.get_acl())
# Set new ACLs.
uri.add_email_grant(acl, email)
print "\n New ACLs:\n"
# Get the object key-value pairs list.
key = uri.get_key()
for entry in key.get_acl().entries.entry_list:
entry_id = entry.scope.id
if not entry_id:
entry_id = entry.scope.email_address
print "SCOPE: %s" % entry_id
print "PERMISSION: %s\n" % entry.permission
except boto.exception, e:
logging.error("set_object_acls, error occurred: %s", e)
def get_object_acls(bucket_name, object_name, debug_level):
'''
Performs a GET Object operation to get the object ACLs.
@param bucket_name: The name of the bucket that contains the object.
@param object_name: The name of the object that contains the ACLs.
@param debug_level: The level of debug messages to be printed.
'''
try:
# Define the object URI
uri = boto.storage_uri(bucket_name + "/" + object_name, GOOGLE_STORAGE, debug_level)
# Get the object key.
key = uri.get_key()
print "\n" + object_name + " ACLs\n"
for entry in key.get_acl().entries.entry_list:
entry_id = entry.scope.id
if not entry_id:
entry_id = entry.scope.email_address
print "SCOPE: %s" % entry_id
print "PERMISSION: %s\n" % entry.permission
except boto.exception, e:
logging.error("get_object_acls, error occurred: %s", e)
def upload_object(bucket_name, object_name, debug_level):
'''
Performs a PUT Object operation to upload object to the specified bucket.
@param bucket_name: The name of the bucket where to load the object.
@param object_name: The name of the object to load.
@param debug_level: The level of debug messages to be printed. Input parameters:
'''
try:
# Source directory.
# Replace the directory with one of your choice.
src_dir = os.getenv("HOME") + "/tmp"
# Upload these files to the specified bucket.
# Open file for reading.
fileHandle = file(os.path.join(src_dir, object_name), "r")
# Define the object storage URI.
dst_uri = boto.storage_uri(
bucket_name + "/" + object_name, GOOGLE_STORAGE, debug_level)
# Read and store the object in Google Cloud Storage.
# The key-related functions are because of boto's
# working with Amazon S3 (which employs the
# concept of a key mapping to contents).
dst_uri.new_key().set_contents_from_file(fileHandle)
# Close the file.
fileHandle.close()
print 'Successfully created "%s/%s"' % (
dst_uri.bucket_name, dst_uri.object_name)
except boto.exception, e:
logging.error("upload_object, error occurred: %s", e)
def download_object(bucket_name, object_name, debug_level):
'''
Performs a GET Object operation to download an object from the specified bucket.
@param bucket_name: The name of the bucket that contains the object to download.
@param object_name: The name of the object to download.
@param debug_level: The level of debug messages to be printed. Input parameters:
'''
try:
# Destination directory.
# Replace the directory with one of your choice.
dest_dir = os.getenv("HOME") + "/tmp/downloads/"
# Define the object URI
uri = boto.storage_uri(bucket_name + "/" + object_name, GOOGLE_STORAGE, debug_level)
# Create a file-like object to hold the object contents.
object_contents = StringIO.StringIO()
# Get the object contents.
uri.get_key().get_file(object_contents)
# Set the local destination path.
local_dest_uri = boto.storage_uri(
os.path.join(dest_dir, object_name), LOCAL_FILE)
# Download the object to the local destination.
object_contents.seek(0)
local_dest_uri.new_key().set_contents_from_file(object_contents)
object_contents.close()
except boto.exception, e:
logging.error("download_object, error occurred: %s", e)
def copy_object(bucket_name, object_name, project_id, debug_level):
'''
Performs a PUT Object operation to copy the specified object
from the specified bucket into another bucket.
@param bucket_name: The name of the bucket that contains the object to copy.
@param object_name: The name of the object to copy.
@param debug_level: The level of debug messages to be printed.
'''
try:
# Create a destination bucket for test purposes.
# In a production application, you pass the destination bucket as an argument.
dest_bucket = Buckets.create_bucket(bucket_name, project_id, debug_level)
# Define the source object URI
source = bucket_name + "/" + object_name
uri = boto.storage_uri(source, GOOGLE_STORAGE, debug_level)
# Create a file-like object to hold the object contents.
object_contents = StringIO.StringIO()
# Get the object contents.
uri.get_key().get_file(object_contents)
# Set the destination bucket path.
destination = os.path.join(dest_bucket + "/" + object_name)
bucket_dest_uri = boto.storage_uri(destination, GOOGLE_STORAGE, debug_level)
# Copy the object to the destination bucket.
object_contents.seek(0)
bucket_dest_uri.new_key().set_contents_from_file(object_contents)
object_contents.close()
except boto.exception, e:
logging.error("copy_object, error occurred: %s", e)