Permalink
Browse files

Updated API to allow ephemeral files to have more than one file uploaded. It still supports supplying a path to existing files, and the two methods can also be mixed.
  • Loading branch information...
1 parent 4f458ff commit fb7221ec9a60101c2673aea7eca7f1d23b90a9d5 @jterrace jterrace committed Jun 29, 2012
Showing with 132 additions and 63 deletions.
  1. +66 −10 contrib/api/ephemeral.py
  2. +54 −44 sirikata-cdn/celery_tasks/import_upload.py
  3. +2 −2 sirikata-cdn/content/utils.py
  4. +10 −7 sirikata-cdn/content/views.py
View
@@ -16,6 +16,7 @@
UPLOAD_URL = BASE_OPEN3DHUB + '/api/upload'
UPLOAD_STATUS_URL = BASE_OPEN3DHUB + '/upload/processing/%TASK_ID%?api&username=%USERNAME%'
API_MODELINFO_URL = BASE_OPEN3DHUB + '/api/modelinfo%(path)s'
+DNS_URL = BASE_OPEN3DHUB + '/dns%(path)s'
KEEPALIVE_URL = BASE_OPEN3DHUB + '/api/keepalive%(path)s?username=%(username)s&ttl=%(ttl)d'
CONSUMER_KEY = 'lVk5aGvdzZpVP4oh34gE80qB6KW67LfJaBQBD9BB2ec='
@@ -38,18 +39,32 @@ def exitprint(resp, content):
print "Error code: %s" % resp['status']
sys.exit(1)
+def usage_exit():
+ print >> sys.stderr, 'Usage: python ephemeral.py main_file [[subfile1name subfile1path] | subfile1] ...]'
+ sys.exit(1)
+
def main():
- if len(sys.argv) < 2 or len(sys.argv) % 2 != 0:
- print >> sys.stderr, 'Usage: python ephemeral.py main_file [subfile1name subfile1path ...]'
- sys.exit(1)
+
+ if len(sys.argv) < 2:
+ usage_exit()
opts, args = getopt.getopt(sys.argv[1:], ':')
upload_files = [args[0]]
main_filename = os.path.basename(args[0])
subfile_map = {}
- for (name, path) in zip(args[1::2], args[2::2]):
- subfile_map[name] = path
+ i = 1
+ while i < len(args):
+ if os.path.isfile(args[i]):
+ upload_files.append(args[i])
+ else:
+ fname = args[i]
+ if i+1 >= len(args):
+ usage_exit()
+ fpath = args[i+1]
+ subfile_map[fname] = fpath
+ i += 1
+ i += 1
consumer = oauth2.Consumer(CONSUMER_KEY, CONSUMER_SECRET)
access_token = oauth2.Token(ACCESS_KEY, ACCESS_SECRET)
@@ -135,14 +150,14 @@ def main():
print
half_updated = False
-
+ subfile_names = set()
while time.time() - start_time < TTL_TIME * 1.5:
if not half_updated and time.time() - start_time > TTL_TIME / 2.0:
print
print 'Updating TTL value by 50%'
- print
+ print 'Updating TTL for main file', uploaded_path
toget = KEEPALIVE_URL % {'path': uploaded_path,
'username': USERNAME,
'ttl': TTL_TIME}
@@ -153,8 +168,23 @@ def main():
if resp['status'] != '200':
print 'Updating TTL failed'
exitprint(resp, content)
+
+ for subfile_name in subfile_names:
+ basename = subfile_name.split('/')[-2]
+ if basename not in subfile_map:
+ print 'Updating TTL for subfile', subfile_name
- print
+ toget = KEEPALIVE_URL % {'path': subfile_name,
+ 'username': USERNAME,
+ 'ttl': TTL_TIME}
+
+ client = oauth2.Client(consumer, token=access_token)
+ resp, content = client.request(toget, method='GET')
+
+ if resp['status'] != '200':
+ print 'Updating subfile TTL failed'
+ exitprint(resp, content)
+
print 'Updating TTL by 50% success'
print
@@ -167,6 +197,20 @@ def main():
if 'fullpath' not in result or result['fullpath'] != uploaded_path[1:]:
print 'Got wrong path in API request'
exitprint(resp, content)
+
+
+ subfile_names = set(result['metadata']['types']['original']['subfiles'])
+ for subfile_name in subfile_names:
+ lookup_url = DNS_URL % {'path': subfile_name}
+ resp, content = h.request(lookup_url, "GET")
+ if resp['status'] != '200':
+ print 'Got wrong status code for subfile DNS lookup', subfile_name
+ exitprint(resp, content)
+ result = json.loads(content)
+ if 'Hash' not in result:
+ print 'Got wrong JSON result for subfile DNS lookup', subfile_name
+ exitprint(resp, content)
+
print 'Model still there... %d seconds left' % int(start_time + TTL_TIME * 1.5 - time.time())
time.sleep(5)
@@ -177,13 +221,25 @@ def main():
print
print 'Checking that model no longer exists'
- print
resp, content = h.request(json_info_url, "GET")
if resp['status'] != '404':
exitprint(resp, content)
+ print 'Main file correctly returned 404'
+
+ for subfile_name in subfile_names:
+ basename = subfile_name.split('/')[-2]
+ lookup_url = DNS_URL % {'path': subfile_name}
+ resp, content = h.request(lookup_url, "GET")
+ correct_status = '200'
+ if basename not in subfile_map:
+ correct_status = '404'
+ if resp['status'] != correct_status:
+ print 'Got wrong status code for subfile DNS lookup', subfile_name
+ exitprint(resp, content)
+ print 'Subfile', subfile_name, 'correctly returned', correct_status
print
- print 'After TTL expired, API is now returning 404, the correct result. Done.'
+ print 'Everything checked out. Done.'
print
if __name__ == '__main__':
@@ -193,66 +193,76 @@ def import_upload(main_rowkey, subfiles, selected_dae=None):
return dae_zip_name
@task
-def place_upload(main_rowkey, subfiles, title, path, description, selected_dae=None, create_index=True, ephemeral_ttl=None):
+def place_upload(main_rowkey, subfiles, title, path, description, selected_dae=None, create_index=True, ephemeral_ttl=None, ephemeral_subfiles=None):
import_upload.update_state(state="LOADING")
file_data = get_temp_file(main_rowkey)
(zip, dae_zip_name, dae_data) = get_file_or_zip(file_data, selected_dae)
+ if ephemeral_subfiles is None:
+ ephemeral_subfiles = {}
+
if ephemeral_ttl is not None:
- eph_subfile_metadata = get_multi_file_metadata(subfiles.values())
+ eph_subfile_metadata = get_multi_file_metadata(ephemeral_subfiles.values())
eph_subfile_hashes = [m['hash'] for m in eph_subfile_metadata.itervalues()]
eph_subfile_data = multi_get_hash(eph_subfile_hashes)
+
def eph_subfile_getter(name):
- return eph_subfile_data[eph_subfile_metadata[subfiles[name]]['hash']]['data']
- (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, subfiles, subfile_getter=eph_subfile_getter)
+ if name in ephemeral_subfiles:
+ return eph_subfile_data[eph_subfile_metadata[ephemeral_subfiles[name]]['hash']]['data']
+ else:
+ return get_temp_file(subfiles[name])
+
+ combined_subfiles = dict(ephemeral_subfiles.items() + subfiles.items())
+ (collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, combined_subfiles, subfile_getter=eph_subfile_getter)
else:
import_upload.update_state(state="CHECKING_COLLADA")
(collada_obj, subfile_data, image_objs) = get_collada_and_images(zip, dae_zip_name, dae_data, subfiles)
import_upload.update_state(state="SAVING_ORIGINAL")
try: new_version_num = get_new_version_from_path(path, file_type="collada")
except cass.DatabaseError: raise DatabaseError()
-
- if ephemeral_ttl is not None:
- subfile_names = subfiles.values()
- else:
- #Make sure image paths are just the base name
- current_prefix = "original"
- subfile_names = []
- image_names = []
- for img in collada_obj.images:
- rel_path = img.path
- base_name = posixpath.basename(img.path)
- orig_base_name = base_name
-
- #strip out any character not allowed
- base_name = re.sub('[^\w\-\.]', '', base_name)
-
- #make sure that referenced texture files are unique
- while base_name in image_names:
- dot = base_name.rfind('.')
- ext = base_name[dot:] if dot != -1 else ''
- before_ext = base_name[0:dot] if dot != -1 else base_name
- base_name = "%s-x%s" % (before_ext, ext)
-
- if base_name != orig_base_name:
- subfile_data[base_name] = subfile_data[orig_base_name]
- del subfile_data[orig_base_name]
- image_objs[base_name] = image_objs[orig_base_name]
- del image_objs[orig_base_name]
+
+ #Make sure image paths are just the base name
+ current_prefix = "original"
+ subfile_names = []
+ image_names = []
+ for img in collada_obj.images:
+ rel_path = img.path
+ base_name = posixpath.basename(img.path)
+ orig_base_name = base_name
+
+ if base_name in ephemeral_subfiles:
+ subfile_names.append(ephemeral_subfiles[base_name])
+ continue
+
+ #strip out any character not allowed
+ base_name = re.sub('[^\w\-\.]', '', base_name)
+
+ #make sure that referenced texture files are unique
+ while base_name in image_names:
+ dot = base_name.rfind('.')
+ ext = base_name[dot:] if dot != -1 else ''
+ before_ext = base_name[0:dot] if dot != -1 else base_name
+ base_name = "%s-x%s" % (before_ext, ext)
- img.path = "./%s" % base_name
- img.save()
- img_hex_key = hashlib.sha256(subfile_data[base_name]).hexdigest()
- try: save_file_data(img_hex_key, subfile_data[base_name], "image/%s" % image_objs[base_name].format.lower())
- except: raise DatabaseError()
- img_path = "%s/%s/%s" % (path, current_prefix, base_name)
- img_len = len(subfile_data[base_name])
- try: img_version_num = get_new_version_from_path(img_path, file_type="image")
- except cass.DatabaseError: raise DatabaseError()
- try: save_file_name(img_path, img_version_num, img_hex_key, img_len)
- except cass.DatabaseError: raise DatabaseError()
- subfile_names.append("%s/%s" % (img_path, img_version_num))
+ if base_name != orig_base_name:
+ subfile_data[base_name] = subfile_data[orig_base_name]
+ del subfile_data[orig_base_name]
+ image_objs[base_name] = image_objs[orig_base_name]
+ del image_objs[orig_base_name]
+
+ img.path = "./%s" % base_name
+ img.save()
+ img_hex_key = hashlib.sha256(subfile_data[base_name]).hexdigest()
+ try: save_file_data(img_hex_key, subfile_data[base_name], "image/%s" % image_objs[base_name].format.lower())
+ except: raise DatabaseError()
+ img_path = "%s/%s/%s" % (path, current_prefix, base_name)
+ img_len = len(subfile_data[base_name])
+ try: img_version_num = get_new_version_from_path(img_path, file_type="image")
+ except cass.DatabaseError: raise DatabaseError()
+ try: save_file_name(img_path, img_version_num, img_hex_key, img_len, ttl=ephemeral_ttl)
+ except cass.DatabaseError: raise DatabaseError()
+ subfile_names.append("%s/%s" % (img_path, img_version_num))
str_buffer = StringIO()
collada_obj.write(str_buffer)
@@ -247,10 +247,10 @@ def list_file_keys(columns=None):
for r in recs:
yield r
-def save_file_name(path, version_num, hash_key, length):
+def save_file_name(path, version_num, hash_key, length, ttl=None):
dict = {'hash': hash_key, 'size': length}
col_val = json.dumps(dict)
- insertRecord(NAMES, path, columns={version_num: col_val})
+ insertRecord(NAMES, path, columns={version_num: col_val}, ttl=ttl)
def save_version_type(path, version_num, hash_key, length, subfile_names, zip_key, type_id, title=None, description=None, create_index=True, ttl=None):
try:
@@ -435,9 +435,6 @@ def clean(self):
if not ephemeral and subfiles:
raise forms.ValidationError("Subfiles parameter is only valid when uploading ephemeral files")
- if ephemeral and len(self.file_names) > 1:
- raise forms.ValidationError("Only one file can be uploaded when uploading an ephemeral file")
-
return self.cleaned_data
@csrf_exempt
@@ -494,11 +491,15 @@ def api_upload(request):
filename = main_filename
if ephemeral:
- task = place_upload.delay(main_rowkey, ephemeral_subfiles, title, path,
- description, create_index=False, ephemeral_ttl=ttl_time)
+ create_index = False
else:
- task = place_upload.delay(main_rowkey, subfiles, title, path,
- description, create_index=True)
+ ttl_time = None
+ create_index = True
+ ephemeral_subfiles = None
+
+ task = place_upload.delay(main_rowkey, subfiles, title, path,
+ description, create_index=create_index,
+ ephemeral_ttl=ttl_time, ephemeral_subfiles=ephemeral_subfiles)
save_upload_task(username=username,
task_id=task.task_id,
@@ -897,6 +898,8 @@ def dns(request, filename):
base_path = "/".join(parts[:-2])
type_id = parts[-2]
versions = get_versions('/' + base_path)
+ if versions is None:
+ return HttpResponseNotFound()
version_num = str(max(map(int, versions)))
else:
base_path = "/".join(parts[:-3])

0 comments on commit fb7221e

Please sign in to comment.