Commit
KorayKirli committed Nov 29, 2016
1 parent 771bea6 commit 6bbbfff
Showing 2 changed files with 32 additions and 9 deletions.
30 changes: 29 additions & 1 deletion tests/test_import_data.py
@@ -159,6 +159,34 @@ def test_get_existing_uuid(connection, mocker, returned_vendor_existing_item):


def test_combine_set_replicates():
post_json = {"aliases": "sample_repset", "description": "sample description"}
existing_data = {}
dict_replicates = {'sample_repset': [{'replicate_exp': 'awesome_uuid1', 'bio_rep_no': 1.0, 'tec_rep_no': 1.0},
{'replicate_exp': 'awesome_uuid3', 'bio_rep_no': 1.0, 'tec_rep_no': 2.0}]}
post_json2, dict_replicates2 = imp.combine_set(post_json, existing_data, "ExperimentSetReplicate", dict_replicates)

response = {'experiments_in_set': [{'replicate_exp': 'awesome_uuid1', 'tec_rep_no': 1.0, 'bio_rep_no': 1.0},
{'replicate_exp': 'awesome_uuid3', 'tec_rep_no': 2.0, 'bio_rep_no': 1.0}],
'description': 'sample description',
'aliases': 'sample_repset'}
assert post_json2 == response
assert dict_replicates2 == {}


def test_combine_set_expsets():
post_json = {"aliases": "sample_expset", "description": "sample description"}
existing_data = {}
dict_expsets = {'sample_expset': ['awesome_uuid1', 'awesome_uuid4', 'awesome_uuid5']}
post_json2, dict_expsets2 = imp.combine_set(post_json, existing_data, "ExperimentSet", dict_expsets)

response = {'experiments_in_set': ['awesome_uuid4', 'awesome_uuid5', 'awesome_uuid1'],
'description': 'sample description',
'aliases': 'sample_expset'}
    # sorted(dict) would compare only the keys; compare the lists order-insensitively
    assert sorted(post_json2.pop('experiments_in_set')) == sorted(response.pop('experiments_in_set'))
    assert post_json2 == response
assert dict_expsets2 == {}
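
For context: the two tests above pin down how `imp.combine_set` merges collected experiments into a set's POST body and consumes the alias entry from the tracking dict. A minimal sketch consistent with these tests (hypothetical; the real implementation in wranglertools/import_data.py may differ):

    def combine_set(post_json, existing_data, itemtype, item_dict):
        # sketch inferred from the tests above, not the shipped code
        alias = post_json['aliases']
        if itemtype == "ExperimentSetReplicate":
            # replicate sets track experiments as dicts under 'replicate_exps'
            combined = list(existing_data.get('replicate_exps', []))
        else:
            combined = list(existing_data.get('experiments_in_set', []))
        # fold in everything collected for this alias and consume the entry
        combined.extend(item_dict.pop(alias, []))
        post_json['experiments_in_set'] = combined
        return post_json, item_dict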


def test_combine_set_replicates_with_existing():
post_json = {"aliases": "sample_repset", "description": "sample description"}
existing_data = {"uuid": "sampleuuid", "accession": "sample_accession",
'replicate_exps': [{'replicate_exp': 'awesome_uuid', 'bio_rep_no': 1.0, 'tec_rep_no': 6.0},
@@ -177,7 +205,7 @@ def test_combine_set_replicates():
assert dict_replicates2 == {}


-def test_combine_set_expsets():
+def test_combine_set_expsets_with_existing():
post_json = {"aliases": "sample_expset", "description": "sample description"}
existing_data = {"uuid": "sampleuuid", "accession": "sample_accession",
"experiments_in_set": ['awesome_uuid1', 'awesome_uuid2']}
11 changes: 3 additions & 8 deletions wranglertools/import_data.py
@@ -502,7 +502,7 @@ def excel_reader(datafile, sheet, update, connection, patchall, dict_patch_loadx
# if dict_replicates


-def get_upload_creds(file_id, connection, file_info):
+def get_upload_creds(file_id, connection, file_info):  # pragma: no cover
url = "%s%s/upload/" % (connection.server, file_id)
req = requests.post(url,
auth=connection.auth,
@@ -511,24 +511,21 @@ def get_upload_creds(file_id, connection, file_info):
return req.json()['@graph'][0]['upload_credentials']
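
For context (not shown in the diff): the POST above hits the item's /upload/ endpoint and the credentials are pulled from the response body. The response presumably looks roughly like the sketch below; apart from '@graph', 'upload_credentials', and the three AWS keys used later in upload_file, the field names are assumptions.

    # Hypothetical response shape (illustrative only):
    # {"@graph": [{"upload_credentials": {
    #     "access_key": "...", "secret_key": "...",
    #     "session_token": "...", "upload_url": "s3://bucket/key"}}]}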


-def upload_file(metadata_post_response, path):
+def upload_file(metadata_post_response, path):  # pragma: no cover
try:
item = metadata_post_response['@graph'][0]
creds = item['upload_credentials']
except Exception as e:
print(e)
return

    ####################
    # POST file to S3

-    env = os.environ.copy()  # pragma: no cover
+    env = os.environ.copy()
    env.update({
        'AWS_ACCESS_KEY_ID': creds['access_key'],
        'AWS_SECRET_ACCESS_KEY': creds['secret_key'],
        'AWS_SECURITY_TOKEN': creds['session_token'],
-    })  # pragma: no cover
+    })
# ~10s/GB from Stanford - AWS Oregon
# ~12-15s/GB from AWS Ireland - AWS Oregon
print("Uploading file.")
@@ -547,8 +544,6 @@

# the order to try to upload / update the items
# used to avoid dependencies... i.e. biosample needs the biosource to exist


def order_sorter(list_of_names):
ret_list = []
for i in sheet_order:
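
The loop body is cut off by the collapsed view; a minimal sketch of how it presumably continues, keeping only known sheet names in the canonical order (assumed; the shipped function may also warn about unrecognized names):

    def order_sorter(list_of_names):
        ret_list = []
        for i in sheet_order:  # sheet_order: module-level canonical ordering
            if i in list_of_names:
                ret_list.append(i)
        return ret_list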
