Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Fix bug related to missing params. #2929

Merged
merged 11 commits into from
Aug 3, 2018
Merged
8 changes: 8 additions & 0 deletions refinery/core/test_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,14 @@ def test_get_data_set_pagination_limit_and_offset(self):
self.assertEqual(get_response.data.get('data_sets')[0].get('uuid'),
self.data_set.uuid)

def test_total_data_sets_returned_correctly(self):
    """The API response's total_data_sets count matches the data_sets list."""
    # Add a second DataSet for this user so the count check is non-trivial.
    create_dataset_with_necessary_models(user=self.user)
    request = self.factory.get(self.url_root)
    request.user = self.user
    response = self.view(request)
    payload = response.data
    # Reported total must agree with the number of serialized data sets.
    self.assertEqual(len(payload.get('data_sets')),
                     payload.get('total_data_sets'))

def test_dataset_delete_successful(self):

self.assertEqual(DataSet.objects.all().count(), 2)
Expand Down
39 changes: 20 additions & 19 deletions refinery/core/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@

import boto3
import botocore
from guardian.shortcuts import get_groups_with_perms, get_objects_for_user, \
get_perms
from guardian.shortcuts import (get_groups_with_perms, get_objects_for_user,
get_perms)

from guardian.utils import get_anonymous_user
from registration import signals
Expand Down Expand Up @@ -774,25 +774,26 @@ def get(self, request):
request.user, 'dataset'
).order_by('-modification_date')

total_data_sets = len(user_data_sets)
if filters.get('is_owner') or filters.get('is_public') or \
filters.get('group'):
filtered_data_set = []
for data_set in user_data_sets:
if not data_set.is_valid:
logger.warning(
"DataSet with UUID: {} is invalid, and most likely is "
"still being created".format(data_set.uuid)
)
filtered_data_sets = []
filter_requested = filters.get('is_owner') \
or filters.get('is_public') \
or filters.get('group')
for data_set in user_data_sets:
if not data_set.is_valid:
logger.warning(
"DataSet with UUID: {} is invalid, and most likely is "
"still being created".format(data_set.uuid)
)
elif filter_requested:
if self.is_filtered_data_set(data_set, filters):
filtered_data_set.append(data_set)

total_data_sets = len(filtered_data_set)
data_sets = paginator.paginate_queryset(filtered_data_set, request)
else:
data_sets = paginator.paginate_queryset(user_data_sets, request)
filtered_data_sets.append(data_set)
else:
filtered_data_sets.append(data_set)

serializer = DataSetSerializer(data_sets, many=True,
total_data_sets = len(filtered_data_sets)
paged_data_sets = paginator.paginate_queryset(filtered_data_sets,
request)
serializer = DataSetSerializer(paged_data_sets, many=True,
context={'request': request})

return Response({'data_sets': serializer.data,
Expand Down
40 changes: 17 additions & 23 deletions refinery/data_set_manager/management/commands/process_isatab.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,23 +25,23 @@ class Command(BaseCommand):
"""

def add_arguments(self, parser):
parser.add_argumenet(
parser.add_argument('username')
parser.add_argument('base_isa_dir')
parser.add_argument(
'--base_pre_isa_dir',
action='store',
type='string'
)
parser.add_argumenet(
parser.add_argument(
'--file_base_path',
action='store',
type='string',
default=None
)
parser.add_argumenet(
parser.add_argument(
'--public',
action='store_true',
default=False
)
parser.add_argumenet(
parser.add_argument(
'--overwrite',
action='store_true',
default=False
Expand Down Expand Up @@ -123,28 +123,22 @@ def handle(self, username, base_isa_dir, **options):

task_num = 1
total = len(isatab_dict)
for (uuid, filename, skipped) in result.iterate():
for uuid in result.iterate():
try:
if not skipped:
if uuid is not None:
logger.info(
"%s / %s: Successfully parsed %s into "
"DataSet with UUID %s",
task_num, total, filename, uuid)
else:
logger.info(
"%s / %s: Import of %s failed. Please check "
"Celery log files.",
task_num, total, filename, uuid)
if uuid is not None:
logger.info(
"%s / %s: Successfully parsed file into "
"DataSet with UUID %s",
task_num, total, uuid)
else:
logger.info(
"%s / %s: Skipped %s as it has been "
"successfully parsed already. UUID %s",
task_num, total, filename, uuid)
"%s / %s: Import of %s failed. Please check "
"Celery log files.",
task_num, total, uuid)
task_num += 1
sys.stdout.flush()
except:
logger.info("%s / %s: Unsuccessful parsed %s",
task_num, total, filename)
logger.info("%s / %s: Unsuccessful parsed",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Unsuccessfully*

task_num, total)
task_num += 1
sys.stdout.flush()