Merge pull request #8726 from Johnetordoff/py3-2-2-3
[PLAT-1085] Make backwards compatible changes using 2to3
sloria committed Oct 9, 2018
2 parents 8fe6cbd + 3306c79 commit 1d11c3b
Showing 115 changed files with 199 additions and 214 deletions.
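The bulk of the diff below is mechanical 2to3 output: dict.iteritems() becomes dict.items(), bare map() and filter() calls become lists or list comprehensions, and xrange() becomes range(). As a rough illustration of why the .items() swap is safe on both interpreters (a minimal sketch; the dict contents are invented, not taken from the codebase):

    from __future__ import print_function  # keeps print() behaviour identical on Python 2

    metadata = {'folder_id': 'abc123', 'owner': 'jane'}

    # dict.items() exists on both interpreters: a list of (key, value) tuples on
    # Python 2, a lightweight view object on Python 3. Iteration looks the same.
    for key, value in metadata.items():
        print(key, value)

    # dict.iteritems() exists only on Python 2; on Python 3 it raises
    # AttributeError: 'dict' object has no attribute 'iteritems'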
10 changes: 5 additions & 5 deletions addons/base/models.py
@@ -200,7 +200,7 @@ def grant_oauth_access(self, node, external_account, metadata=None):
self.oauth_grants[node._id][external_account._id] = {}

# update the metadata with the supplied values
- for key, value in metadata.iteritems():
+ for key, value in metadata.items():
self.oauth_grants[node._id][external_account._id][key] = value

self.save()
@@ -264,15 +264,15 @@ def verify_oauth_access(self, node, external_account, metadata=None):
return False

# Verify every key/value pair is in the grants dict
- for key, value in metadata.iteritems():
+ for key, value in metadata.items():
if key not in grants or grants[key] != value:
return False

return True

def get_nodes_with_oauth_grants(self, external_account):
# Generator of nodes which have grants for this external account
- for node_id, grants in self.oauth_grants.iteritems():
+ for node_id, grants in self.oauth_grants.items():
node = AbstractNode.load(node_id)
if external_account._id in grants.keys() and not node.is_deleted:
yield node
@@ -294,11 +294,11 @@ def merge(self, user_settings):
if user_settings.__class__ is not self.__class__:
raise TypeError('Cannot merge different addons')

- for node_id, data in user_settings.oauth_grants.iteritems():
+ for node_id, data in user_settings.oauth_grants.items():
if node_id not in self.oauth_grants:
self.oauth_grants[node_id] = data
else:
- node_grants = user_settings.oauth_grants[node_id].iteritems()
+ node_grants = user_settings.oauth_grants[node_id].items()
for ext_acct, meta in node_grants:
if ext_acct not in self.oauth_grants[node_id]:
self.oauth_grants[node_id][ext_acct] = meta
2 changes: 1 addition & 1 deletion addons/bitbucket/api.py
@@ -166,7 +166,7 @@ def ref_to_params(branch=None, sha=None):

params = urllib.urlencode({
key: value
- for key, value in {'branch': branch, 'sha': sha}.iteritems()
+ for key, value in {'branch': branch, 'sha': sha}.items()
if value
})
if params:
2 changes: 1 addition & 1 deletion addons/bitbucket/views.py
@@ -151,7 +151,7 @@ def bitbucket_download_starball(node_addon, **kwargs):
)

resp = make_response(data)
- for key, value in headers.iteritems():
+ for key, value in headers.items():
resp.headers[key] = value

return resp
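The bitbucket, github, and gitlab view changes below all touch the same pattern: copy a plain dict of HTTP headers onto a Flask response. A minimal standalone sketch of that pattern, assuming Flask is available (the route name, payload, and header values are invented for illustration, not the repository's actual code):

    from flask import Flask, make_response

    app = Flask(__name__)

    @app.route('/starball')
    def starball():
        data = b'...'  # placeholder payload; the real views return an archive body
        headers = {
            'Content-Type': 'application/x-tar',
            'Content-Disposition': 'attachment; filename=archive.tar.gz',
        }
        resp = make_response(data)
        # dict.items() iterates the same way on Python 2 and 3, so the loop body
        # is untouched by the 2to3 rewrite.
        for key, value in headers.items():
            resp.headers[key] = value
        return resp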
2 changes: 1 addition & 1 deletion addons/github/api.py
@@ -161,7 +161,7 @@ def ref_to_params(branch=None, sha=None):
for key, value in {
'branch': branch,
'sha': sha,
- }.iteritems()
+ }.items()
if value
})
if params:
2 changes: 1 addition & 1 deletion addons/github/views.py
@@ -147,7 +147,7 @@ def github_download_starball(node_addon, **kwargs):
)

resp = make_response(data)
- for key, value in headers.iteritems():
+ for key, value in headers.items():
resp.headers[key] = value

return resp
2 changes: 1 addition & 1 deletion addons/gitlab/api.py
@@ -137,7 +137,7 @@ def ref_to_params(branch=None, sha=None):
for key, value in {
'branch': branch,
'sha': sha,
- }.iteritems()
+ }.items()
if value
})
if params:
2 changes: 1 addition & 1 deletion addons/gitlab/views.py
@@ -228,7 +228,7 @@ def gitlab_download_starball(node_addon, **kwargs):
)

resp = make_response(data)
- for key, value in headers.iteritems():
+ for key, value in headers.items():
resp.headers[key] = value

return resp
2 changes: 1 addition & 1 deletion addons/mendeley/models.py
@@ -105,7 +105,7 @@ def _citations_for_folder(self, list_id):
citation['id']: citation
for citation in self._citations_for_user()
}
- return map(lambda id: citations[id], document_ids)
+ return [citations[id] for id in document_ids]

def _citations_for_user(self):

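The mendeley change above (and the list(map(...)) wrappers in the admin views further down) exists because map() returns a lazy iterator on Python 3 rather than a list. A small sketch of the difference, with made-up citation data (illustrative only):

    # Why bare map() calls get rewritten by 2to3.
    citations = {'a1': {'title': 'First'}, 'b2': {'title': 'Second'}}
    document_ids = ['b2', 'a1']

    lazy = map(lambda id: citations[id], document_ids)
    # Python 2: lazy is already a list.
    # Python 3: lazy is a map object; len(lazy) or lazy[0] would fail.

    result = [citations[id] for id in document_ids]  # a real list on both interpreters
    assert len(result) == 2
    assert result[0]['title'] == 'Second'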
2 changes: 1 addition & 1 deletion addons/mendeley/tests/utils.py
@@ -227,4 +227,4 @@ def set_node_settings(self, settings):
]
}

- mock_responses = {k:dumps(v) for k,v in mock_responses.iteritems()}
+ mock_responses = {k:dumps(v) for k,v in mock_responses.items()}
4 changes: 2 additions & 2 deletions addons/osfstorage/tests/test_models.py
@@ -168,7 +168,7 @@ def test_append_to_file(self):
def test_children(self):
kids = [
self.node_settings.get_root().append_file('Foo{}Bar'.format(x))
- for x in xrange(100)
+ for x in range(100)
]

assert_equals(sorted(kids, key=lambda kid: kid.name), list(self.node_settings.get_root().children.order_by('name')))
@@ -236,7 +236,7 @@ def test_delete_file(self):
assert_equal(trashed.path, '/' + child._id)
trashed_field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and
f.name not in ['id', '_materialized_path', 'content_type_pk', '_path', 'deleted_on', 'deleted_by', 'type', 'modified']]
- for f, value in child_data.iteritems():
+ for f, value in child_data.items():
if f in trashed_field_names:
assert_equal(getattr(trashed, f), value)

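The xrange -> range swap in test_children is the other common 2to3 fix in this commit: xrange() exists only on Python 2, while range() exists on both and drives a comprehension equally well. A quick sketch of the equivalence (illustrative only):

    names = ['Foo{}Bar'.format(x) for x in range(100)]
    assert len(names) == 100
    assert names[0] == 'Foo0Bar'
    # On Python 2, range(100) eagerly builds a list (xrange was the lazy variant);
    # on Python 3, range(100) is itself a lazy sequence and xrange no longer exists.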
2 changes: 1 addition & 1 deletion addons/wiki/migrations/0010_migrate_node_wiki_pages.py
@@ -29,7 +29,7 @@ def reverse_func(state, schema):
progress_bar = progressbar.ProgressBar(maxval=nodes.count() or 100).start()
for i, node in enumerate(nodes, 1):
progress_bar.update(i)
- for wiki_key, version_list in node.wiki_pages_versions.iteritems():
+ for wiki_key, version_list in node.wiki_pages_versions.items():
if version_list:
for index, version in enumerate(version_list):
nwp = NodeWikiPage.objects.filter(former_guid=version).include(None)[0]
@@ -27,7 +27,7 @@ def modify_user_settings(state, add, library_name):
user_settings_pending_save = []

for user_setting in ZoteroUserSettings.objects.all():
- for node, ext_accounts in user_setting.oauth_grants.iteritems():
+ for node, ext_accounts in user_setting.oauth_grants.items():
for ext_account in ext_accounts.keys():
if add:
user_setting.oauth_grants[node][ext_account]['library'] = library_name
4 changes: 2 additions & 2 deletions addons/zotero/tests/utils.py
@@ -263,7 +263,7 @@ def set_node_settings(self, settings):
]
}

- mock_responses = {k: dumps(v) for k, v in mock_responses.iteritems()}
+ mock_responses = {k: dumps(v) for k, v in mock_responses.items()}


mock_responses_with_filed_and_unfiled = {
@@ -365,4 +365,4 @@ def set_node_settings(self, settings):
]
}

- mock_responses_with_filed_and_unfiled = {k: dumps(v) for k, v in mock_responses_with_filed_and_unfiled.iteritems()}
+ mock_responses_with_filed_and_unfiled = {k: dumps(v) for k, v in mock_responses_with_filed_and_unfiled.items()}
2 changes: 1 addition & 1 deletion admin/collection_providers/views.py
@@ -224,7 +224,7 @@ def create_or_update_provider(self, provider_data):
provider_data.pop('additional_providers')

if provider:
- for key, val in provider_data.iteritems():
+ for key, val in provider_data.items():
setattr(provider, key, val)
provider.save()
else:
4 changes: 2 additions & 2 deletions admin/meetings/views.py
@@ -31,7 +31,7 @@ def get_context_data(self, **kwargs):
paginator, page, queryset, is_paginated = self.paginate_queryset(
queryset, page_size
)
- kwargs.setdefault('meetings', map(serialize_meeting, queryset))
+ kwargs.setdefault('meetings', list(map(serialize_meeting, queryset)))
kwargs.setdefault('page', page)
return super(MeetingListView, self).get_context_data(**kwargs)

@@ -127,7 +127,7 @@ def get_custom_fields(data):
"""Return two dicts, one of field_names and the other regular fields."""
data_copy = deepcopy(data)
field_names = {}
- for key, value in data.iteritems():
+ for key, value in data.items():
if key in DEFAULT_FIELD_NAMES:
field_names[key] = data_copy.pop(key)
return field_names, data_copy
4 changes: 2 additions & 2 deletions admin/nodes/serializers.py
@@ -25,7 +25,7 @@ def serialize_node(node):
'embargo': embargo,
'embargo_formatted': embargo_formatted,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
- 'children': map(serialize_simple_node, node.nodes),
+ 'children': list(map(serialize_simple_node, node.nodes)),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'registered_date': node.registered_date,
@@ -39,7 +39,7 @@
}

def serialize_log(log):
- return log, list(log.params.iteritems())
+ return log, log.params.items()


def serialize_simple_user_and_node_permissions(node, user):
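Note that serialize_log used to materialise the params with list(...); the rewritten line returns whatever dict.items() yields, which is a list on Python 2 but a view object on Python 3. Both support iteration, len(), and membership tests, though only the Python 2 list supports indexing. A sketch of that difference (the params values are invented):

    # What dict.items() returns on each interpreter.
    params = {'node': 'abc12', 'contributors': ['def34']}

    items = params.items()
    # Python 2: a list of (key, value) tuples -> indexing and slicing work.
    # Python 3: a dict_items view -> iteration, len() and membership work,
    #           but items[0] raises TypeError; wrap in list() if indexing is needed.
    assert len(items) == 2
    assert ('node', 'abc12') in items
    for key, value in items:
        pass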
6 changes: 3 additions & 3 deletions admin/nodes/views.py
@@ -237,7 +237,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'logs': map(serialize_log, query_set),
+ 'logs': list(map(serialize_log, query_set)),
'page': page,
}

@@ -264,7 +264,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'nodes': map(serialize_node, query_set),
+ 'nodes': list(map(serialize_node, query_set)),
'page': page,
}

@@ -328,7 +328,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'nodes': map(serialize_node, query_set),
+ 'nodes': list(map(serialize_node, query_set)),
'page': page,
}

2 changes: 1 addition & 1 deletion admin/pre_reg/serializers.py
@@ -54,7 +54,7 @@ def serialize_draft_registration(draft, json_safe=True):
'embargo': embargo,
'registered_node': node_url,
'status': get_approval_status(draft),
- 'logs': map(serialize_draft_logs, draft.status_logs),
+ 'logs': list(map(serialize_draft_logs, draft.status_logs)),
}


2 changes: 1 addition & 1 deletion admin/pre_reg/views.py
@@ -235,7 +235,7 @@ def post(self, request, *args, **kwargs):
draft.update_metadata(data)
draft.save()
log_message = list()
- for key, value in data.iteritems():
+ for key, value in data.items():
comments = data.get(key, {}).get('comments', [])
for comment in comments:
log_message.append('{}: {}'.format(key, comment['value']))
4 changes: 2 additions & 2 deletions admin/preprint_providers/forms.py
@@ -30,7 +30,7 @@ def __init__(self, *args, **kwargs):
self.fields['default_license'].choices = defaultlicense_choices

def clean_subjects_acceptable(self, *args, **kwargs):
- subject_ids = filter(None, self.data['subjects_chosen'].split(', '))
+ subject_ids = [_f for _f in self.data['subjects_chosen'].split(', ') if _f]
subjects_selected = Subject.objects.filter(id__in=subject_ids)
rules = get_subject_rules(subjects_selected)
return rules
@@ -85,7 +85,7 @@ class PreprintProviderCustomTaxonomyForm(forms.Form):
def __init__(self, *args, **kwargs):
super(PreprintProviderCustomTaxonomyForm, self).__init__(*args, **kwargs)
subject_choices = [(x, x) for x in Subject.objects.filter(bepress_subject__isnull=True).values_list('text', flat=True)]
- for name, field in self.fields.iteritems():
+ for name, field in self.fields.items():
if hasattr(field, 'choices'):
if field.choices == []:
field.choices = subject_choices
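The clean_subjects_acceptable change above is the filter() counterpart of the map() rewrites: filter(None, seq) drops falsy entries and returns a list on Python 2 but a lazy filter object on Python 3, so 2to3 emits a list comprehension to preserve list semantics. A sketch with an invented subjects_chosen value (illustrative only):

    # Dropping empty strings while keeping a real list.
    subjects_chosen = '12, 34, , 56'
    parts = subjects_chosen.split(', ')       # ['12', '34', '', '56']

    subject_ids = [_f for _f in parts if _f]  # ['12', '34', '56'] on Python 2 and 3
    # filter(None, parts) yields the same elements, but on Python 3 it is lazy,
    # so code expecting a list (len, indexing, reuse) could break.
    assert subject_ids == ['12', '34', '56']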
6 changes: 3 additions & 3 deletions admin/preprint_providers/views.py
@@ -221,7 +221,7 @@ def get(self, request, *args, **kwargs):
preprint_provider = PreprintProvider.objects.get(id=self.kwargs['preprint_provider_id'])
data = serializers.serialize('json', [preprint_provider])
cleaned_data = json.loads(data)[0]
- cleaned_fields = {key: value for key, value in cleaned_data['fields'].iteritems() if key not in FIELDS_TO_NOT_IMPORT_EXPORT}
+ cleaned_fields = {key: value for key, value in cleaned_data['fields'].items() if key not in FIELDS_TO_NOT_IMPORT_EXPORT}
cleaned_fields['licenses_acceptable'] = [node_license.license_id for node_license in preprint_provider.licenses_acceptable.all()]
cleaned_fields['default_license'] = preprint_provider.default_license.license_id if preprint_provider.default_license else ''
cleaned_fields['subjects'] = self.serialize_subjects(preprint_provider)
@@ -287,7 +287,7 @@ def post(self, request, *args, **kwargs):
file_json = json.loads(file_str)
current_fields = [f.name for f in PreprintProvider._meta.get_fields()]
# make sure not to import an exported access token for SHARE
- cleaned_result = {key: value for key, value in file_json['fields'].iteritems() if key not in FIELDS_TO_NOT_IMPORT_EXPORT and key in current_fields}
+ cleaned_result = {key: value for key, value in file_json['fields'].items() if key not in FIELDS_TO_NOT_IMPORT_EXPORT and key in current_fields}
preprint_provider = self.create_or_update_provider(cleaned_result)
return redirect('preprint_providers:detail', preprint_provider_id=preprint_provider.id)

@@ -312,7 +312,7 @@ def create_or_update_provider(self, provider_data):
subject_data = provider_data.pop('subjects', False)

if provider:
- for key, val in provider_data.iteritems():
+ for key, val in provider_data.items():
setattr(provider, key, val)
else:
provider = PreprintProvider(**provider_data)
4 changes: 2 additions & 2 deletions admin/preprints/views.py
@@ -84,7 +84,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'preprints': map(serialize_preprint, query_set),
+ 'preprints': list(map(serialize_preprint, query_set)),
'page': page,
}

@@ -140,7 +140,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'requests': map(serialize_withdrawal_request, query_set),
+ 'requests': list(map(serialize_withdrawal_request, query_set)),
'page': page,
}

2 changes: 1 addition & 1 deletion admin/registration_providers/forms.py
@@ -65,7 +65,7 @@ class RegistrationProviderCustomTaxonomyForm(forms.Form):
def __init__(self, *args, **kwargs):
super(RegistrationProviderCustomTaxonomyForm, self).__init__(*args, **kwargs)
subject_choices = [(x, x) for x in Subject.objects.filter(bepress_subject__isnull=True).values_list('text', flat=True)]
- for name, field in self.fields.iteritems():
+ for name, field in self.fields.items():
if hasattr(field, 'choices'):
if field.choices == []:
field.choices = subject_choices
2 changes: 1 addition & 1 deletion admin/registration_providers/views.py
@@ -255,7 +255,7 @@ def create_or_update_provider(self, provider_data):
provider_data.pop('additional_providers')

if provider:
- for key, val in provider_data.iteritems():
+ for key, val in provider_data.items():
setattr(provider, key, val)
provider.save()
else:
2 changes: 1 addition & 1 deletion admin/spam/serializers.py
@@ -7,7 +7,7 @@
def serialize_comment(comment):
reports = [
serialize_report(user, report)
- for user, report in comment.reports.iteritems()
+ for user, report in comment.reports.items()
]
author_abs_url = furl(OSF_DOMAIN)
author_abs_url.path.add(comment.user.url)
2 changes: 1 addition & 1 deletion admin/spam/views.py
@@ -54,7 +54,7 @@ def get_context_data(self, **kwargs):
page_size = self.get_paginate_by(queryset)
paginator, page, queryset, is_paginated = self.paginate_queryset(
queryset, page_size)
- kwargs.setdefault('spam', map(serialize_comment, queryset))
+ kwargs.setdefault('spam', list(map(serialize_comment, queryset)))
kwargs.setdefault('page', page)
kwargs.setdefault('status', self.request.GET.get('status', '1'))
kwargs.setdefault('page_number', page.number)
2 changes: 1 addition & 1 deletion admin/users/serializers.py
@@ -8,7 +8,7 @@ def serialize_user(user):
'username': user.username,
'name': user.fullname,
'id': user._id,
- 'nodes': map(serialize_simple_node, user.contributor_to),
+ 'nodes': list(map(serialize_simple_node, user.contributor_to)),
'emails': user.emails.values_list('address', flat=True),
'last_login': user.date_last_login,
'confirmed': user.date_confirmed,
4 changes: 2 additions & 2 deletions admin/users/views.py
@@ -186,7 +186,7 @@ def get_context_data(self, **kwargs):
paginator, page, query_set, is_paginated = self.paginate_queryset(
query_set, page_size)
return {
- 'users': map(serialize_user, query_set),
+ 'users': list(map(serialize_user, query_set)),
'page': page,
}

@@ -514,7 +514,7 @@ class GetUserClaimLinks(GetUserLink):
def get_claim_links(self, user):
links = []

- for guid, value in user.unclaimed_records.iteritems():
+ for guid, value in user.unclaimed_records.items():
node = Node.load(guid)
url = '{base_url}user/{uid}/{project_id}/claim/?token={token}'.format(
base_url=DOMAIN,
