From 5c40b980e201fe3b7ee899c6d271574009504a25 Mon Sep 17 00:00:00 2001
From: Oliver Sauder
Date: Fri, 13 Nov 2020 16:56:41 +0100
Subject: [PATCH 1/2] Add configuration to format with black

---
 .travis.yml                             |  2 ++
 requirements/requirements-codestyle.txt |  1 +
 setup.cfg                               | 25 +++++++++++++++----------
 tox.ini                                 |  6 ++++++
 4 files changed, 24 insertions(+), 10 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 05feb314..65ea1cc8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -19,6 +19,8 @@ matrix:
     - env: TOXENV=py39-django31-drfmaster
 
   include:
+    - python: 3.6
+      env: TOXENV=black
     - python: 3.6
      env: TOXENV=lint
     - python: 3.6
diff --git a/requirements/requirements-codestyle.txt b/requirements/requirements-codestyle.txt
index c144f975..cdb8b514 100644
--- a/requirements/requirements-codestyle.txt
+++ b/requirements/requirements-codestyle.txt
@@ -1,3 +1,4 @@
+black==20.8b1
 flake8==3.8.4
 flake8-isort==4.0.0
 isort==5.6.4
diff --git a/setup.cfg b/setup.cfg
index 4f483e02..07b279c8 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -5,29 +5,34 @@ test = pytest
 universal = 1
 
 [flake8]
-ignore = F405,W504
-max-line-length = 100
+max-line-length = 88
+extend-ignore =
+    # whitespace before ':' - disabled as not PEP8 compliant
+    E203,
+    # line too long (managed by black)
+    E501,
+    # usage of star imports
+    # TODO mark star imports directly in code to ignore this error
+    F405
 exclude =
-    snapshots
     build/lib,
-    docs/conf.py,
-    migrations,
     .eggs
     .tox,
     env
     .venv
 
 [isort]
-indent = 4
+multi_line_output = 3
+include_trailing_comma = True
+force_grid_wrap = 0
+use_parentheses = True
+ensure_newline_before_comments = True
+line_length = 88
 known_first_party = rest_framework_json_api
 # This is to "trick" isort into putting example below DJA imports.
 known_localfolder = example
-line_length = 100
-multi_line_output = 3
 skip=
     build/lib,
-    docs/conf.py,
-    migrations,
     .eggs
     .tox,
     env
diff --git a/tox.ini b/tox.ini
index c0ebf760..dc808c01 100644
--- a/tox.ini
+++ b/tox.ini
@@ -20,6 +20,12 @@ setenv =
 commands =
     pytest --cov --no-cov-on-fail --cov-report xml {posargs}
 
+[testenv:black]
+basepython = python3.6
+deps =
+    -rrequirements/requirements-codestyle.txt
+commands = black --check .
+ [testenv:lint] basepython = python3.6 deps = From 0146890ecf4ba62d242c7981fdf932c01a7c9f8f Mon Sep 17 00:00:00 2001 From: Oliver Sauder Date: Fri, 13 Nov 2020 17:00:05 +0100 Subject: [PATCH 2/2] Format code with black --- docs/conf.py | 173 ++-- example/api/resources/identity.py | 23 +- example/api/serializers/identity.py | 15 +- example/api/serializers/post.py | 1 + example/factories.py | 8 +- example/migrations/0001_initial.py | 157 ++-- example/migrations/0002_taggeditem.py | 36 +- example/migrations/0003_polymorphics.py | 114 ++- example/migrations/0004_auto_20171011_0631.py | 64 +- example/migrations/0005_auto_20180922_1508.py | 46 +- example/migrations/0006_auto_20181228_0752.py | 45 +- .../migrations/0007_artproject_description.py | 6 +- example/migrations/0008_labresults.py | 29 +- example/models.py | 40 +- example/serializers.py | 257 +++--- example/settings/dev.py | 116 ++- example/settings/test.py | 20 +- example/tests/__init__.py | 16 +- example/tests/conftest.py | 6 +- .../tests/integration/test_browsable_api.py | 14 +- example/tests/integration/test_includes.py | 272 ++++--- example/tests/integration/test_meta.py | 62 +- .../integration/test_model_resource_name.py | 169 ++-- .../test_non_paginated_responses.py | 76 +- example/tests/integration/test_pagination.py | 66 +- .../tests/integration/test_polymorphism.py | 261 +++--- .../integration/test_sparse_fieldsets.py | 24 +- example/tests/snapshots/snap_test_errors.py | 123 ++- example/tests/snapshots/snap_test_openapi.py | 31 +- example/tests/test_errors.py | 180 ++--- example/tests/test_filters.py | 610 ++++++++------ example/tests/test_format_keys.py | 56 +- example/tests/test_generic_validation.py | 18 +- example/tests/test_generic_viewset.py | 112 +-- example/tests/test_model_viewsets.py | 203 ++--- example/tests/test_openapi.py | 84 +- example/tests/test_parsers.py | 87 +- example/tests/test_performance.py | 40 +- example/tests/test_relations.py | 199 +++-- example/tests/test_serializers.py | 149 ++-- example/tests/test_sideload_resources.py | 9 +- example/tests/test_views.py | 613 +++++++------- .../unit/test_default_drf_serializers.py | 127 ++- example/tests/unit/test_factories.py | 34 +- .../tests/unit/test_filter_schema_params.py | 107 ++- example/tests/unit/test_pagination.py | 62 +- .../tests/unit/test_renderer_class_methods.py | 92 +-- example/tests/unit/test_renderers.py | 94 +-- .../unit/test_serializer_method_field.py | 11 +- example/tests/unit/test_settings.py | 4 +- example/urls.py | 125 +-- example/urls_test.py | 129 +-- example/utils.py | 2 +- example/views.py | 96 ++- rest_framework_json_api/__init__.py | 10 +- .../django_filters/backends.py | 36 +- rest_framework_json_api/exceptions.py | 7 +- rest_framework_json_api/filters.py | 43 +- rest_framework_json_api/metadata.py | 160 ++-- rest_framework_json_api/pagination.py | 88 +- rest_framework_json_api/parsers.py | 98 ++- rest_framework_json_api/relations.py | 192 +++-- rest_framework_json_api/renderers.py | 376 +++++---- rest_framework_json_api/schemas/openapi.py | 760 +++++++++--------- rest_framework_json_api/serializers.py | 157 ++-- rest_framework_json_api/settings.py | 20 +- rest_framework_json_api/utils.py | 166 ++-- rest_framework_json_api/views.py | 128 +-- setup.py | 98 +-- tests/models.py | 9 +- tests/test_utils.py | 246 +++--- 71 files changed, 4453 insertions(+), 3654 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ee435d36..0b8caa69 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,7 +15,6 @@ import datetime import os -import 
shlex import sys import django @@ -26,44 +25,44 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) -os.environ['DJANGO_SETTINGS_MODULE'] = 'example.settings' +sys.path.insert(0, os.path.abspath("..")) +os.environ["DJANGO_SETTINGS_MODULE"] = "example.settings" django.setup() # Auto-generate API documentation. -main(['-o', 'apidoc', '-f', '-e', '-T', '-M', '../rest_framework_json_api']) +main(["-o", "apidoc", "-f", "-e", "-T", "-M", "../rest_framework_json_api"]) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', 'recommonmark'] -autodoc_member_order = 'bysource' +extensions = ["sphinx.ext.autodoc", "recommonmark"] +autodoc_member_order = "bysource" autodoc_inherit_docstrings = False # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Django REST Framework JSON API' +project = "Django REST Framework JSON API" year = datetime.date.today().year -copyright = '{}, Django REST Framework JSON API contributors'.format(year) -author = 'Django REST Framework JSON API contributors' +copyright = "{}, Django REST Framework JSON API contributors".format(year) +author = "Django REST Framework JSON API contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -83,37 +82,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build', 'pull_request_template.md'] +exclude_patterns = ["_build", "pull_request_template.md"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'default' +pygments_style = "default" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -123,150 +122,153 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "default" -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' +on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' + + html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
-#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'DjangoRESTFrameworkJSONAPIdoc' +htmlhelp_basename = "DjangoRESTFrameworkJSONAPIdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'DjangoRESTFrameworkJSONAPI.tex', 'Django REST Framework JSON API Documentation', - 'Django REST Framework JSON API contributors', 'manual'), + ( + master_doc, + "DjangoRESTFrameworkJSONAPI.tex", + "Django REST Framework JSON API Documentation", + "Django REST Framework JSON API contributors", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -274,12 +276,17 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'djangorestframeworkjsonapi', 'Django REST Framework JSON API Documentation', - [author], 1) + ( + master_doc, + "djangorestframeworkjsonapi", + "Django REST Framework JSON API Documentation", + [author], + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -288,19 +295,25 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'DjangoRESTFrameworkJSONAPI', 'Django REST Framework JSON API Documentation', - author, 'DjangoRESTFrameworkJSONAPI', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "DjangoRESTFrameworkJSONAPI", + "Django REST Framework JSON API Documentation", + author, + "DjangoRESTFrameworkJSONAPI", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff --git a/example/api/resources/identity.py b/example/api/resources/identity.py index 6785e5d9..a291ba4f 100644 --- a/example/api/resources/identity.py +++ b/example/api/resources/identity.py @@ -11,7 +11,7 @@ class Identity(viewsets.ModelViewSet): - queryset = auth_models.User.objects.all().order_by('pk') + queryset = auth_models.User.objects.all().order_by("pk") serializer_class = IdentitySerializer # demonstrate sideloading data for use at app boot time @@ -20,22 +20,24 @@ def posts(self, request): self.resource_name = False identities = self.queryset - posts = [{'id': 1, 'title': 'Test Blog Post'}] + posts = [{"id": 1, "title": "Test Blog Post"}] data = { - encoding.force_str('identities'): IdentitySerializer(identities, many=True).data, - encoding.force_str('posts'): PostSerializer(posts, many=True).data, + encoding.force_str("identities"): IdentitySerializer( + identities, many=True + ).data, + encoding.force_str("posts"): PostSerializer(posts, many=True).data, } - return Response(utils.format_field_names(data, format_type='camelize')) + return Response(utils.format_field_names(data, format_type="camelize")) @action(detail=True) def manual_resource_name(self, request, *args, **kwargs): - self.resource_name = 'data' + self.resource_name = "data" return super(Identity, self).retrieve(request, args, kwargs) @action(detail=True) def validation(self, request, *args, **kwargs): - raise serializers.ValidationError('Oh nohs!') + raise serializers.ValidationError("Oh nohs!") class GenericIdentity(generics.GenericAPIView): @@ -44,10 +46,11 @@ class GenericIdentity(generics.GenericAPIView): GET /identities/generic """ + serializer_class = IdentitySerializer - allowed_methods = ['GET'] - renderer_classes = (renderers.JSONRenderer, ) - parser_classes = (parsers.JSONParser, ) + allowed_methods = ["GET"] + renderer_classes = (renderers.JSONRenderer,) + parser_classes = (parsers.JSONParser,) def get_queryset(self): return auth_models.User.objects.all() diff --git a/example/api/serializers/identity.py b/example/api/serializers/identity.py index 2538c6df..069259d2 100644 --- 
a/example/api/serializers/identity.py +++ b/example/api/serializers/identity.py @@ -9,17 +9,16 @@ class IdentitySerializer(serializers.ModelSerializer): def validate_first_name(self, data): if len(data) > 10: - raise serializers.ValidationError( - 'There\'s a problem with first name') + raise serializers.ValidationError("There's a problem with first name") return data def validate_last_name(self, data): if len(data) > 10: raise serializers.ValidationError( { - 'id': 'armageddon101', - 'detail': 'Hey! You need a last name!', - 'meta': 'something', + "id": "armageddon101", + "detail": "Hey! You need a last name!", + "meta": "something", } ) return data @@ -27,4 +26,8 @@ def validate_last_name(self, data): class Meta: model = auth_models.User fields = ( - 'id', 'first_name', 'last_name', 'email', ) + "id", + "first_name", + "last_name", + "email", + ) diff --git a/example/api/serializers/post.py b/example/api/serializers/post.py index bf0cf463..dbd78cfc 100644 --- a/example/api/serializers/post.py +++ b/example/api/serializers/post.py @@ -5,4 +5,5 @@ class PostSerializer(serializers.Serializer): """ Blog post serializer """ + title = serializers.CharField(max_length=50) diff --git a/example/factories.py b/example/factories.py index 0639a0dd..0de561f6 100644 --- a/example/factories.py +++ b/example/factories.py @@ -15,7 +15,7 @@ Entry, ProjectType, ResearchProject, - TaggedItem + TaggedItem, ) faker = FakerFactory.create() @@ -43,7 +43,7 @@ class Meta: name = factory.LazyAttribute(lambda x: faker.name()) email = factory.LazyAttribute(lambda x: faker.email()) - bio = factory.RelatedFactory('example.factories.AuthorBioFactory', 'author') + bio = factory.RelatedFactory("example.factories.AuthorBioFactory", "author") type = factory.SubFactory(AuthorTypeFactory) @@ -54,7 +54,9 @@ class Meta: author = factory.SubFactory(AuthorFactory) body = factory.LazyAttribute(lambda x: faker.text()) - metadata = factory.RelatedFactory('example.factories.AuthorBioMetadataFactory', 'bio') + metadata = factory.RelatedFactory( + "example.factories.AuthorBioMetadataFactory", "bio" + ) class AuthorBioMetadataFactory(factory.django.DjangoModelFactory): diff --git a/example/migrations/0001_initial.py b/example/migrations/0001_initial.py index 38cc0be9..35e01afe 100644 --- a/example/migrations/0001_initial.py +++ b/example/migrations/0001_initial.py @@ -2,93 +2,154 @@ # Generated by Django 1.9.5 on 2016-05-02 08:26 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Author', + name="Author", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('name', models.CharField(max_length=50)), - ('email', models.EmailField(max_length=254)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=50)), + ("email", models.EmailField(max_length=254)), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='AuthorBio', + name="AuthorBio", fields=[ - ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('body', models.TextField()), - ('author', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='bio', to='example.Author')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("body", models.TextField()), + ( + "author", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="bio", + to="example.Author", + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Blog', + name="Blog", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('name', models.CharField(max_length=100)), - ('tagline', models.TextField()), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=100)), + ("tagline", models.TextField()), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Comment', + name="Comment", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('body', models.TextField()), - ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='example.Author')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("body", models.TextField()), + ( + "author", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="example.Author", + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Entry', + name="Entry", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('headline', models.CharField(max_length=255)), - ('body_text', models.TextField(null=True)), - ('pub_date', models.DateField(null=True)), - ('mod_date', models.DateField(null=True)), - ('n_comments', models.IntegerField(default=0)), - ('n_pingbacks', models.IntegerField(default=0)), - ('rating', models.IntegerField(default=0)), - ('authors', models.ManyToManyField(to='example.Author')), - ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='example.Blog')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("headline", models.CharField(max_length=255)), + 
("body_text", models.TextField(null=True)), + ("pub_date", models.DateField(null=True)), + ("mod_date", models.DateField(null=True)), + ("n_comments", models.IntegerField(default=0)), + ("n_pingbacks", models.IntegerField(default=0)), + ("rating", models.IntegerField(default=0)), + ("authors", models.ManyToManyField(to="example.Author")), + ( + "blog", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="example.Blog" + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.AddField( - model_name='comment', - name='entry', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='example.Entry'), + model_name="comment", + name="entry", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="example.Entry" + ), ), ] diff --git a/example/migrations/0002_taggeditem.py b/example/migrations/0002_taggeditem.py index 46a79de9..3a57d22f 100644 --- a/example/migrations/0002_taggeditem.py +++ b/example/migrations/0002_taggeditem.py @@ -2,30 +2,44 @@ # Generated by Django 1.10.5 on 2017-02-01 08:34 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('contenttypes', '0002_remove_content_type_name'), - ('example', '0001_initial'), + ("contenttypes", "0002_remove_content_type_name"), + ("example", "0001_initial"), ] operations = [ migrations.CreateModel( - name='TaggedItem', + name="TaggedItem", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('tag', models.SlugField()), - ('object_id', models.PositiveIntegerField()), - ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("tag", models.SlugField()), + ("object_id", models.PositiveIntegerField()), + ( + "content_type", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="contenttypes.ContentType", + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), ] diff --git a/example/migrations/0003_polymorphics.py b/example/migrations/0003_polymorphics.py index 9020176b..46919dbb 100644 --- a/example/migrations/0003_polymorphics.py +++ b/example/migrations/0003_polymorphics.py @@ -2,75 +2,125 @@ # Generated by Django 1.11.1 on 2017-05-17 14:49 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('contenttypes', '0002_remove_content_type_name'), - ('example', '0002_taggeditem'), + ("contenttypes", "0002_remove_content_type_name"), + ("example", "0002_taggeditem"), ] operations = [ migrations.CreateModel( - name='Company', + name="Company", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("name", 
models.CharField(max_length=100)), ], ), migrations.CreateModel( - name='Project', + name="Project", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('topic', models.CharField(max_length=30)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("topic", models.CharField(max_length=30)), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.AlterField( - model_name='comment', - name='entry', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='example.Entry'), + model_name="comment", + name="entry", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="comments", + to="example.Entry", + ), ), migrations.CreateModel( - name='ArtProject', + name="ArtProject", fields=[ - ('project_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='example.Project')), - ('artist', models.CharField(max_length=30)), + ( + "project_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="example.Project", + ), + ), + ("artist", models.CharField(max_length=30)), ], options={ - 'abstract': False, + "abstract": False, }, - bases=('example.project',), + bases=("example.project",), ), migrations.CreateModel( - name='ResearchProject', + name="ResearchProject", fields=[ - ('project_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='example.Project')), - ('supervisor', models.CharField(max_length=30)), + ( + "project_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="example.Project", + ), + ), + ("supervisor", models.CharField(max_length=30)), ], options={ - 'abstract': False, + "abstract": False, }, - bases=('example.project',), + bases=("example.project",), ), migrations.AddField( - model_name='project', - name='polymorphic_ctype', - field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_example.project_set+', to='contenttypes.ContentType'), + model_name="project", + name="polymorphic_ctype", + field=models.ForeignKey( + editable=False, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="polymorphic_example.project_set+", + to="contenttypes.ContentType", + ), ), migrations.AddField( - model_name='company', - name='current_project', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='companies', to='example.Project'), + model_name="company", + name="current_project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="companies", + to="example.Project", + ), ), migrations.AddField( - model_name='company', - name='future_projects', - field=models.ManyToManyField(to='example.Project'), + model_name="company", + name="future_projects", + field=models.ManyToManyField(to="example.Project"), ), ] diff --git a/example/migrations/0004_auto_20171011_0631.py b/example/migrations/0004_auto_20171011_0631.py index 96df2aa7..b1035dfe 100644 --- a/example/migrations/0004_auto_20171011_0631.py +++ 
b/example/migrations/0004_auto_20171011_0631.py @@ -2,61 +2,73 @@ # Generated by Django 1.11.6 on 2017-10-11 06:31 from __future__ import unicode_literals -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('example', '0003_polymorphics'), + ("example", "0003_polymorphics"), ] operations = [ migrations.CreateModel( - name='AuthorType', + name="AuthorType", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('name', models.CharField(max_length=50)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=50)), ], options={ - 'ordering': ('id',), + "ordering": ("id",), }, ), migrations.AlterModelOptions( - name='author', - options={'ordering': ('id',)}, + name="author", + options={"ordering": ("id",)}, ), migrations.AlterModelOptions( - name='authorbio', - options={'ordering': ('id',)}, + name="authorbio", + options={"ordering": ("id",)}, ), migrations.AlterModelOptions( - name='blog', - options={'ordering': ('id',)}, + name="blog", + options={"ordering": ("id",)}, ), migrations.AlterModelOptions( - name='comment', - options={'ordering': ('id',)}, + name="comment", + options={"ordering": ("id",)}, ), migrations.AlterModelOptions( - name='entry', - options={'ordering': ('id',)}, + name="entry", + options={"ordering": ("id",)}, ), migrations.AlterModelOptions( - name='taggeditem', - options={'ordering': ('id',)}, + name="taggeditem", + options={"ordering": ("id",)}, ), migrations.AlterField( - model_name='entry', - name='authors', - field=models.ManyToManyField(related_name='entries', to='example.Author'), + model_name="entry", + name="authors", + field=models.ManyToManyField(related_name="entries", to="example.Author"), ), migrations.AddField( - model_name='author', - name='type', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='example.AuthorType'), + model_name="author", + name="type", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="example.AuthorType", + ), ), ] diff --git a/example/migrations/0005_auto_20180922_1508.py b/example/migrations/0005_auto_20180922_1508.py index 99d397f6..58b2808d 100644 --- a/example/migrations/0005_auto_20180922_1508.py +++ b/example/migrations/0005_auto_20180922_1508.py @@ -1,43 +1,55 @@ # Generated by Django 2.1.1 on 2018-09-22 15:08 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('example', '0004_auto_20171011_0631'), + ("example", "0004_auto_20171011_0631"), ] operations = [ migrations.CreateModel( - name='ProjectType', + name="ProjectType", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('name', models.CharField(max_length=50)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + 
("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("name", models.CharField(max_length=50)), ], options={ - 'ordering': ('id',), + "ordering": ("id",), }, ), migrations.AlterModelOptions( - name='artproject', - options={'base_manager_name': 'objects'}, + name="artproject", + options={"base_manager_name": "objects"}, ), migrations.AlterModelOptions( - name='project', - options={'base_manager_name': 'objects'}, + name="project", + options={"base_manager_name": "objects"}, ), migrations.AlterModelOptions( - name='researchproject', - options={'base_manager_name': 'objects'}, + name="researchproject", + options={"base_manager_name": "objects"}, ), migrations.AddField( - model_name='project', - name='project_type', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='example.ProjectType'), + model_name="project", + name="project_type", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="example.ProjectType", + ), ), ] diff --git a/example/migrations/0006_auto_20181228_0752.py b/example/migrations/0006_auto_20181228_0752.py index 2cfb0c29..4126c797 100644 --- a/example/migrations/0006_auto_20181228_0752.py +++ b/example/migrations/0006_auto_20181228_0752.py @@ -1,32 +1,53 @@ # Generated by Django 2.1.4 on 2018-12-28 07:52 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('example', '0005_auto_20180922_1508'), + ("example", "0005_auto_20180922_1508"), ] operations = [ migrations.CreateModel( - name='AuthorBioMetadata', + name="AuthorBioMetadata", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('modified_at', models.DateTimeField(auto_now=True)), - ('body', models.TextField()), - ('bio', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='metadata', to='example.AuthorBio')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("modified_at", models.DateTimeField(auto_now=True)), + ("body", models.TextField()), + ( + "bio", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="metadata", + to="example.AuthorBio", + ), + ), ], options={ - 'ordering': ('id',), + "ordering": ("id",), }, ), migrations.AlterField( - model_name='comment', - name='author', - field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='example.Author'), + model_name="comment", + name="author", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="comments", + to="example.Author", + ), ), ] diff --git a/example/migrations/0007_artproject_description.py b/example/migrations/0007_artproject_description.py index 8dec0124..20f9d42e 100644 --- a/example/migrations/0007_artproject_description.py +++ b/example/migrations/0007_artproject_description.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('example', '0006_auto_20181228_0752'), + ("example", "0006_auto_20181228_0752"), ] operations = [ migrations.AddField( - model_name='artproject', - name='description', + model_name="artproject", + name="description", 
field=models.CharField(max_length=100, null=True), ), ] diff --git a/example/migrations/0008_labresults.py b/example/migrations/0008_labresults.py index 89323d77..e0a1d6ba 100644 --- a/example/migrations/0008_labresults.py +++ b/example/migrations/0008_labresults.py @@ -1,23 +1,38 @@ # Generated by Django 3.0.3 on 2020-02-06 10:24 -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('example', '0007_artproject_description'), + ("example", "0007_artproject_description"), ] operations = [ migrations.CreateModel( - name='LabResults', + name="LabResults", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('date', models.DateField()), - ('measurements', models.TextField()), - ('research_project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lab_results', to='example.ResearchProject')), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("date", models.DateField()), + ("measurements", models.TextField()), + ( + "research_project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="lab_results", + to="example.ResearchProject", + ), + ), ], ), ] diff --git a/example/models.py b/example/models.py index 4df4dc27..47537b57 100644 --- a/example/models.py +++ b/example/models.py @@ -11,6 +11,7 @@ class BaseModel(models.Model): """ I hear RoR has this by default, who doesn't need these two fields! """ + created_at = models.DateTimeField(auto_now_add=True) modified_at = models.DateTimeField(auto_now=True) @@ -22,13 +23,13 @@ class TaggedItem(BaseModel): tag = models.SlugField() content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() - content_object = GenericForeignKey('content_type', 'object_id') + content_object = GenericForeignKey("content_type", "object_id") def __str__(self): return self.tag class Meta: - ordering = ('id',) + ordering = ("id",) class Blog(BaseModel): @@ -40,7 +41,7 @@ def __str__(self): return self.name class Meta: - ordering = ('id',) + ordering = ("id",) class AuthorType(BaseModel): @@ -50,7 +51,7 @@ def __str__(self): return self.name class Meta: - ordering = ('id',) + ordering = ("id",) class Author(BaseModel): @@ -62,32 +63,35 @@ def __str__(self): return self.name class Meta: - ordering = ('id',) + ordering = ("id",) class AuthorBio(BaseModel): - author = models.OneToOneField(Author, related_name='bio', on_delete=models.CASCADE) + author = models.OneToOneField(Author, related_name="bio", on_delete=models.CASCADE) body = models.TextField() def __str__(self): return self.author.name class Meta: - ordering = ('id',) + ordering = ("id",) class AuthorBioMetadata(BaseModel): """ Just a class to have a relation with author bio """ - bio = models.OneToOneField(AuthorBio, related_name='metadata', on_delete=models.CASCADE) + + bio = models.OneToOneField( + AuthorBio, related_name="metadata", on_delete=models.CASCADE + ) body = models.TextField() def __str__(self): return self.bio.author.name class Meta: - ordering = ('id',) + ordering = ("id",) class Entry(BaseModel): @@ -96,7 +100,7 @@ class Entry(BaseModel): body_text = models.TextField(null=True) pub_date = models.DateField(null=True) mod_date = models.DateField(null=True) - authors = models.ManyToManyField(Author, related_name='entries') + authors = 
models.ManyToManyField(Author, related_name="entries") n_comments = models.IntegerField(default=0) n_pingbacks = models.IntegerField(default=0) rating = models.IntegerField(default=0) @@ -106,25 +110,25 @@ def __str__(self): return self.headline class Meta: - ordering = ('id',) + ordering = ("id",) class Comment(BaseModel): - entry = models.ForeignKey(Entry, related_name='comments', on_delete=models.CASCADE) + entry = models.ForeignKey(Entry, related_name="comments", on_delete=models.CASCADE) body = models.TextField() author = models.ForeignKey( Author, null=True, blank=True, on_delete=models.CASCADE, - related_name='comments', + related_name="comments", ) def __str__(self): return self.body class Meta: - ordering = ('id',) + ordering = ("id",) class ProjectType(BaseModel): @@ -134,7 +138,7 @@ def __str__(self): return self.name class Meta: - ordering = ('id',) + ordering = ("id",) class Project(PolymorphicModel): @@ -153,7 +157,8 @@ class ResearchProject(Project): class LabResults(models.Model): research_project = models.ForeignKey( - ResearchProject, related_name='lab_results', on_delete=models.CASCADE) + ResearchProject, related_name="lab_results", on_delete=models.CASCADE + ) date = models.DateField() measurements = models.TextField() @@ -161,7 +166,8 @@ class LabResults(models.Model): class Company(models.Model): name = models.CharField(max_length=100) current_project = models.ForeignKey( - Project, related_name='companies', on_delete=models.CASCADE) + Project, related_name="companies", on_delete=models.CASCADE + ) future_projects = models.ManyToManyField(Project) def __str__(self): diff --git a/example/serializers.py b/example/serializers.py index 566f39d5..7a923d4e 100644 --- a/example/serializers.py +++ b/example/serializers.py @@ -19,23 +19,24 @@ Project, ProjectType, ResearchProject, - TaggedItem + TaggedItem, ) class TaggedItemSerializer(serializers.ModelSerializer): class Meta: model = TaggedItem - fields = ('tag',) + fields = ("tag",) class TaggedItemDRFSerializer(drf_serilazers.ModelSerializer): """ DRF default serializer to test default DRF functionalities """ + class Meta: model = TaggedItem - fields = ('tag',) + fields = ("tag",) class BlogSerializer(serializers.ModelSerializer): @@ -43,28 +44,27 @@ class BlogSerializer(serializers.ModelSerializer): tags = relations.ResourceRelatedField(many=True, read_only=True) included_serializers = { - 'tags': 'example.serializers.TaggedItemSerializer', + "tags": "example.serializers.TaggedItemSerializer", } def get_copyright(self, resource): return datetime.now().year def get_root_meta(self, resource, many): - return { - 'api_docs': '/docs/api/blogs' - } + return {"api_docs": "/docs/api/blogs"} class Meta: model = Blog - fields = ('name', 'url', 'tags') - read_only_fields = ('tags',) - meta_fields = ('copyright',) + fields = ("name", "url", "tags") + read_only_fields = ("tags",) + meta_fields = ("copyright",) class BlogDRFSerializer(drf_serilazers.ModelSerializer): """ DRF default serializer to test default DRF functionalities """ + copyright = serializers.SerializerMethodField() tags = TaggedItemDRFSerializer(many=True, read_only=True) @@ -72,15 +72,13 @@ def get_copyright(self, resource): return datetime.now().year def get_root_meta(self, resource, many): - return { - 'api_docs': '/docs/api/blogs' - } + return {"api_docs": "/docs/api/blogs"} class Meta: model = Blog - fields = ('name', 'url', 'tags', 'copyright') - read_only_fields = ('tags',) - meta_fields = ('copyright',) + fields = ("name", "url", "tags", "copyright") + 
read_only_fields = ("tags",) + meta_fields = ("copyright",) class EntrySerializer(serializers.ModelSerializer): @@ -88,52 +86,51 @@ def __init__(self, *args, **kwargs): super(EntrySerializer, self).__init__(*args, **kwargs) # to make testing more concise we'll only output the # `featured` field when it's requested via `include` - request = kwargs.get('context', {}).get('request') - if request and 'featured' not in request.query_params.get('include', []): - self.fields.pop('featured', None) + request = kwargs.get("context", {}).get("request") + if request and "featured" not in request.query_params.get("include", []): + self.fields.pop("featured", None) included_serializers = { - 'authors': 'example.serializers.AuthorSerializer', - 'comments': 'example.serializers.CommentSerializer', - 'featured': 'example.serializers.EntrySerializer', - 'suggested': 'example.serializers.EntrySerializer', - 'tags': 'example.serializers.TaggedItemSerializer', + "authors": "example.serializers.AuthorSerializer", + "comments": "example.serializers.CommentSerializer", + "featured": "example.serializers.EntrySerializer", + "suggested": "example.serializers.EntrySerializer", + "tags": "example.serializers.TaggedItemSerializer", } body_format = serializers.SerializerMethodField() # single related from model blog_hyperlinked = relations.HyperlinkedRelatedField( - related_link_view_name='entry-blog', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-blog", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", read_only=True, - source='blog' + source="blog", ) # many related from model - comments = relations.ResourceRelatedField( - many=True, read_only=True) + comments = relations.ResourceRelatedField(many=True, read_only=True) # many related hyperlinked from model comments_hyperlinked = relations.HyperlinkedRelatedField( - related_link_view_name='entry-comments', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-comments", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", many=True, read_only=True, - source='comments' + source="comments", ) # many related from serializer suggested = relations.SerializerMethodResourceRelatedField( - related_link_view_name='entry-suggested', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-suggested", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", model=Entry, many=True, ) # many related hyperlinked from serializer suggested_hyperlinked = relations.SerializerMethodHyperlinkedRelatedField( - related_link_view_name='entry-suggested', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-suggested", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", model=Entry, many=True, ) @@ -141,11 +138,11 @@ def __init__(self, *args, **kwargs): featured = relations.SerializerMethodResourceRelatedField(model=Entry) # single related hyperlinked from serializer featured_hyperlinked = relations.SerializerMethodHyperlinkedRelatedField( - related_link_view_name='entry-featured', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-featured", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", model=Entry, - read_only=True + read_only=True, ) 
tags = relations.ResourceRelatedField(many=True, read_only=True) @@ -156,106 +153,126 @@ def get_featured(self, obj): return Entry.objects.exclude(pk=obj.pk).first() def get_body_format(self, obj): - return 'text' + return "text" class Meta: model = Entry - fields = ('blog', 'blog_hyperlinked', 'headline', 'body_text', 'pub_date', 'mod_date', - 'authors', 'comments', 'comments_hyperlinked', 'featured', 'suggested', - 'suggested_hyperlinked', 'tags', 'featured_hyperlinked') - read_only_fields = ('tags',) - meta_fields = ('body_format',) + fields = ( + "blog", + "blog_hyperlinked", + "headline", + "body_text", + "pub_date", + "mod_date", + "authors", + "comments", + "comments_hyperlinked", + "featured", + "suggested", + "suggested_hyperlinked", + "tags", + "featured_hyperlinked", + ) + read_only_fields = ("tags",) + meta_fields = ("body_format",) class JSONAPIMeta: - included_resources = ['comments'] + included_resources = ["comments"] class EntryDRFSerializers(drf_serilazers.ModelSerializer): tags = TaggedItemDRFSerializer(many=True, read_only=True) url = drf_serilazers.HyperlinkedIdentityField( - view_name='drf-entry-blog-detail', - lookup_url_kwarg='entry_pk', + view_name="drf-entry-blog-detail", + lookup_url_kwarg="entry_pk", read_only=True, ) class Meta: model = Entry - fields = ('tags', 'url',) - read_only_fields = ('tags',) + fields = ( + "tags", + "url", + ) + read_only_fields = ("tags",) class AuthorTypeSerializer(serializers.ModelSerializer): class Meta: model = AuthorType - fields = ('name', ) + fields = ("name",) class AuthorBioSerializer(serializers.ModelSerializer): class Meta: model = AuthorBio - fields = ('author', 'body', 'metadata') + fields = ("author", "body", "metadata") included_serializers = { - 'metadata': 'example.serializers.AuthorBioMetadataSerializer', + "metadata": "example.serializers.AuthorBioMetadataSerializer", } class AuthorBioMetadataSerializer(serializers.ModelSerializer): class Meta: model = AuthorBioMetadata - fields = ('body',) + fields = ("body",) class AuthorSerializer(serializers.ModelSerializer): bio = relations.ResourceRelatedField( - related_link_view_name='author-related', - self_link_view_name='author-relationships', + related_link_view_name="author-related", + self_link_view_name="author-relationships", queryset=AuthorBio.objects, ) entries = relations.ResourceRelatedField( - related_link_view_name='author-related', - self_link_view_name='author-relationships', + related_link_view_name="author-related", + self_link_view_name="author-relationships", queryset=Entry.objects, - many=True + many=True, ) first_entry = relations.SerializerMethodResourceRelatedField( - related_link_view_name='author-related', - self_link_view_name='author-relationships', + related_link_view_name="author-related", + self_link_view_name="author-relationships", model=Entry, ) comments = relations.HyperlinkedRelatedField( - related_link_view_name='author-related', - self_link_view_name='author-relationships', + related_link_view_name="author-related", + self_link_view_name="author-relationships", queryset=Comment.objects, - many=True - ) - secrets = serializers.HiddenField( - default='Shhhh!' 
+ many=True, ) + secrets = serializers.HiddenField(default="Shhhh!") defaults = serializers.CharField( - default='default', + default="default", max_length=20, min_length=3, write_only=True, - help_text='help for defaults', + help_text="help for defaults", ) - included_serializers = { - 'bio': AuthorBioSerializer, - 'type': AuthorTypeSerializer - } + included_serializers = {"bio": AuthorBioSerializer, "type": AuthorTypeSerializer} related_serializers = { - 'bio': 'example.serializers.AuthorBioSerializer', - 'type': 'example.serializers.AuthorTypeSerializer', - 'comments': 'example.serializers.CommentSerializer', - 'entries': 'example.serializers.EntrySerializer', - 'first_entry': 'example.serializers.EntrySerializer' + "bio": "example.serializers.AuthorBioSerializer", + "type": "example.serializers.AuthorTypeSerializer", + "comments": "example.serializers.CommentSerializer", + "entries": "example.serializers.EntrySerializer", + "first_entry": "example.serializers.EntrySerializer", } class Meta: model = Author - fields = ('name', 'email', 'bio', 'entries', 'comments', 'first_entry', 'type', - 'secrets', 'defaults') + fields = ( + "name", + "email", + "bio", + "entries", + "comments", + "first_entry", + "type", + "secrets", + "defaults", + ) def get_first_entry(self, obj): return obj.entries.first() @@ -270,48 +287,52 @@ class AuthorDetailSerializer(AuthorSerializer): class WriterSerializer(serializers.ModelSerializer): - included_serializers = { - 'bio': AuthorBioSerializer - } + included_serializers = {"bio": AuthorBioSerializer} class Meta: model = Author - fields = ('name', 'email', 'bio') - resource_name = 'writers' + fields = ("name", "email", "bio") + resource_name = "writers" class CommentSerializer(serializers.ModelSerializer): # testing remapping of related name - writer = relations.ResourceRelatedField(source='author', read_only=True) + writer = relations.ResourceRelatedField(source="author", read_only=True) included_serializers = { - 'entry': EntrySerializer, - 'author': AuthorSerializer, - 'writer': WriterSerializer + "entry": EntrySerializer, + "author": AuthorSerializer, + "writer": WriterSerializer, } class Meta: model = Comment - exclude = ('created_at', 'modified_at',) + exclude = ( + "created_at", + "modified_at", + ) # fields = ('entry', 'body', 'author',) class ProjectTypeSerializer(serializers.ModelSerializer): class Meta: model = ProjectType - fields = ('name', 'url',) + fields = ( + "name", + "url", + ) class BaseProjectSerializer(serializers.ModelSerializer): included_serializers = { - 'project_type': ProjectTypeSerializer, + "project_type": ProjectTypeSerializer, } class ArtProjectSerializer(BaseProjectSerializer): class Meta: model = ArtProject - exclude = ('polymorphic_ctype',) + exclude = ("polymorphic_ctype",) class ResearchProjectSerializer(BaseProjectSerializer): @@ -320,37 +341,35 @@ class ResearchProjectSerializer(BaseProjectSerializer): class Meta: model = ResearchProject - exclude = ('polymorphic_ctype',) + exclude = ("polymorphic_ctype",) class LabResultsSerializer(serializers.ModelSerializer): class Meta: model = LabResults - fields = ('date', 'measurements') + fields = ("date", "measurements") class ProjectSerializer(serializers.PolymorphicModelSerializer): included_serializers = { - 'project_type': ProjectTypeSerializer, + "project_type": ProjectTypeSerializer, } polymorphic_serializers = [ArtProjectSerializer, ResearchProjectSerializer] class Meta: model = Project - exclude = ('polymorphic_ctype',) + exclude = ("polymorphic_ctype",) class 
CurrentProjectRelatedField(relations.PolymorphicResourceRelatedField): def get_attribute(self, instance): obj = super(CurrentProjectRelatedField, self).get_attribute(instance) - is_art = ( - self.field_name == 'current_art_project' and - isinstance(obj, ArtProject) + is_art = self.field_name == "current_art_project" and isinstance( + obj, ArtProject ) - is_res = ( - self.field_name == 'current_research_project' and - isinstance(obj, ResearchProject) + is_res = self.field_name == "current_research_project" and isinstance( + obj, ResearchProject ) if is_art or is_res: @@ -361,21 +380,25 @@ def get_attribute(self, instance): class CompanySerializer(serializers.ModelSerializer): current_project = relations.PolymorphicResourceRelatedField( - ProjectSerializer, queryset=Project.objects.all()) + ProjectSerializer, queryset=Project.objects.all() + ) current_art_project = CurrentProjectRelatedField( - ProjectSerializer, source='current_project', read_only=True) + ProjectSerializer, source="current_project", read_only=True + ) current_research_project = CurrentProjectRelatedField( - ProjectSerializer, source='current_project', read_only=True) + ProjectSerializer, source="current_project", read_only=True + ) future_projects = relations.PolymorphicResourceRelatedField( - ProjectSerializer, queryset=Project.objects.all(), many=True) + ProjectSerializer, queryset=Project.objects.all(), many=True + ) included_serializers = { - 'current_project': ProjectSerializer, - 'future_projects': ProjectSerializer, - 'current_art_project': ProjectSerializer, - 'current_research_project': ProjectSerializer + "current_project": ProjectSerializer, + "future_projects": ProjectSerializer, + "current_art_project": ProjectSerializer, + "current_research_project": ProjectSerializer, } class Meta: model = Company - fields = '__all__' + fields = "__all__" diff --git a/example/settings/dev.py b/example/settings/dev.py index d0e19fd2..2db2c259 100644 --- a/example/settings/dev.py +++ b/example/settings/dev.py @@ -4,100 +4,96 @@ DEBUG = True MEDIA_ROOT = os.path.normcase(os.path.dirname(os.path.abspath(__file__))) -MEDIA_URL = '/media/' +MEDIA_URL = "/media/" -DATABASE_ENGINE = 'sqlite3' +DATABASE_ENGINE = "sqlite3" DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': 'drf_example', + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": "drf_example", } } INSTALLED_APPS = [ - 'django.contrib.contenttypes', - 'django.contrib.staticfiles', - 'django.contrib.sites', - 'django.contrib.sessions', - 'django.contrib.auth', - 'rest_framework_json_api', - 'rest_framework', - 'polymorphic', - 'example', - 'debug_toolbar', - 'django_filters', + "django.contrib.contenttypes", + "django.contrib.staticfiles", + "django.contrib.sites", + "django.contrib.sessions", + "django.contrib.auth", + "rest_framework_json_api", + "rest_framework", + "polymorphic", + "example", + "debug_toolbar", + "django_filters", ] TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [ + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [ # insert your TEMPLATE_DIRS here ], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this # list if you haven't customized them: - 'django.contrib.auth.context_processors.auth', - 'django.template.context_processors.debug', - 'django.template.context_processors.i18n', - 'django.template.context_processors.media', 
- 'django.template.context_processors.static', - 'django.template.context_processors.tz', - 'django.contrib.messages.context_processors.messages', + "django.contrib.auth.context_processors.auth", + "django.template.context_processors.debug", + "django.template.context_processors.i18n", + "django.template.context_processors.media", + "django.template.context_processors.static", + "django.template.context_processors.tz", + "django.contrib.messages.context_processors.messages", ], }, }, ] -STATIC_URL = '/static/' +STATIC_URL = "/static/" -ROOT_URLCONF = 'example.urls' +ROOT_URLCONF = "example.urls" -SECRET_KEY = 'abc123' +SECRET_KEY = "abc123" -PASSWORD_HASHERS = ('django.contrib.auth.hashers.UnsaltedMD5PasswordHasher', ) +PASSWORD_HASHERS = ("django.contrib.auth.hashers.UnsaltedMD5PasswordHasher",) -MIDDLEWARE = ( - 'debug_toolbar.middleware.DebugToolbarMiddleware', -) +MIDDLEWARE = ("debug_toolbar.middleware.DebugToolbarMiddleware",) -INTERNAL_IPS = ('127.0.0.1', ) +INTERNAL_IPS = ("127.0.0.1",) -JSON_API_FORMAT_FIELD_NAMES = 'camelize' -JSON_API_FORMAT_TYPES = 'camelize' +JSON_API_FORMAT_FIELD_NAMES = "camelize" +JSON_API_FORMAT_TYPES = "camelize" REST_FRAMEWORK = { - 'PAGE_SIZE': 5, - 'EXCEPTION_HANDLER': 'rest_framework_json_api.exceptions.exception_handler', - 'DEFAULT_PAGINATION_CLASS': - 'rest_framework_json_api.pagination.JsonApiPageNumberPagination', - 'DEFAULT_PARSER_CLASSES': ( - 'rest_framework_json_api.parsers.JSONParser', - 'rest_framework.parsers.FormParser', - 'rest_framework.parsers.MultiPartParser' + "PAGE_SIZE": 5, + "EXCEPTION_HANDLER": "rest_framework_json_api.exceptions.exception_handler", + "DEFAULT_PAGINATION_CLASS": "rest_framework_json_api.pagination.JsonApiPageNumberPagination", + "DEFAULT_PARSER_CLASSES": ( + "rest_framework_json_api.parsers.JSONParser", + "rest_framework.parsers.FormParser", + "rest_framework.parsers.MultiPartParser", ), - 'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework_json_api.renderers.JSONRenderer', - + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework_json_api.renderers.JSONRenderer", # If you're performance testing, you will want to use the browseable API # without forms, as the forms can generate their own queries. 
# If performance testing, enable: # 'example.utils.BrowsableAPIRendererWithoutForms', # Otherwise, to play around with the browseable API, enable: - 'rest_framework_json_api.renderers.BrowsableAPIRenderer', + "rest_framework_json_api.renderers.BrowsableAPIRenderer", ), - 'DEFAULT_METADATA_CLASS': 'rest_framework_json_api.metadata.JSONAPIMetadata', - 'DEFAULT_SCHEMA_CLASS': 'rest_framework_json_api.schemas.openapi.AutoSchema', - 'DEFAULT_FILTER_BACKENDS': ( - 'rest_framework_json_api.filters.OrderingFilter', - 'rest_framework_json_api.django_filters.DjangoFilterBackend', - 'rest_framework.filters.SearchFilter', + "DEFAULT_METADATA_CLASS": "rest_framework_json_api.metadata.JSONAPIMetadata", + "DEFAULT_SCHEMA_CLASS": "rest_framework_json_api.schemas.openapi.AutoSchema", + "DEFAULT_FILTER_BACKENDS": ( + "rest_framework_json_api.filters.OrderingFilter", + "rest_framework_json_api.django_filters.DjangoFilterBackend", + "rest_framework.filters.SearchFilter", ), - 'SEARCH_PARAM': 'filter[search]', - 'TEST_REQUEST_RENDERER_CLASSES': ( - 'rest_framework_json_api.renderers.JSONRenderer', + "SEARCH_PARAM": "filter[search]", + "TEST_REQUEST_RENDERER_CLASSES": ( + "rest_framework_json_api.renderers.JSONRenderer", ), - 'TEST_REQUEST_DEFAULT_FORMAT': 'vnd.api+json' + "TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json", } diff --git a/example/settings/test.py b/example/settings/test.py index b47c3fe5..5fa040d6 100644 --- a/example/settings/test.py +++ b/example/settings/test.py @@ -1,18 +1,20 @@ from .dev import * # noqa DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': ':memory:', + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", } } -ROOT_URLCONF = 'example.urls_test' +ROOT_URLCONF = "example.urls_test" -JSON_API_FORMAT_FIELD_NAMES = 'camelize' -JSON_API_FORMAT_TYPES = 'camelize' +JSON_API_FORMAT_FIELD_NAMES = "camelize" +JSON_API_FORMAT_TYPES = "camelize" JSON_API_PLURALIZE_TYPES = True -REST_FRAMEWORK.update({ # noqa - 'PAGE_SIZE': 1, -}) +REST_FRAMEWORK.update( + { # noqa + "PAGE_SIZE": 1, + } +) diff --git a/example/tests/__init__.py b/example/tests/__init__.py index 30a04e22..daca2310 100644 --- a/example/tests/__init__.py +++ b/example/tests/__init__.py @@ -1,4 +1,3 @@ - from django.contrib.auth import get_user_model from rest_framework.test import APITestCase @@ -15,15 +14,12 @@ def setUp(self): super(TestBase, self).setUp() self.create_users() - def create_user(self, username, email, password="pw", - first_name='', last_name=''): + def create_user(self, username, email, password="pw", first_name="", last_name=""): """ Helper method to create a user """ User = get_user_model() - user = User.objects.create_user( - username, email, password=password - ) + user = User.objects.create_user(username, email, password=password) if first_name or last_name: user.first_name = first_name user.last_name = last_name @@ -35,8 +31,8 @@ def create_users(self): Create a couple users """ self.john = self.create_user( - 'trane', 'john@example.com', - first_name='John', last_name="Coltrane") + "trane", "john@example.com", first_name="John", last_name="Coltrane" + ) self.miles = self.create_user( - 'miles', 'miles@example.com', - first_name="Miles", last_name="Davis") + "miles", "miles@example.com", first_name="Miles", last_name="Davis" + ) diff --git a/example/tests/conftest.py b/example/tests/conftest.py index de2bea19..df5bbdfc 100644 --- a/example/tests/conftest.py +++ b/example/tests/conftest.py @@ -13,7 +13,7 @@ CompanyFactory, EntryFactory, 
ResearchProjectFactory, - TaggedItemFactory + TaggedItemFactory, ) register(BlogFactory) @@ -59,7 +59,9 @@ def single_comment(blog, author, entry_factory, comment_factory): @pytest.fixture def single_company(art_project_factory, research_project_factory, company_factory): - company = company_factory(future_projects=(research_project_factory(), art_project_factory())) + company = company_factory( + future_projects=(research_project_factory(), art_project_factory()) + ) return company diff --git a/example/tests/integration/test_browsable_api.py b/example/tests/integration/test_browsable_api.py index d4bc3fbb..9156eb92 100644 --- a/example/tests/integration/test_browsable_api.py +++ b/example/tests/integration/test_browsable_api.py @@ -8,22 +8,18 @@ def test_browsable_api_with_included_serializers(single_entry, client): response = client.get( - reverse( - "entry-detail", - kwargs={'pk': single_entry.pk, 'format': 'api'} - ) + reverse("entry-detail", kwargs={"pk": single_entry.pk, "format": "api"}) ) content = str(response.content) assert response.status_code == 200 - assert re.search(r'JSON:API includes', content) + assert re.search(r"JSON:API includes", content) assert re.search( - r']* value="authors.bio"', - content + r']* value="authors.bio"', content ) def test_browsable_api_with_no_included_serializers(client): - response = client.get(reverse("projecttype-list", kwargs={'format': 'api'})) + response = client.get(reverse("projecttype-list", kwargs={"format": "api"})) content = str(response.content) assert response.status_code == 200 - assert not re.search(r'JSON:API includes', content) + assert not re.search(r"JSON:API includes", content) diff --git a/example/tests/integration/test_includes.py b/example/tests/integration/test_includes.py index 953052de..0ee0d4fd 100644 --- a/example/tests/integration/test_includes.py +++ b/example/tests/integration/test_includes.py @@ -5,174 +5,256 @@ def test_included_data_on_list(multiple_entries, client): - response = client.get(reverse("entry-list"), data={'include': 'comments', 'page[size]': 5}) - included = response.json().get('included') - - assert len(response.json()['data']) == len(multiple_entries), ( - 'Incorrect entry count' + response = client.get( + reverse("entry-list"), data={"include": "comments", "page[size]": 5} ) - assert [x.get('type') for x in included] == ['comments', 'comments'], ( - 'List included types are incorrect' + included = response.json().get("included") + + assert len(response.json()["data"]) == len( + multiple_entries + ), "Incorrect entry count" + assert [x.get("type") for x in included] == [ + "comments", + "comments", + ], "List included types are incorrect" + + comment_count = len( + [resource for resource in included if resource["type"] == "comments"] ) - - comment_count = len([resource for resource in included if resource["type"] == "comments"]) expected_comment_count = sum([entry.comments.count() for entry in multiple_entries]) - assert comment_count == expected_comment_count, 'List comment count is incorrect' + assert comment_count == expected_comment_count, "List comment count is incorrect" def test_included_data_on_list_with_one_to_one_relations(multiple_entries, client): - response = client.get(reverse("entry-list"), - data={'include': 'authors.bio.metadata', 'page[size]': 5}) - included = response.json().get('included') - - assert len(response.json()['data']) == len(multiple_entries), ( - 'Incorrect entry count' + response = client.get( + reverse("entry-list"), data={"include": "authors.bio.metadata", 
"page[size]": 5} ) + included = response.json().get("included") + + assert len(response.json()["data"]) == len( + multiple_entries + ), "Incorrect entry count" expected_include_types = [ - 'authorBioMetadata', 'authorBioMetadata', - 'authorBios', 'authorBios', - 'authors', 'authors' + "authorBioMetadata", + "authorBioMetadata", + "authorBios", + "authorBios", + "authors", + "authors", ] - include_types = [x.get('type') for x in included] - assert include_types == expected_include_types, ( - 'List included types are incorrect' - ) + include_types = [x.get("type") for x in included] + assert include_types == expected_include_types, "List included types are incorrect" def test_default_included_data_on_detail(single_entry, client): - return test_included_data_on_detail(single_entry=single_entry, client=client, query='') + return test_included_data_on_detail( + single_entry=single_entry, client=client, query="" + ) -def test_included_data_on_detail(single_entry, client, query='?include=comments'): - response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + query) - included = response.json().get('included') +def test_included_data_on_detail(single_entry, client, query="?include=comments"): + response = client.get( + reverse("entry-detail", kwargs={"pk": single_entry.pk}) + query + ) + included = response.json().get("included") - assert [x.get('type') for x in included] == ['comments'], 'Detail included types are incorrect' + assert [x.get("type") for x in included] == [ + "comments" + ], "Detail included types are incorrect" - comment_count = len([resource for resource in included if resource["type"] == "comments"]) + comment_count = len( + [resource for resource in included if resource["type"] == "comments"] + ) expected_comment_count = single_entry.comments.count() - assert comment_count == expected_comment_count, 'Detail comment count is incorrect' + assert comment_count == expected_comment_count, "Detail comment count is incorrect" def test_dynamic_related_data_is_included(single_entry, entry_factory, client): entry_factory() response = client.get( - reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=featured' + reverse("entry-detail", kwargs={"pk": single_entry.pk}) + "?include=featured" ) - included = response.json().get('included') + included = response.json().get("included") - assert [x.get('type') for x in included] == ['entries'], 'Dynamic included types are incorrect' - assert len(included) == 1, 'The dynamically included blog entries are of an incorrect count' + assert [x.get("type") for x in included] == [ + "entries" + ], "Dynamic included types are incorrect" + assert ( + len(included) == 1 + ), "The dynamically included blog entries are of an incorrect count" def test_dynamic_many_related_data_is_included(single_entry, entry_factory, client): entry_factory() response = client.get( - reverse("entry-detail", kwargs={'pk': single_entry.pk}) + '?include=suggested' + reverse("entry-detail", kwargs={"pk": single_entry.pk}) + "?include=suggested" ) - included = response.json().get('included') + included = response.json().get("included") assert included - assert [x.get('type') for x in included] == ['entries'], 'Dynamic included types are incorrect' + assert [x.get("type") for x in included] == [ + "entries" + ], "Dynamic included types are incorrect" def test_missing_field_not_included(author_bio_factory, author_factory, client): # First author does not have a bio author = author_factory(bio=None) - response = client.get(reverse('author-detail', 
args=[author.pk]) + '?include=bio') - assert 'included' not in response.json() + response = client.get(reverse("author-detail", args=[author.pk]) + "?include=bio") + assert "included" not in response.json() # Second author does author = author_factory() - response = client.get(reverse('author-detail', args=[author.pk]) + '?include=bio') + response = client.get(reverse("author-detail", args=[author.pk]) + "?include=bio") data = response.json() - assert 'included' in data - assert len(data['included']) == 1 - assert data['included'][0]['attributes']['body'] == author.bio.body + assert "included" in data + assert len(data["included"]) == 1 + assert data["included"][0]["attributes"]["body"] == author.bio.body def test_deep_included_data_on_list(multiple_entries, client): - response = client.get(reverse("entry-list") + '?include=comments,comments.author,' - 'comments.author.bio,comments.writer&page[size]=5') - included = response.json().get('included') - - assert len(response.json()['data']) == len(multiple_entries), ( - 'Incorrect entry count' + response = client.get( + reverse("entry-list") + "?include=comments,comments.author," + "comments.author.bio,comments.writer&page[size]=5" + ) + included = response.json().get("included") + + assert len(response.json()["data"]) == len( + multiple_entries + ), "Incorrect entry count" + assert [x.get("type") for x in included] == [ + "authorBios", + "authorBios", + "authors", + "authors", + "comments", + "comments", + "writers", + "writers", + ], "List included types are incorrect" + + comment_count = len( + [resource for resource in included if resource["type"] == "comments"] ) - assert [x.get('type') for x in included] == [ - 'authorBios', 'authorBios', 'authors', 'authors', - 'comments', 'comments', 'writers', 'writers' - ], 'List included types are incorrect' - - comment_count = len([resource for resource in included if resource["type"] == "comments"]) expected_comment_count = sum([entry.comments.count() for entry in multiple_entries]) - assert comment_count == expected_comment_count, 'List comment count is incorrect' + assert comment_count == expected_comment_count, "List comment count is incorrect" - author_count = len([resource for resource in included if resource["type"] == "authors"]) + author_count = len( + [resource for resource in included if resource["type"] == "authors"] + ) expected_author_count = sum( - [entry.comments.filter(author__isnull=False).count() for entry in multiple_entries]) - assert author_count == expected_author_count, 'List author count is incorrect' + [ + entry.comments.filter(author__isnull=False).count() + for entry in multiple_entries + ] + ) + assert author_count == expected_author_count, "List author count is incorrect" - author_bio_count = len([resource for resource in included if resource["type"] == "authorBios"]) - expected_author_bio_count = sum([entry.comments.filter( - author__bio__isnull=False).count() for entry in multiple_entries]) - assert author_bio_count == expected_author_bio_count, 'List author bio count is incorrect' + author_bio_count = len( + [resource for resource in included if resource["type"] == "authorBios"] + ) + expected_author_bio_count = sum( + [ + entry.comments.filter(author__bio__isnull=False).count() + for entry in multiple_entries + ] + ) + assert ( + author_bio_count == expected_author_bio_count + ), "List author bio count is incorrect" writer_count = len( [resource for resource in included if resource["type"] == "writers"] ) expected_writer_count = sum( - 
[entry.comments.filter(author__isnull=False).count() for entry in multiple_entries]) - assert writer_count == expected_writer_count, 'List writer count is incorrect' + [ + entry.comments.filter(author__isnull=False).count() + for entry in multiple_entries + ] + ) + assert writer_count == expected_writer_count, "List writer count is incorrect" # Also include entry authors - response = client.get(reverse("entry-list") + '?include=authors,comments,comments.author,' - 'comments.author.bio&page[size]=5') - included = response.json().get('included') - - assert len(response.json()['data']) == len(multiple_entries), ( - 'Incorrect entry count' + response = client.get( + reverse("entry-list") + "?include=authors,comments,comments.author," + "comments.author.bio&page[size]=5" + ) + included = response.json().get("included") + + assert len(response.json()["data"]) == len( + multiple_entries + ), "Incorrect entry count" + assert [x.get("type") for x in included] == [ + "authorBios", + "authorBios", + "authors", + "authors", + "authors", + "authors", + "comments", + "comments", + ], "List included types are incorrect" + + author_count = len( + [resource for resource in included if resource["type"] == "authors"] ) - assert [x.get('type') for x in included] == [ - 'authorBios', 'authorBios', 'authors', 'authors', 'authors', 'authors', - 'comments', 'comments'], 'List included types are incorrect' - - author_count = len([resource for resource in included if resource["type"] == "authors"]) expected_author_count = sum( - [entry.authors.count() for entry in multiple_entries] + - [entry.comments.filter(author__isnull=False).count() for entry in multiple_entries]) - assert author_count == expected_author_count, 'List author count is incorrect' + [entry.authors.count() for entry in multiple_entries] + + [ + entry.comments.filter(author__isnull=False).count() + for entry in multiple_entries + ] + ) + assert author_count == expected_author_count, "List author count is incorrect" def test_deep_included_data_on_detail(single_entry, client): # Same test as in list but also ensures that intermediate resources (here comments' authors) # are returned along with the leaf nodes - response = client.get(reverse("entry-detail", kwargs={'pk': single_entry.pk}) + - '?include=comments,comments.author.bio') - included = response.json().get('included') + response = client.get( + reverse("entry-detail", kwargs={"pk": single_entry.pk}) + + "?include=comments,comments.author.bio" + ) + included = response.json().get("included") - assert [x.get('type') for x in included] == ['authorBios', 'authors', 'comments'], \ - 'Detail included types are incorrect' + assert [x.get("type") for x in included] == [ + "authorBios", + "authors", + "comments", + ], "Detail included types are incorrect" - comment_count = len([resource for resource in included if resource["type"] == "comments"]) + comment_count = len( + [resource for resource in included if resource["type"] == "comments"] + ) expected_comment_count = single_entry.comments.count() - assert comment_count == expected_comment_count, 'Detail comment count is incorrect' + assert comment_count == expected_comment_count, "Detail comment count is incorrect" - author_bio_count = len([resource for resource in included if resource["type"] == "authorBios"]) - expected_author_bio_count = single_entry.comments.filter(author__bio__isnull=False).count() - assert author_bio_count == expected_author_bio_count, 'Detail author bio count is incorrect' + author_bio_count = len( + [resource for resource in 
included if resource["type"] == "authorBios"] + ) + expected_author_bio_count = single_entry.comments.filter( + author__bio__isnull=False + ).count() + assert ( + author_bio_count == expected_author_bio_count + ), "Detail author bio count is incorrect" def test_data_resource_not_included_again(single_comment, client): # This test makes sure that the resource which is in the data field is excluded # from the included field. - response = client.get(reverse("comment-detail", kwargs={'pk': single_comment.pk}) + - '?include=entry.comments') + response = client.get( + reverse("comment-detail", kwargs={"pk": single_comment.pk}) + + "?include=entry.comments" + ) - included = response.json().get('included') + included = response.json().get("included") - included_comments = [resource for resource in included if resource["type"] == "comments"] - assert single_comment.pk not in [int(x.get('id')) for x in included_comments], \ - "Resource of the data field duplicated in included" + included_comments = [ + resource for resource in included if resource["type"] == "comments" + ] + assert single_comment.pk not in [ + int(x.get("id")) for x in included_comments + ], "Resource of the data field duplicated in included" comment_count = len(included_comments) expected_comment_count = single_comment.entry.comments.count() diff --git a/example/tests/integration/test_meta.py b/example/tests/integration/test_meta.py index 25457b1c..f54cda8e 100644 --- a/example/tests/integration/test_meta.py +++ b/example/tests/integration/test_meta.py @@ -9,30 +9,26 @@ def test_top_level_meta_for_list_view(blog, client): expected = { - "data": [{ - "type": "blogs", - "id": "1", - "attributes": { - "name": blog.name - }, - "links": { - "self": 'http://testserver/blogs/1' - }, - 'relationships': {'tags': {'data': [], 'meta': {'count': 0}}}, - "meta": { - "copyright": datetime.now().year - }, - }], - 'links': { - 'first': 'http://testserver/blogs?page%5Bnumber%5D=1', - 'last': 'http://testserver/blogs?page%5Bnumber%5D=1', - 'next': None, - 'prev': None + "data": [ + { + "type": "blogs", + "id": "1", + "attributes": {"name": blog.name}, + "links": {"self": "http://testserver/blogs/1"}, + "relationships": {"tags": {"data": [], "meta": {"count": 0}}}, + "meta": {"copyright": datetime.now().year}, + } + ], + "links": { + "first": "http://testserver/blogs?page%5Bnumber%5D=1", + "last": "http://testserver/blogs?page%5Bnumber%5D=1", + "next": None, + "prev": None, + }, + "meta": { + "pagination": {"count": 1, "page": 1, "pages": 1}, + "apiDocs": "/docs/api/blogs", }, - 'meta': { - 'pagination': {'count': 1, 'page': 1, 'pages': 1}, - 'apiDocs': '/docs/api/blogs' - } } response = client.get(reverse("blog-list")) @@ -46,22 +42,14 @@ def test_top_level_meta_for_detail_view(blog, client): "data": { "type": "blogs", "id": "1", - "attributes": { - "name": blog.name - }, - 'relationships': {'tags': {'data': [], 'meta': {'count': 0}}}, - "links": { - "self": "http://testserver/blogs/1" - }, - "meta": { - "copyright": datetime.now().year - }, - }, - "meta": { - "apiDocs": "/docs/api/blogs" + "attributes": {"name": blog.name}, + "relationships": {"tags": {"data": [], "meta": {"count": 0}}}, + "links": {"self": "http://testserver/blogs/1"}, + "meta": {"copyright": datetime.now().year}, }, + "meta": {"apiDocs": "/docs/api/blogs"}, } - response = client.get(reverse("blog-detail", kwargs={'pk': blog.pk})) + response = client.get(reverse("blog-detail", kwargs={"pk": blog.pk})) assert expected == response.json() diff --git 
a/example/tests/integration/test_model_resource_name.py b/example/tests/integration/test_model_resource_name.py index a69503ae..2dacd2d5 100644 --- a/example/tests/integration/test_model_resource_name.py +++ b/example/tests/integration/test_model_resource_name.py @@ -18,90 +18,100 @@ def _check_resource_and_relationship_comment_type_match(django_client): entry_response = django_client.get(reverse("entry-list")) comment_response = django_client.get(reverse("comment-list")) - comment_resource_type = comment_response.json().get('data')[0].get('type') - comment_relationship_type = entry_response.json().get( - 'data')[0].get('relationships').get('comments').get('data')[0].get('type') - - assert comment_resource_type == comment_relationship_type, ( - "The resource type seen in the relationships and head resource do not match" + comment_resource_type = comment_response.json().get("data")[0].get("type") + comment_relationship_type = ( + entry_response.json() + .get("data")[0] + .get("relationships") + .get("comments") + .get("data")[0] + .get("type") ) + assert ( + comment_resource_type == comment_relationship_type + ), "The resource type seen in the relationships and head resource do not match" + def _check_relationship_and_included_comment_type_are_the_same(django_client, url): response = django_client.get(url + "?include=comments") - data = response.json().get('data')[0] - comment = response.json().get('included')[0] - - comment_relationship_type = data.get('relationships').get('comments').get('data')[0].get('type') - comment_included_type = comment.get('type') + data = response.json().get("data")[0] + comment = response.json().get("included")[0] - assert comment_relationship_type == comment_included_type, ( - "The resource type seen in the relationships and included do not match" + comment_relationship_type = ( + data.get("relationships").get("comments").get("data")[0].get("type") ) + comment_included_type = comment.get("type") + + assert ( + comment_relationship_type == comment_included_type + ), "The resource type seen in the relationships and included do not match" @pytest.mark.usefixtures("single_entry") class TestModelResourceName: create_data = { - 'data': { - 'type': 'resource_name_from_JSONAPIMeta', - 'id': None, - 'attributes': { - 'body': 'example', + "data": { + "type": "resource_name_from_JSONAPIMeta", + "id": None, + "attributes": { + "body": "example", + }, + "relationships": { + "entry": {"data": {"type": "resource_name_from_JSONAPIMeta", "id": 1}} }, - 'relationships': { - 'entry': { - 'data': { - 'type': 'resource_name_from_JSONAPIMeta', - 'id': 1 - } - } - } } } def test_model_resource_name_on_list(self, client): models.Comment.__bases__ += (_PatchedModel,) response = client.get(reverse("comment-list")) - data = response.json()['data'][0] + data = response.json()["data"][0] # name should be super-author instead of model name RenamedAuthor - assert (data.get('type') == 'resource_name_from_JSONAPIMeta'), ( - 'resource_name from model incorrect on list') + assert ( + data.get("type") == "resource_name_from_JSONAPIMeta" + ), "resource_name from model incorrect on list" # Precedence tests def test_resource_name_precendence(self, client, monkeypatch): # default response = client.get(reverse("comment-list")) - data = response.json()['data'][0] - assert (data.get('type') == 'comments'), ( - 'resource_name from model incorrect on list') + data = response.json()["data"][0] + assert ( + data.get("type") == "comments" + ), "resource_name from model incorrect on list" # model > default 
models.Comment.__bases__ += (_PatchedModel,) response = client.get(reverse("comment-list")) - data = response.json()['data'][0] - assert (data.get('type') == 'resource_name_from_JSONAPIMeta'), ( - 'resource_name from model incorrect on list') + data = response.json()["data"][0] + assert ( + data.get("type") == "resource_name_from_JSONAPIMeta" + ), "resource_name from model incorrect on list" # serializer > model monkeypatch.setattr( serializers.CommentSerializer.Meta, - 'resource_name', - 'resource_name_from_serializer', - False + "resource_name", + "resource_name_from_serializer", + False, ) response = client.get(reverse("comment-list")) - data = response.json()['data'][0] - assert (data.get('type') == 'resource_name_from_serializer'), ( - 'resource_name from serializer incorrect on list') + data = response.json()["data"][0] + assert ( + data.get("type") == "resource_name_from_serializer" + ), "resource_name from serializer incorrect on list" # view > serializer > model - monkeypatch.setattr(views.CommentViewSet, 'resource_name', 'resource_name_from_view', False) + monkeypatch.setattr( + views.CommentViewSet, "resource_name", "resource_name_from_view", False + ) response = client.get(reverse("comment-list")) - data = response.json()['data'][0] - assert (data.get('type') == 'resource_name_from_view'), ( - 'resource_name from view incorrect on list') + data = response.json()["data"][0] + assert ( + data.get("type") == "resource_name_from_view" + ), "resource_name from view incorrect on list" def test_model_resource_name_create(self, client): models.Comment.__bases__ += (_PatchedModel,) @@ -113,19 +123,18 @@ def test_model_resource_name_create(self, client): def test_serializer_resource_name_create(self, client, monkeypatch): monkeypatch.setattr( serializers.CommentSerializer.Meta, - 'resource_name', - 'renamed_comments', - False + "resource_name", + "renamed_comments", + False, ) monkeypatch.setattr( - serializers.EntrySerializer.Meta, - 'resource_name', - 'renamed_entries', - False + serializers.EntrySerializer.Meta, "resource_name", "renamed_entries", False ) create_data = deepcopy(self.create_data) - create_data['data']['type'] = 'renamed_comments' - create_data['data']['relationships']['entry']['data']['type'] = 'renamed_entries' + create_data["data"]["type"] = "renamed_comments" + create_data["data"]["relationships"]["entry"]["data"][ + "type" + ] = "renamed_entries" response = client.post(reverse("comment-list"), create_data) @@ -141,37 +150,59 @@ class TestResourceNameConsistency: # Included rename tests def test_type_match_on_included_and_inline_base(self, client): - _check_relationship_and_included_comment_type_are_the_same(client, reverse("entry-list")) + _check_relationship_and_included_comment_type_are_the_same( + client, reverse("entry-list") + ) def test_type_match_on_included_and_inline_with_JSONAPIMeta(self, client): models.Comment.__bases__ += (_PatchedModel,) - _check_relationship_and_included_comment_type_are_the_same(client, reverse("entry-list")) + _check_relationship_and_included_comment_type_are_the_same( + client, reverse("entry-list") + ) - def test_type_match_on_included_and_inline_with_serializer_resource_name(self, client): - serializers.CommentSerializer.Meta.resource_name = "resource_name_from_serializer" + def test_type_match_on_included_and_inline_with_serializer_resource_name( + self, client + ): + serializers.CommentSerializer.Meta.resource_name = ( + "resource_name_from_serializer" + ) - _check_relationship_and_included_comment_type_are_the_same(client, 
reverse("entry-list")) + _check_relationship_and_included_comment_type_are_the_same( + client, reverse("entry-list") + ) - def test_type_match_on_included_and_inline_without_serializer_resource_name(self, client): + def test_type_match_on_included_and_inline_without_serializer_resource_name( + self, client + ): serializers.CommentSerializer.Meta.resource_name = None - _check_relationship_and_included_comment_type_are_the_same(client, reverse("entry-list")) + _check_relationship_and_included_comment_type_are_the_same( + client, reverse("entry-list") + ) def test_type_match_on_included_and_inline_with_serializer_resource_name_and_JSONAPIMeta( - self, client + self, client ): models.Comment.__bases__ += (_PatchedModel,) - serializers.CommentSerializer.Meta.resource_name = "resource_name_from_serializer" + serializers.CommentSerializer.Meta.resource_name = ( + "resource_name_from_serializer" + ) - _check_relationship_and_included_comment_type_are_the_same(client, reverse("entry-list")) + _check_relationship_and_included_comment_type_are_the_same( + client, reverse("entry-list") + ) # Relation rename tests def test_resource_and_relationship_type_match(self, client): _check_resource_and_relationship_comment_type_match(client) - def test_resource_and_relationship_type_match_with_serializer_resource_name(self, client): - serializers.CommentSerializer.Meta.resource_name = "resource_name_from_serializer" + def test_resource_and_relationship_type_match_with_serializer_resource_name( + self, client + ): + serializers.CommentSerializer.Meta.resource_name = ( + "resource_name_from_serializer" + ) _check_resource_and_relationship_comment_type_match(client) @@ -181,10 +212,12 @@ def test_resource_and_relationship_type_match_with_JSONAPIMeta(self, client): _check_resource_and_relationship_comment_type_match(client) def test_resource_and_relationship_type_match_with_serializer_resource_name_and_JSONAPIMeta( - self, client + self, client ): models.Comment.__bases__ += (_PatchedModel,) - serializers.CommentSerializer.Meta.resource_name = "resource_name_from_serializer" + serializers.CommentSerializer.Meta.resource_name = ( + "resource_name_from_serializer" + ) _check_resource_and_relationship_comment_type_match(client) diff --git a/example/tests/integration/test_non_paginated_responses.py b/example/tests/integration/test_non_paginated_responses.py index b73eae5e..5a2e59c8 100644 --- a/example/tests/integration/test_non_paginated_responses.py +++ b/example/tests/integration/test_non_paginated_responses.py @@ -7,9 +7,9 @@ @mock.patch( - 'rest_framework_json_api.utils' - '.get_default_included_resources_from_serializer', - new=lambda s: []) + "rest_framework_json_api.utils" ".get_default_included_resources_from_serializer", + new=lambda s: [], +) def test_multiple_entries_no_pagination(multiple_entries, client): expected = { @@ -17,82 +17,70 @@ def test_multiple_entries_no_pagination(multiple_entries, client): { "type": "posts", "id": "1", - "attributes": - { + "attributes": { "headline": multiple_entries[0].headline, "bodyText": multiple_entries[0].body_text, "pubDate": None, - "modDate": None + "modDate": None, }, - "meta": { - "bodyFormat": "text" - }, - "relationships": - { - "blog": { - "data": {"type": "blogs", "id": "1"} - }, + "meta": {"bodyFormat": "text"}, + "relationships": { + "blog": {"data": {"type": "blogs", "id": "1"}}, "blogHyperlinked": { "links": { "related": "http://testserver/entries/1/blog", - "self": "http://testserver/entries/1/relationships/blog_hyperlinked" + "self": 
"http://testserver/entries/1/relationships/blog_hyperlinked", } }, "authors": { "meta": {"count": 1}, - "data": [{"type": "authors", "id": "1"}] + "data": [{"type": "authors", "id": "1"}], }, "comments": { "meta": {"count": 1}, - "data": [{"type": "comments", "id": "1"}] + "data": [{"type": "comments", "id": "1"}], }, "commentsHyperlinked": { "links": { "related": "http://testserver/entries/1/comments", - "self": "http://testserver/entries/1/relationships/comments_hyperlinked" + "self": "http://testserver/entries/1/relationships/comments_hyperlinked", } }, "suggested": { "data": [{"type": "entries", "id": "2"}], "links": { "related": "http://testserver/entries/1/suggested/", - "self": "http://testserver/entries/1/relationships/suggested" - } + "self": "http://testserver/entries/1/relationships/suggested", + }, }, "suggestedHyperlinked": { "links": { "related": "http://testserver/entries/1/suggested/", "self": "http://testserver/entries/1" - "/relationships/suggested_hyperlinked" + "/relationships/suggested_hyperlinked", } }, "featuredHyperlinked": { "links": { "related": "http://testserver/entries/1/featured", - "self": "http://testserver/entries/1/relationships/featured_hyperlinked" + "self": "http://testserver/entries/1/relationships/featured_hyperlinked", } }, - 'tags': {'data': [], 'meta': {'count': 0}}, - } + "tags": {"data": [], "meta": {"count": 0}}, + }, }, { "type": "posts", "id": "2", - "attributes": - { + "attributes": { "headline": multiple_entries[1].headline, "bodyText": multiple_entries[1].body_text, "pubDate": None, - "modDate": None + "modDate": None, }, - "meta": { - "bodyFormat": "text" - }, - "relationships": - { - "blog": { - "data": {"type": "blogs", "id": "2"} - }, + "meta": {"bodyFormat": "text"}, + "relationships": { + "blog": {"data": {"type": "blogs", "id": "2"}}, "blogHyperlinked": { "links": { "related": "http://testserver/entries/2/blog", @@ -101,40 +89,40 @@ def test_multiple_entries_no_pagination(multiple_entries, client): }, "authors": { "meta": {"count": 1}, - "data": [{"type": "authors", "id": "2"}] + "data": [{"type": "authors", "id": "2"}], }, "comments": { "meta": {"count": 1}, - "data": [{"type": "comments", "id": "2"}] + "data": [{"type": "comments", "id": "2"}], }, "commentsHyperlinked": { "links": { "related": "http://testserver/entries/2/comments", - "self": "http://testserver/entries/2/relationships/comments_hyperlinked" + "self": "http://testserver/entries/2/relationships/comments_hyperlinked", } }, "suggested": { "data": [{"type": "entries", "id": "1"}], "links": { "related": "http://testserver/entries/2/suggested/", - "self": "http://testserver/entries/2/relationships/suggested" - } + "self": "http://testserver/entries/2/relationships/suggested", + }, }, "suggestedHyperlinked": { "links": { "related": "http://testserver/entries/2/suggested/", "self": "http://testserver/entries/2" - "/relationships/suggested_hyperlinked" + "/relationships/suggested_hyperlinked", } }, "featuredHyperlinked": { "links": { "related": "http://testserver/entries/2/featured", - "self": "http://testserver/entries/2/relationships/featured_hyperlinked" + "self": "http://testserver/entries/2/relationships/featured_hyperlinked", } }, - 'tags': {'data': [], 'meta': {'count': 0}}, - } + "tags": {"data": [], "meta": {"count": 0}}, + }, }, ] } diff --git a/example/tests/integration/test_pagination.py b/example/tests/integration/test_pagination.py index 1b12a0d2..aedf8c6c 100644 --- a/example/tests/integration/test_pagination.py +++ 
b/example/tests/integration/test_pagination.py @@ -7,9 +7,9 @@ @mock.patch( - 'rest_framework_json_api.utils' - '.get_default_included_resources_from_serializer', - new=lambda s: []) + "rest_framework_json_api.utils" ".get_default_included_resources_from_serializer", + new=lambda s: [], +) def test_pagination_with_single_entry(single_entry, client): expected = { @@ -17,21 +17,15 @@ def test_pagination_with_single_entry(single_entry, client): { "type": "posts", "id": "1", - "attributes": - { + "attributes": { "headline": single_entry.headline, "bodyText": single_entry.body_text, "pubDate": None, - "modDate": None + "modDate": None, }, - "meta": { - "bodyFormat": "text" - }, - "relationships": - { - "blog": { - "data": {"type": "blogs", "id": "1"} - }, + "meta": {"bodyFormat": "text"}, + "relationships": { + "blog": {"data": {"type": "blogs", "id": "1"}}, "blogHyperlinked": { "links": { "related": "http://testserver/entries/1/blog", @@ -40,64 +34,52 @@ def test_pagination_with_single_entry(single_entry, client): }, "authors": { "meta": {"count": 1}, - "data": [{"type": "authors", "id": "1"}] + "data": [{"type": "authors", "id": "1"}], }, "comments": { "meta": {"count": 1}, - "data": [{"type": "comments", "id": "1"}] + "data": [{"type": "comments", "id": "1"}], }, "commentsHyperlinked": { "links": { "related": "http://testserver/entries/1/comments", - "self": "http://testserver/entries/1/relationships/comments_hyperlinked" + "self": "http://testserver/entries/1/relationships/comments_hyperlinked", } }, "suggested": { "data": [], "links": { "related": "http://testserver/entries/1/suggested/", - "self": "http://testserver/entries/1/relationships/suggested" - } + "self": "http://testserver/entries/1/relationships/suggested", + }, }, "suggestedHyperlinked": { "links": { "related": "http://testserver/entries/1/suggested/", "self": "http://testserver/entries/1" - "/relationships/suggested_hyperlinked" + "/relationships/suggested_hyperlinked", } }, "featuredHyperlinked": { "links": { "related": "http://testserver/entries/1/featured", - "self": "http://testserver/entries/1/relationships/featured_hyperlinked" + "self": "http://testserver/entries/1/relationships/featured_hyperlinked", } }, "tags": { - 'meta': {'count': 1}, - "data": [ - { - "id": "1", - "type": "taggedItems" - } - ] - } - } - }], + "meta": {"count": 1}, + "data": [{"id": "1", "type": "taggedItems"}], + }, + }, + } + ], "links": { - 'first': 'http://testserver/entries?page%5Bnumber%5D=1', - 'last': 'http://testserver/entries?page%5Bnumber%5D=1', + "first": "http://testserver/entries?page%5Bnumber%5D=1", + "last": "http://testserver/entries?page%5Bnumber%5D=1", "next": None, "prev": None, }, - "meta": - { - "pagination": - { - "page": 1, - "pages": 1, - "count": 1 - } - } + "meta": {"pagination": {"page": 1, "pages": 1, "count": 1}}, } response = client.get(reverse("entry-list")) diff --git a/example/tests/integration/test_polymorphism.py b/example/tests/integration/test_polymorphism.py index 5c64776a..bd41f203 100644 --- a/example/tests/integration/test_polymorphism.py +++ b/example/tests/integration/test_polymorphism.py @@ -10,153 +10,157 @@ def test_polymorphism_on_detail(single_art_project, client): - response = client.get(reverse("project-detail", kwargs={'pk': single_art_project.pk})) + response = client.get( + reverse("project-detail", kwargs={"pk": single_art_project.pk}) + ) content = response.json() assert content["data"]["type"] == "artProjects" def test_polymorphism_on_detail_relations(single_company, client): - response = 
client.get(reverse("company-detail", kwargs={'pk': single_company.pk})) + response = client.get(reverse("company-detail", kwargs={"pk": single_company.pk})) content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "artProjects" assert ( - set([rel["type"] for rel in content["data"]["relationships"]["futureProjects"]["data"]]) == - set(["researchProjects", "artProjects"]) + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "artProjects" ) + assert set( + [ + rel["type"] + for rel in content["data"]["relationships"]["futureProjects"]["data"] + ] + ) == set(["researchProjects", "artProjects"]) def test_polymorphism_on_included_relations(single_company, client): response = client.get( - reverse("company-detail", kwargs={'pk': single_company.pk}) + - '?include=current_project,future_projects,current_art_project,current_research_project') + reverse("company-detail", kwargs={"pk": single_company.pk}) + + "?include=current_project,future_projects,current_art_project,current_research_project" + ) content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "artProjects" - assert content["data"]["relationships"]["currentArtProject"]["data"]["type"] == "artProjects" - assert content["data"]["relationships"]["currentResearchProject"]["data"] is None assert ( - set([rel["type"] for rel in content["data"]["relationships"]["futureProjects"]["data"]]) == - set(["researchProjects", "artProjects"]) + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "artProjects" ) - assert set([x.get('type') for x in content.get('included')]) == set([ - 'artProjects', 'artProjects', 'researchProjects']), 'Detail included types are incorrect' + assert ( + content["data"]["relationships"]["currentArtProject"]["data"]["type"] + == "artProjects" + ) + assert content["data"]["relationships"]["currentResearchProject"]["data"] is None + assert set( + [ + rel["type"] + for rel in content["data"]["relationships"]["futureProjects"]["data"] + ] + ) == set(["researchProjects", "artProjects"]) + assert set([x.get("type") for x in content.get("included")]) == set( + ["artProjects", "artProjects", "researchProjects"] + ), "Detail included types are incorrect" # Ensure that the child fields are present. 
- assert content.get('included')[0].get('attributes').get('artist') is not None - assert content.get('included')[1].get('attributes').get('artist') is not None - assert content.get('included')[2].get('attributes').get('supervisor') is not None + assert content.get("included")[0].get("attributes").get("artist") is not None + assert content.get("included")[1].get("attributes").get("artist") is not None + assert content.get("included")[2].get("attributes").get("supervisor") is not None def test_polymorphism_on_polymorphic_model_detail_patch(single_art_project, client): - url = reverse("project-detail", kwargs={'pk': single_art_project.pk}) + url = reverse("project-detail", kwargs={"pk": single_art_project.pk}) response = client.get(url) content = response.json() - test_topic = 'test-{}'.format(random.randint(0, 999999)) - test_artist = 'test-{}'.format(random.randint(0, 999999)) - content['data']['attributes']['topic'] = test_topic - content['data']['attributes']['artist'] = test_artist + test_topic = "test-{}".format(random.randint(0, 999999)) + test_artist = "test-{}".format(random.randint(0, 999999)) + content["data"]["attributes"]["topic"] = test_topic + content["data"]["attributes"]["artist"] = test_artist response = client.patch(url, data=content) new_content = response.json() - assert new_content['data']['type'] == "artProjects" - assert new_content['data']['attributes']['topic'] == test_topic - assert new_content['data']['attributes']['artist'] == test_artist + assert new_content["data"]["type"] == "artProjects" + assert new_content["data"]["attributes"]["topic"] == test_topic + assert new_content["data"]["attributes"]["artist"] == test_artist -def test_patch_on_polymorphic_model_without_including_required_field(single_art_project, client): - url = reverse("project-detail", kwargs={'pk': single_art_project.pk}) +def test_patch_on_polymorphic_model_without_including_required_field( + single_art_project, client +): + url = reverse("project-detail", kwargs={"pk": single_art_project.pk}) data = { - 'data': { - 'id': single_art_project.pk, - 'type': 'artProjects', - 'attributes': { - 'description': 'New description' - } + "data": { + "id": single_art_project.pk, + "type": "artProjects", + "attributes": {"description": "New description"}, } } response = client.patch(url, data) assert response.status_code == status.HTTP_200_OK - assert response.json()['data']['attributes']['description'] == 'New description' + assert response.json()["data"]["attributes"]["description"] == "New description" def test_polymorphism_on_polymorphic_model_list_post(client): - test_topic = 'New test topic {}'.format(random.randint(0, 999999)) - test_artist = 'test-{}'.format(random.randint(0, 999999)) + test_topic = "New test topic {}".format(random.randint(0, 999999)) + test_artist = "test-{}".format(random.randint(0, 999999)) test_project_type = ProjectTypeFactory() - url = reverse('project-list') + url = reverse("project-list") data = { - 'data': { - 'type': 'artProjects', - 'attributes': { - 'topic': test_topic, - 'artist': test_artist - }, - 'relationships': { - 'projectType': { - 'data': { - 'type': 'projectTypes', - 'id': test_project_type.pk - } + "data": { + "type": "artProjects", + "attributes": {"topic": test_topic, "artist": test_artist}, + "relationships": { + "projectType": { + "data": {"type": "projectTypes", "id": test_project_type.pk} } - } + }, } } response = client.post(url, data=data) content = response.json() - assert content['data']['id'] is not None - assert content['data']['type'] == 
"artProjects" - assert content['data']['attributes']['topic'] == test_topic - assert content['data']['attributes']['artist'] == test_artist - assert content['data']['relationships']['projectType']['data']['id'] == \ - str(test_project_type.pk) + assert content["data"]["id"] is not None + assert content["data"]["type"] == "artProjects" + assert content["data"]["attributes"]["topic"] == test_topic + assert content["data"]["attributes"]["artist"] == test_artist + assert content["data"]["relationships"]["projectType"]["data"]["id"] == str( + test_project_type.pk + ) def test_polymorphism_on_polymorphic_model_w_included_serializers(client): test_project = ArtProjectFactory() - query = '?include=projectType' - url = reverse('project-list') + query = "?include=projectType" + url = reverse("project-list") response = client.get(url + query) content = response.json() - assert content['data'][0]['id'] == str(test_project.pk) - assert content['data'][0]['type'] == 'artProjects' - assert content['data'][0]['relationships']['projectType']['data']['id'] == \ - str(test_project.project_type.pk) - assert content['included'][0]['type'] == 'projectTypes' - assert content['included'][0]['id'] == str(test_project.project_type.pk) + assert content["data"][0]["id"] == str(test_project.pk) + assert content["data"][0]["type"] == "artProjects" + assert content["data"][0]["relationships"]["projectType"]["data"]["id"] == str( + test_project.project_type.pk + ) + assert content["included"][0]["type"] == "projectTypes" + assert content["included"][0]["id"] == str(test_project.project_type.pk) def test_polymorphic_model_without_any_instance(client): expected = { "links": { - 'first': 'http://testserver/projects?page%5Bnumber%5D=1', - 'last': 'http://testserver/projects?page%5Bnumber%5D=1', + "first": "http://testserver/projects?page%5Bnumber%5D=1", + "last": "http://testserver/projects?page%5Bnumber%5D=1", "next": None, - "prev": None + "prev": None, }, "data": [], - "meta": { - "pagination": { - "page": 1, - "pages": 1, - "count": 0 - } - } + "meta": {"pagination": {"page": 1, "pages": 1, "count": 0}}, } - response = client.get(reverse('project-list')) + response = client.get(reverse("project-list")) assert response.status_code == 200 content = response.json() assert expected == content def test_invalid_type_on_polymorphic_model(client): - test_topic = 'New test topic {}'.format(random.randint(0, 999999)) - test_artist = 'test-{}'.format(random.randint(0, 999999)) - url = reverse('project-list') + test_topic = "New test topic {}".format(random.randint(0, 999999)) + test_artist = "test-{}".format(random.randint(0, 999999)) + url = reverse("project-list") data = { - 'data': { - 'type': 'invalidProjects', - 'attributes': { - 'topic': test_topic, - 'artist': test_artist - } + "data": { + "type": "invalidProjects", + "attributes": {"topic": test_topic, "artist": test_artist}, } } response = client.post(url, data=data) @@ -165,72 +169,103 @@ def test_invalid_type_on_polymorphic_model(client): assert len(content["errors"]) == 1 assert content["errors"][0]["status"] == "409" try: - assert content["errors"][0]["detail"] == \ - "The resource object's type (invalidProjects) is not the type that constitute the " \ + assert ( + content["errors"][0]["detail"] + == "The resource object's type (invalidProjects) is not the type that constitute the " "collection represented by the endpoint (one of [researchProjects, artProjects])." 
+ ) except AssertionError: # Available type list order isn't enforced - assert content["errors"][0]["detail"] == \ - "The resource object's type (invalidProjects) is not the type that constitute the " \ + assert ( + content["errors"][0]["detail"] + == "The resource object's type (invalidProjects) is not the type that constitute the " "collection represented by the endpoint (one of [artProjects, researchProjects])." + ) -def test_polymorphism_relations_update(single_company, research_project_factory, client): - response = client.get(reverse("company-detail", kwargs={'pk': single_company.pk})) +def test_polymorphism_relations_update( + single_company, research_project_factory, client +): + response = client.get(reverse("company-detail", kwargs={"pk": single_company.pk})) content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "artProjects" + assert ( + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "artProjects" + ) research_project = research_project_factory() content["data"]["relationships"]["currentProject"]["data"] = { "type": "researchProjects", - "id": research_project.pk + "id": research_project.pk, } - response = client.patch(reverse("company-detail", kwargs={'pk': single_company.pk}), - data=content) + response = client.patch( + reverse("company-detail", kwargs={"pk": single_company.pk}), data=content + ) assert response.status_code == 200 content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "researchProjects" - assert int(content["data"]["relationships"]["currentProject"]["data"]["id"]) == \ - research_project.pk + assert ( + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "researchProjects" + ) + assert ( + int(content["data"]["relationships"]["currentProject"]["data"]["id"]) + == research_project.pk + ) -def test_polymorphism_relations_put_405(single_company, research_project_factory, client): - response = client.get(reverse("company-detail", kwargs={'pk': single_company.pk})) +def test_polymorphism_relations_put_405( + single_company, research_project_factory, client +): + response = client.get(reverse("company-detail", kwargs={"pk": single_company.pk})) content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "artProjects" + assert ( + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "artProjects" + ) research_project = research_project_factory() content["data"]["relationships"]["currentProject"]["data"] = { "type": "researchProjects", - "id": research_project.pk + "id": research_project.pk, } - response = client.put(reverse("company-detail", kwargs={'pk': single_company.pk}), - data=content) + response = client.put( + reverse("company-detail", kwargs={"pk": single_company.pk}), data=content + ) assert response.status_code == 405 -def test_invalid_type_on_polymorphic_relation(single_company, research_project_factory, client): - response = client.get(reverse("company-detail", kwargs={'pk': single_company.pk})) +def test_invalid_type_on_polymorphic_relation( + single_company, research_project_factory, client +): + response = client.get(reverse("company-detail", kwargs={"pk": single_company.pk})) content = response.json() - assert content["data"]["relationships"]["currentProject"]["data"]["type"] == "artProjects" + assert ( + content["data"]["relationships"]["currentProject"]["data"]["type"] + == "artProjects" + ) research_project = research_project_factory() 
content["data"]["relationships"]["currentProject"]["data"] = { "type": "invalidProjects", - "id": research_project.pk + "id": research_project.pk, } - response = client.patch(reverse("company-detail", kwargs={'pk': single_company.pk}), - data=content) + response = client.patch( + reverse("company-detail", kwargs={"pk": single_company.pk}), data=content + ) assert response.status_code == 409 content = response.json() assert len(content["errors"]) == 1 assert content["errors"][0]["status"] == "409" try: - assert content["errors"][0]["detail"] == \ - "Incorrect relation type. Expected one of [researchProjects, artProjects], " \ + assert ( + content["errors"][0]["detail"] + == "Incorrect relation type. Expected one of [researchProjects, artProjects], " "received invalidProjects." + ) except AssertionError: # Available type list order isn't enforced - assert content["errors"][0]["detail"] == \ - "Incorrect relation type. Expected one of [artProjects, researchProjects], " \ + assert ( + content["errors"][0]["detail"] + == "Incorrect relation type. Expected one of [artProjects, researchProjects], " "received invalidProjects." + ) diff --git a/example/tests/integration/test_sparse_fieldsets.py b/example/tests/integration/test_sparse_fieldsets.py index 83b8560a..605d218d 100644 --- a/example/tests/integration/test_sparse_fieldsets.py +++ b/example/tests/integration/test_sparse_fieldsets.py @@ -6,18 +6,20 @@ def test_sparse_fieldset_valid_fields(client, entry): - base_url = reverse('entry-list') - response = client.get(base_url, data={'fields[entries]': 'blog,headline'}) + base_url = reverse("entry-list") + response = client.get(base_url, data={"fields[entries]": "blog,headline"}) assert response.status_code == status.HTTP_200_OK - data = response.json()['data'] + data = response.json()["data"] assert len(data) == 1 entry = data[0] - assert entry['attributes'].keys() == {'headline'} - assert entry['relationships'].keys() == {'blog'} + assert entry["attributes"].keys() == {"headline"} + assert entry["relationships"].keys() == {"blog"} -@pytest.mark.parametrize("fields_param", ['invalidfields[entries]', 'fieldsinvalid[entries']) +@pytest.mark.parametrize( + "fields_param", ["invalidfields[entries]", "fieldsinvalid[entries"] +) def test_sparse_fieldset_invalid_fields_parameter(client, entry, fields_param): """ Test that invalid fields query parameter is not processed by sparse fieldset. @@ -25,12 +27,12 @@ def test_sparse_fieldset_invalid_fields_parameter(client, entry, fields_param): rest_framework_json_api.filters.QueryParameterValidationFilter takes care of error handling in such a case. 
""" - base_url = reverse('entry-list') - response = client.get(base_url, data={'invalidfields[entries]': 'blog,headline'}) + base_url = reverse("entry-list") + response = client.get(base_url, data={"invalidfields[entries]": "blog,headline"}) assert response.status_code == status.HTTP_200_OK - data = response.json()['data'] + data = response.json()["data"] assert len(data) == 1 entry = data[0] - assert entry['attributes'].keys() != {'headline'} - assert entry['relationships'].keys() != {'blog'} + assert entry["attributes"].keys() != {"headline"} + assert entry["relationships"].keys() != {"blog"} diff --git a/example/tests/snapshots/snap_test_errors.py b/example/tests/snapshots/snap_test_errors.py index 48b8e474..528b831b 100644 --- a/example/tests/snapshots/snap_test_errors.py +++ b/example/tests/snapshots/snap_test_errors.py @@ -4,116 +4,97 @@ from snapshottest import Snapshot - snapshots = Snapshot() -snapshots['test_first_level_attribute_error 1'] = { - 'errors': [ +snapshots["test_first_level_attribute_error 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/headline' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/headline"}, + "status": "400", } ] } -snapshots['test_first_level_custom_attribute_error 1'] = { - 'errors': [ +snapshots["test_first_level_custom_attribute_error 1"] = { + "errors": [ { - 'detail': 'Too short', - 'source': { - 'pointer': '/data/attributes/body-text' - }, - 'title': 'Too Short title' + "detail": "Too short", + "source": {"pointer": "/data/attributes/body-text"}, + "title": "Too Short title", } ] } -snapshots['test_many_third_level_dict_errors 1'] = { - 'errors': [ +snapshots["test_many_third_level_dict_errors 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/comments/0/attachment/data' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comments/0/attachment/data"}, + "status": "400", }, { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/comments/0/body' - }, - 'status': '400' - } + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comments/0/body"}, + "status": "400", + }, ] } -snapshots['test_second_level_array_error 1'] = { - 'errors': [ +snapshots["test_second_level_array_error 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/comments/0/body' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comments/0/body"}, + "status": "400", } ] } -snapshots['test_second_level_dict_error 1'] = { - 'errors': [ +snapshots["test_second_level_dict_error 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/comment/body' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comment/body"}, + "status": "400", } ] } -snapshots['test_third_level_array_error 1'] = { - 'errors': [ +snapshots["test_third_level_array_error 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': 
'/data/attributes/comments/0/attachments/0/data' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comments/0/attachments/0/data"}, + "status": "400", } ] } -snapshots['test_third_level_custom_array_error 1'] = { - 'errors': [ +snapshots["test_third_level_custom_array_error 1"] = { + "errors": [ { - 'code': 'invalid', - 'detail': 'Too short data', - 'source': { - 'pointer': '/data/attributes/comments/0/attachments/0/data' - }, - 'status': '400' + "code": "invalid", + "detail": "Too short data", + "source": {"pointer": "/data/attributes/comments/0/attachments/0/data"}, + "status": "400", } ] } -snapshots['test_third_level_dict_error 1'] = { - 'errors': [ +snapshots["test_third_level_dict_error 1"] = { + "errors": [ { - 'code': 'required', - 'detail': 'This field is required.', - 'source': { - 'pointer': '/data/attributes/comments/0/attachment/data' - }, - 'status': '400' + "code": "required", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/comments/0/attachment/data"}, + "status": "400", } ] } diff --git a/example/tests/snapshots/snap_test_openapi.py b/example/tests/snapshots/snap_test_openapi.py index aca41d70..7c0b3b98 100644 --- a/example/tests/snapshots/snap_test_openapi.py +++ b/example/tests/snapshots/snap_test_openapi.py @@ -4,10 +4,11 @@ from snapshottest import Snapshot - snapshots = Snapshot() -snapshots['test_path_without_parameters 1'] = '''{ +snapshots[ + "test_path_without_parameters 1" +] = """{ "description": "", "operationId": "List/authors/", "parameters": [ @@ -121,9 +122,11 @@ "description": "not found" } } -}''' +}""" -snapshots['test_path_with_id_parameter 1'] = '''{ +snapshots[ + "test_path_with_id_parameter 1" +] = """{ "description": "", "operationId": "retrieve/authors/{id}/", "parameters": [ @@ -225,9 +228,11 @@ "description": "not found" } } -}''' +}""" -snapshots['test_post_request 1'] = '''{ +snapshots[ + "test_post_request 1" +] = """{ "description": "", "operationId": "create/authors/", "parameters": [], @@ -407,9 +412,11 @@ "description": "[Conflict](https://jsonapi.org/format/#crud-creating-responses-409)" } } -}''' +}""" -snapshots['test_patch_request 1'] = '''{ +snapshots[ + "test_patch_request 1" +] = """{ "description": "", "operationId": "update/authors/{id}", "parameters": [ @@ -583,9 +590,11 @@ "description": "[Conflict]([Conflict](https://jsonapi.org/format/#crud-updating-responses-409)" } } -}''' +}""" -snapshots['test_delete_request 1'] = '''{ +snapshots[ + "test_delete_request 1" +] = """{ "description": "", "operationId": "destroy/authors/{id}", "parameters": [ @@ -644,4 +653,4 @@ "description": "[Resource does not exist](https://jsonapi.org/format/#crud-deleting-responses-404)" } } -}''' +}""" diff --git a/example/tests/test_errors.py b/example/tests/test_errors.py index ff2e8f95..93cdb235 100644 --- a/example/tests/test_errors.py +++ b/example/tests/test_errors.py @@ -14,7 +14,7 @@ class CommentAttachmentSerializer(serializers.Serializer): def validate_data(self, value): if value and len(value) < 10: - raise serializers.ValidationError('Too short data') + raise serializers.ValidationError("Too short data") class CommentSerializer(serializers.Serializer): @@ -32,17 +32,17 @@ class EntrySerializer(serializers.Serializer): body_text = serializers.CharField() def validate(self, attrs): - body_text = attrs['body_text'] + body_text = attrs["body_text"] if len(body_text) < 5: - raise serializers.ValidationError({'body_text': { - 'title': 'Too 
Short title', 'detail': 'Too short'} - }) + raise serializers.ValidationError( + {"body_text": {"title": "Too Short title", "detail": "Too short"}} + ) # view class DummyTestView(views.APIView): serializer_class = EntrySerializer - resource_name = 'entries' + resource_name = "entries" def post(self, request, *args, **kwargs): serializer = self.serializer_class(data=request.data) @@ -50,21 +50,18 @@ def post(self, request, *args, **kwargs): urlpatterns = [ - path('entries-nested', DummyTestView.as_view(), - name='entries-nested-list') + path("entries-nested", DummyTestView.as_view(), name="entries-nested-list") ] -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def some_blog(db): - return Blog.objects.create(name='Some Blog', tagline="It's a blog") + return Blog.objects.create(name="Some Blog", tagline="It's a blog") def perform_error_test(client, data): - with override_settings( - ROOT_URLCONF=__name__ - ): - url = reverse('entries-nested-list') + with override_settings(ROOT_URLCONF=__name__): + url = reverse("entries-nested-list") response = client.post(url, data=data) return response.json() @@ -72,12 +69,12 @@ def perform_error_test(client, data): def test_first_level_attribute_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + }, } } snapshot.assert_match(perform_error_test(client, data)) @@ -85,32 +82,29 @@ def test_first_level_attribute_error(client, some_blog, snapshot): def test_first_level_custom_attribute_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'body-text': 'body', - 'headline': 'headline' - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "body-text": "body", + "headline": "headline", + }, } } - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): snapshot.assert_match(perform_error_test(client, data)) def test_second_level_array_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comments': [ - { - } - ] - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comments": [{}], + }, } } @@ -119,14 +113,14 @@ def test_second_level_array_error(client, some_blog, snapshot): def test_second_level_dict_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comment': {} - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comment": {}, + }, } } @@ -135,22 +129,14 @@ def test_second_level_dict_error(client, some_blog, snapshot): def test_third_level_array_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comments': [ - { - 'body': 'test comment', - 'attachments': [ - { - } - ] - } - ] - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comments": [{"body": "test comment", 
"attachments": [{}]}], + }, } } @@ -159,23 +145,16 @@ def test_third_level_array_error(client, some_blog, snapshot): def test_third_level_custom_array_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comments': [ - { - 'body': 'test comment', - 'attachments': [ - { - 'data': 'text' - } - ] - } - ] - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comments": [ + {"body": "test comment", "attachments": [{"data": "text"}]} + ], + }, } } @@ -184,19 +163,14 @@ def test_third_level_custom_array_error(client, some_blog, snapshot): def test_third_level_dict_error(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comments': [ - { - 'body': 'test comment', - 'attachment': {} - } - ] - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comments": [{"body": "test comment", "attachment": {}}], + }, } } @@ -205,18 +179,14 @@ def test_third_level_dict_error(client, some_blog, snapshot): def test_many_third_level_dict_errors(client, some_blog, snapshot): data = { - 'data': { - 'type': 'entries', - 'attributes': { - 'blog': some_blog.pk, - 'bodyText': 'body_text', - 'headline': 'headline', - 'comments': [ - { - 'attachment': {} - } - ] - } + "data": { + "type": "entries", + "attributes": { + "blog": some_blog.pk, + "bodyText": "body_text", + "headline": "headline", + "comments": [{"attachment": {}}], + }, } } diff --git a/example/tests/test_filters.py b/example/tests/test_filters.py index c3b1c42d..68ad1452 100644 --- a/example/tests/test_filters.py +++ b/example/tests/test_filters.py @@ -8,24 +8,26 @@ class DJATestFilters(APITestCase): """ tests of JSON:API filter backends """ - fixtures = ('blogentry',) + + fixtures = ("blogentry",) def setUp(self): self.entries = Entry.objects.all() self.blogs = Blog.objects.all() - self.url = reverse('nopage-entry-list') - self.fs_url = reverse('filterset-entry-list') - self.no_fs_url = reverse('nofilterset-entry-list') + self.url = reverse("nopage-entry-list") + self.fs_url = reverse("filterset-entry-list") + self.no_fs_url = reverse("nofilterset-entry-list") def test_sort(self): """ test sort """ - response = self.client.get(self.url, data={'sort': 'headline'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"sort": "headline"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - headlines = [c['attributes']['headline'] for c in dja_response['data']] + headlines = [c["attributes"]["headline"] for c in dja_response["data"]] sorted_headlines = sorted(headlines) self.assertEqual(headlines, sorted_headlines) @@ -33,11 +35,12 @@ def test_sort_reverse(self): """ confirm switching the sort order actually works """ - response = self.client.get(self.url, data={'sort': '-headline'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"sort": "-headline"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - headlines = [c['attributes']['headline'] for c in 
dja_response['data']] + headlines = [c["attributes"]["headline"] for c in dja_response["data"]] sorted_headlines = sorted(headlines) self.assertNotEqual(headlines, sorted_headlines) @@ -45,11 +48,12 @@ def test_sort_double_negative(self): """ what if they provide multiple `-`'s? It's OK. """ - response = self.client.get(self.url, data={'sort': '--headline'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"sort": "--headline"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - headlines = [c['attributes']['headline'] for c in dja_response['data']] + headlines = [c["attributes"]["headline"] for c in dja_response["data"]] sorted_headlines = sorted(headlines) self.assertNotEqual(headlines, sorted_headlines) @@ -57,23 +61,28 @@ def test_sort_invalid(self): """ test sort of invalid field """ - response = self.client.get(self.url, - data={'sort': 'nonesuch,headline,-not_a_field'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get( + self.url, data={"sort": "nonesuch,headline,-not_a_field"} + ) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid sort parameters: nonesuch,-not_a_field") + self.assertEqual( + dja_response["errors"][0]["detail"], + "invalid sort parameters: nonesuch,-not_a_field", + ) def test_sort_camelcase(self): """ test sort of camelcase field name """ - response = self.client.get(self.url, data={'sort': 'bodyText'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"sort": "bodyText"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - blog_ids = [(c['attributes']['bodyText'] or '') for c in dja_response['data']] + blog_ids = [(c["attributes"]["bodyText"] or "") for c in dja_response["data"]] sorted_blog_ids = sorted(blog_ids) self.assertEqual(blog_ids, sorted_blog_ids) @@ -84,11 +93,12 @@ def test_sort_underscore(self): "Be conservative in what you send, be liberal in what you accept" -- https://en.wikipedia.org/wiki/Robustness_principle """ - response = self.client.get(self.url, data={'sort': 'body_text'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"sort": "body_text"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - blog_ids = [(c['attributes']['bodyText'] or '') for c in dja_response['data']] + blog_ids = [(c["attributes"]["bodyText"] or "") for c in dja_response["data"]] sorted_blog_ids = sorted(blog_ids) self.assertEqual(blog_ids, sorted_blog_ids) @@ -97,12 +107,15 @@ def test_sort_related(self): test sort via related field using jsonapi path `.` and django orm `__` notation. 
ORM relations must be predefined in the View's .ordering_fields attr """ - for datum in ('blog__id', 'blog.id'): - response = self.client.get(self.url, data={'sort': datum}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + for datum in ("blog__id", "blog.id"): + response = self.client.get(self.url, data={"sort": datum}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - blog_ids = [c['relationships']['blog']['data']['id'] for c in dja_response['data']] + blog_ids = [ + c["relationships"]["blog"]["data"]["id"] for c in dja_response["data"] + ] sorted_blog_ids = sorted(blog_ids) self.assertEqual(blog_ids, sorted_blog_ids) @@ -110,46 +123,50 @@ def test_filter_exact(self): """ filter for an exact match """ - response = self.client.get(self.url, data={'filter[headline]': 'CHEM3271X'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[headline]": "CHEM3271X"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), 1) + self.assertEqual(len(dja_response["data"]), 1) def test_filter_exact_fail(self): """ failed search for an exact match """ - response = self.client.get(self.url, data={'filter[headline]': 'XXXXX'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[headline]": "XXXXX"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), 0) + self.assertEqual(len(dja_response["data"]), 0) def test_filter_isnull(self): """ search for null value """ - response = self.client.get(self.url, data={'filter[bodyText.isnull]': 'true'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[bodyText.isnull]": "true"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() self.assertEqual( - len(dja_response['data']), - len([k for k in self.entries if k.body_text is None]) + len(dja_response["data"]), + len([k for k in self.entries if k.body_text is None]), ) def test_filter_not_null(self): """ search for not null """ - response = self.client.get(self.url, data={'filter[bodyText.isnull]': 'false'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[bodyText.isnull]": "false"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() self.assertEqual( - len(dja_response['data']), - len([k for k in self.entries if k.body_text is not None]) + len(dja_response["data"]), + len([k for k in self.entries if k.body_text is not None]), ) def test_filter_isempty(self): @@ -157,26 +174,35 @@ def test_filter_isempty(self): search for an empty value (different from null!) 
the easiest way to do this is search for r'^$' """ - response = self.client.get(self.url, data={'filter[bodyText.regex]': '^$'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[bodyText.regex]": "^$"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), - len([k for k in self.entries - if k.body_text is not None and - len(k.body_text) == 0])) + self.assertEqual( + len(dja_response["data"]), + len( + [ + k + for k in self.entries + if k.body_text is not None and len(k.body_text) == 0 + ] + ), + ) def test_filter_related(self): """ filter via a relationship chain """ - response = self.client.get(self.url, data={'filter[blog.name]': 'ANTB'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[blog.name]": "ANTB"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), - len([k for k in self.entries - if k.blog.name == 'ANTB'])) + self.assertEqual( + len(dja_response["data"]), + len([k for k in self.entries if k.blog.name == "ANTB"]), + ) def test_filter_related_fieldset_class(self): """ @@ -184,133 +210,164 @@ def test_filter_related_fieldset_class(self): This tests a shortcut for a longer ORM path: `bname` is a shortcut name for `blog.name`. """ - response = self.client.get(self.fs_url, data={'filter[bname]': 'ANTB'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get(self.fs_url, data={"filter[bname]": "ANTB"}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), - len([k for k in self.entries - if k.blog.name == 'ANTB'])) + self.assertEqual( + len(dja_response["data"]), + len([k for k in self.entries if k.blog.name == "ANTB"]), + ) def test_filter_related_missing_fieldset_class(self): """ filter via with neither filterset_fields nor filterset_class This should return an error for any filter[] """ - response = self.client.get(self.no_fs_url, data={'filter[bname]': 'ANTB'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.no_fs_url, data={"filter[bname]": "ANTB"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid filter[bname]") + self.assertEqual(dja_response["errors"][0]["detail"], "invalid filter[bname]") def test_filter_fields_union_list(self): """ test field for a list of values(ORed): ?filter[field.in]': 'val1,val2,val3 """ - response = self.client.get(self.url, - data={'filter[headline.in]': 'CLCV2442V,XXX,BIOL3594X'}) + response = self.client.get( + self.url, data={"filter[headline.in]": "CLCV2442V,XXX,BIOL3594X"} + ) dja_response = response.json() - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) self.assertEqual( - len(dja_response['data']), - len([k for k in self.entries if k.headline == 'CLCV2442V']) + - len([k for k in self.entries if k.headline == 'XXX']) + - len([k for k in self.entries if k.headline == 'BIOL3594X']), - msg="filter field list (union)") + 
response.status_code, 200, msg=response.content.decode("utf-8") + ) + self.assertEqual( + len(dja_response["data"]), + len([k for k in self.entries if k.headline == "CLCV2442V"]) + + len([k for k in self.entries if k.headline == "XXX"]) + + len([k for k in self.entries if k.headline == "BIOL3594X"]), + msg="filter field list (union)", + ) def test_filter_fields_intersection(self): """ test fields (ANDed): ?filter[field1]': 'val1&filter[field2]'='val2 """ # - response = self.client.get(self.url, - data={'filter[headline.regex]': '^A', - 'filter[body_text.icontains]': 'in'}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get( + self.url, + data={"filter[headline.regex]": "^A", "filter[body_text.icontains]": "in"}, + ) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertGreater(len(dja_response['data']), 1) + self.assertGreater(len(dja_response["data"]), 1) self.assertEqual( - len(dja_response['data']), - len([k for k in self.entries if k.headline.startswith('A') and - 'in' in k.body_text.lower()])) + len(dja_response["data"]), + len( + [ + k + for k in self.entries + if k.headline.startswith("A") and "in" in k.body_text.lower() + ] + ), + ) def test_filter_invalid_association_name(self): """ test for filter with invalid filter association name """ - response = self.client.get(self.url, data={'filter[nonesuch]': 'CHEM3271X'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[nonesuch]": "CHEM3271X"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid filter[nonesuch]") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid filter[nonesuch]" + ) def test_filter_empty_association_name(self): """ test for filter with missing association name error texts are different depending on whether QueryParameterValidationFilter is in use. 
""" - response = self.client.get(self.url, data={'filter[]': 'foobar'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[]": "foobar"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], "invalid query parameter: filter[]") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: filter[]" + ) def test_filter_no_brackets(self): """ test for `filter=foobar` with missing filter[association] name """ - response = self.client.get(self.url, data={'filter': 'foobar'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter": "foobar"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid query parameter: filter") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: filter" + ) def test_filter_missing_right_bracket(self): """ test for filter missing right bracket """ - response = self.client.get(self.url, data={'filter[headline': 'foobar'}) - self.assertEqual(response.status_code, 400, msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[headline": "foobar"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid query parameter: filter[headline") + self.assertEqual( + dja_response["errors"][0]["detail"], + "invalid query parameter: filter[headline", + ) def test_filter_no_brackets_rvalue(self): """ test for `filter=` with missing filter[association] and value """ - response = self.client.get(self.url + '?filter=') - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url + "?filter=") + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid query parameter: filter") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: filter" + ) def test_filter_no_brackets_equal(self): """ test for `filter` with missing filter[association] name and =value """ - response = self.client.get(self.url + '?filter') - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url + "?filter") + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid query parameter: filter") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: filter" + ) def test_filter_malformed_left_bracket(self): """ test for filter with invalid filter syntax """ - response = self.client.get(self.url, data={'filter[': 'foobar'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[": "foobar"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], 
"invalid query parameter: filter[") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: filter[" + ) def test_filter_missing_rvalue(self): """ @@ -318,24 +375,30 @@ def test_filter_missing_rvalue(self): this should probably be an error rather than ignoring the filter: https://django-filter.readthedocs.io/en/latest/guide/tips.html#filtering-by-an-empty-string """ - response = self.client.get(self.url, data={'filter[headline]': ''}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"filter[headline]": ""}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "missing value for query parameter filter[headline]") + self.assertEqual( + dja_response["errors"][0]["detail"], + "missing value for query parameter filter[headline]", + ) def test_filter_missing_rvalue_equal(self): """ test for filter with missing value to test against this should probably be an error rather than ignoring the filter: """ - response = self.client.get(self.url + '?filter[headline]') - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url + "?filter[headline]") + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "missing value for query parameter filter[headline]") + self.assertEqual( + dja_response["errors"][0]["detail"], + "missing value for query parameter filter[headline]", + ) def test_filter_single_relation(self): """ @@ -343,13 +406,14 @@ def test_filter_single_relation(self): e.g. filterset-entries?filter[authors.id]=1 looks for entries written by (at least) author.id=1 """ - response = self.client.get(self.fs_url, data={'filter[authors.id]': 1}) + response = self.client.get(self.fs_url, data={"filter[authors.id]": 1}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - ids = [k['id'] for k in dja_response['data']] + ids = [k["id"] for k in dja_response["data"]] expected_ids = [str(k.id) for k in self.entries.filter(authors__id=1)] @@ -361,15 +425,18 @@ def test_filter_repeated_relations(self): e.g. filterset-entries?filter[authors.id]=1&filter[authors.id]=2 looks for entries written by (at least) author.id=1 AND author.id=2 """ - response = self.client.get(self.fs_url, data={'filter[authors.id]': [1, 2]}) + response = self.client.get(self.fs_url, data={"filter[authors.id]": [1, 2]}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - ids = [k['id'] for k in dja_response['data']] + ids = [k["id"] for k in dja_response["data"]] - expected_ids = [str(k.id) for k in self.entries.filter(authors__id=1).filter(authors__id=2)] + expected_ids = [ + str(k.id) for k in self.entries.filter(authors__id=1).filter(authors__id=2) + ] self.assertEqual(set(ids), set(expected_ids)) @@ -379,13 +446,14 @@ def test_filter_in(self): e.g. 
filterset-entries?filter[authors.id.in]=1,2 looks for entries written by (at least) author.id=1 OR author.id=2 """ - response = self.client.get(self.fs_url, data={'filter[authors.id.in]': '1,2'}) + response = self.client.get(self.fs_url, data={"filter[authors.id.in]": "1,2"}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - ids = [k['id'] for k in dja_response['data']] + ids = [k["id"] for k in dja_response["data"]] expected_ids = [str(k.id) for k in self.entries.filter(authors__id__in=[1, 2])] @@ -396,82 +464,70 @@ def test_search_keywords(self): test for `filter[search]="keywords"` where some of the keywords are in the entry and others are in the related blog. """ - response = self.client.get(self.url, data={'filter[search]': 'barnard field research'}) + response = self.client.get( + self.url, data={"filter[search]": "barnard field research"} + ) expected_result = { - 'data': [ + "data": [ { - 'type': 'posts', - 'id': '7', - 'attributes': { - 'headline': 'ANTH3868X', - 'bodyText': 'ETHNOGRAPHIC FIELD RESEARCH IN NYC', - 'pubDate': None, - 'modDate': None}, - 'relationships': { - 'blog': { - 'data': { - 'type': 'blogs', - 'id': '1' + "type": "posts", + "id": "7", + "attributes": { + "headline": "ANTH3868X", + "bodyText": "ETHNOGRAPHIC FIELD RESEARCH IN NYC", + "pubDate": None, + "modDate": None, + }, + "relationships": { + "blog": {"data": {"type": "blogs", "id": "1"}}, + "blogHyperlinked": { + "links": { + "self": "http://testserver/entries/7/relationships/blog_hyperlinked", # noqa: E501 + "related": "http://testserver/entries/7/blog", } }, - 'blogHyperlinked': { - 'links': { - 'self': 'http://testserver/entries/7/relationships/blog_hyperlinked', # noqa: E501 - 'related': 'http://testserver/entries/7/blog'} - }, - 'authors': { - 'meta': { - 'count': 0 - }, - 'data': [] - }, - 'comments': { - 'meta': { - 'count': 0 - }, - 'data': [] - }, - 'commentsHyperlinked': { - 'links': { - 'self': 'http://testserver/entries/7/relationships/comments_hyperlinked', # noqa: E501 - 'related': 'http://testserver/entries/7/comments' + "authors": {"meta": {"count": 0}, "data": []}, + "comments": {"meta": {"count": 0}, "data": []}, + "commentsHyperlinked": { + "links": { + "self": "http://testserver/entries/7/relationships/comments_hyperlinked", # noqa: E501 + "related": "http://testserver/entries/7/comments", } }, - 'suggested': { - 'links': { - 'self': 'http://testserver/entries/7/relationships/suggested', - 'related': 'http://testserver/entries/7/suggested/' + "suggested": { + "links": { + "self": "http://testserver/entries/7/relationships/suggested", + "related": "http://testserver/entries/7/suggested/", }, - 'data': [ - {'type': 'entries', 'id': '1'}, - {'type': 'entries', 'id': '2'}, - {'type': 'entries', 'id': '3'}, - {'type': 'entries', 'id': '4'}, - {'type': 'entries', 'id': '5'}, - {'type': 'entries', 'id': '6'}, - {'type': 'entries', 'id': '8'}, - {'type': 'entries', 'id': '9'}, - {'type': 'entries', 'id': '10'}, - {'type': 'entries', 'id': '11'}, - {'type': 'entries', 'id': '12'} - ] + "data": [ + {"type": "entries", "id": "1"}, + {"type": "entries", "id": "2"}, + {"type": "entries", "id": "3"}, + {"type": "entries", "id": "4"}, + {"type": "entries", "id": "5"}, + {"type": "entries", "id": "6"}, + {"type": "entries", "id": "8"}, + {"type": "entries", "id": "9"}, + {"type": "entries", "id": "10"}, + {"type": "entries", "id": "11"}, + 
{"type": "entries", "id": "12"}, + ], }, - 'suggestedHyperlinked': { - 'links': { - 'self': 'http://testserver/entries/7/relationships/suggested_hyperlinked', # noqa: E501 - 'related': 'http://testserver/entries/7/suggested/'} + "suggestedHyperlinked": { + "links": { + "self": "http://testserver/entries/7/relationships/suggested_hyperlinked", # noqa: E501 + "related": "http://testserver/entries/7/suggested/", + } }, - 'tags': {'data': [], 'meta': {'count': 0}}, - 'featuredHyperlinked': { - 'links': { - 'self': 'http://testserver/entries/7/relationships/featured_hyperlinked', # noqa: E501 - 'related': 'http://testserver/entries/7/featured' + "tags": {"data": [], "meta": {"count": 0}}, + "featuredHyperlinked": { + "links": { + "self": "http://testserver/entries/7/relationships/featured_hyperlinked", # noqa: E501 + "related": "http://testserver/entries/7/featured", } - } + }, }, - 'meta': { - 'bodyFormat': 'text' - } + "meta": {"bodyFormat": "text"}, } ] } @@ -497,48 +553,68 @@ def test_search_multiple_keywords(self): See `example/fixtures/blogentry.json` for the test content that the searches are based on. The searches test for both direct entries and related blogs across multiple fields. """ - for searches in ("research", "chemistry", "nonesuch", - "research seminar", "research nonesuch", - "barnard classic", "barnard ethnographic field research"): - response = self.client.get(self.url, data={'filter[search]': searches}) - self.assertEqual(response.status_code, 200, msg=response.content.decode("utf-8")) + for searches in ( + "research", + "chemistry", + "nonesuch", + "research seminar", + "research nonesuch", + "barnard classic", + "barnard ethnographic field research", + ): + response = self.client.get(self.url, data={"filter[search]": searches}) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() keys = searches.split() # dicts keyed by the search keys for the 4 search_fields: - headline = {} # list of entry ids where key is in entry__headline - body_text = {} # list of entry ids where key is in entry__body_text - blog_name = {} # list of entry ids where key is in entry__blog__name + headline = {} # list of entry ids where key is in entry__headline + body_text = {} # list of entry ids where key is in entry__body_text + blog_name = {} # list of entry ids where key is in entry__blog__name blog_tagline = {} # list of entry ids where key is in entry__blog__tagline for key in keys: - headline[key] = [str(k.id) for k in - self.entries.filter(headline__icontains=key)] - body_text[key] = [str(k.id) for k in - self.entries.filter(body_text__icontains=key)] - blog_name[key] = [str(k.id) for k in - self.entries.filter(blog__name__icontains=key)] - blog_tagline[key] = [str(k.id) for k in - self.entries.filter(blog__tagline__icontains=key)] + headline[key] = [ + str(k.id) for k in self.entries.filter(headline__icontains=key) + ] + body_text[key] = [ + str(k.id) for k in self.entries.filter(body_text__icontains=key) + ] + blog_name[key] = [ + str(k.id) for k in self.entries.filter(blog__name__icontains=key) + ] + blog_tagline[key] = [ + str(k.id) for k in self.entries.filter(blog__tagline__icontains=key) + ] union = [] # each list item is a set of entry ids matching the given key for key in keys: - union.append(set(headline[key] + body_text[key] + - blog_name[key] + blog_tagline[key])) + union.append( + set( + headline[key] + + body_text[key] + + blog_name[key] + + blog_tagline[key] + ) + ) # all keywords must be present: intersect 
the keyword sets expected_ids = set.intersection(*union) expected_len = len(expected_ids) - self.assertEqual(len(dja_response['data']), expected_len) - returned_ids = set([k['id'] for k in dja_response['data']]) + self.assertEqual(len(dja_response["data"]), expected_len) + returned_ids = set([k["id"] for k in dja_response["data"]]) self.assertEqual(returned_ids, expected_ids) def test_param_invalid(self): """ Test a "wrong" query parameter """ - response = self.client.get(self.url, data={'garbage': 'foo'}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.url, data={"garbage": "foo"}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "invalid query parameter: garbage") + self.assertEqual( + dja_response["errors"][0]["detail"], "invalid query parameter: garbage" + ) def test_param_duplicate_sort(self): """ @@ -546,38 +622,48 @@ def test_param_duplicate_sort(self): `?sort=headline&page[size]=3&sort=bodyText` is not allowed. This is not so obvious when using a data dict.... """ - response = self.client.get(self.url, - data={'sort': ['headline', 'bodyText'], - 'page[size]': 3} - ) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get( + self.url, data={"sort": ["headline", "bodyText"], "page[size]": 3} + ) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "repeated query parameter not allowed: sort") + self.assertEqual( + dja_response["errors"][0]["detail"], + "repeated query parameter not allowed: sort", + ) def test_param_duplicate_page(self): """ test a duplicated page[size] query parameter """ - response = self.client.get(self.fs_url, data={'page[size]': [1, 2]}) - self.assertEqual(response.status_code, 400, - msg=response.content.decode("utf-8")) + response = self.client.get(self.fs_url, data={"page[size]": [1, 2]}) + self.assertEqual( + response.status_code, 400, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(dja_response['errors'][0]['detail'], - "repeated query parameter not allowed: page[size]") + self.assertEqual( + dja_response["errors"][0]["detail"], + "repeated query parameter not allowed: page[size]", + ) def test_many_params(self): """ Test that filter params aren't ignored when many params are present """ - response = self.client.get(self.url, - data={'filter[headline.regex]': '^A', - 'filter[body_text.regex]': '^IN', - 'filter[blog.name]': 'ANTB', - 'page[size]': 3}) - self.assertEqual(response.status_code, 200, - msg=response.content.decode("utf-8")) + response = self.client.get( + self.url, + data={ + "filter[headline.regex]": "^A", + "filter[body_text.regex]": "^IN", + "filter[blog.name]": "ANTB", + "page[size]": 3, + }, + ) + self.assertEqual( + response.status_code, 200, msg=response.content.decode("utf-8") + ) dja_response = response.json() - self.assertEqual(len(dja_response['data']), 1) - self.assertEqual(dja_response['data'][0]['id'], '1') + self.assertEqual(len(dja_response["data"]), 1) + self.assertEqual(dja_response["data"][0]["id"], "1") diff --git a/example/tests/test_format_keys.py b/example/tests/test_format_keys.py index 0fd76c67..23a8a325 100644 --- a/example/tests/test_format_keys.py +++ b/example/tests/test_format_keys.py @@ -10,11 
+10,12 @@ class FormatKeysSetTests(TestBase): """ Test that camelization and underscoring of key names works if they are activated. """ - list_url = reverse('user-list') + + list_url = reverse("user-list") def setUp(self): super(FormatKeysSetTests, self).setUp() - self.detail_url = reverse('user-detail', kwargs={'pk': self.miles.pk}) + self.detail_url = reverse("user-detail", kwargs={"pk": self.miles.pk}) def test_camelization(self): """ @@ -25,39 +26,42 @@ def test_camelization(self): user = get_user_model().objects.all()[0] expected = { - 'data': [ + "data": [ { - 'type': 'users', - 'id': encoding.force_str(user.pk), - 'attributes': { - 'firstName': user.first_name, - 'lastName': user.last_name, - 'email': user.email + "type": "users", + "id": encoding.force_str(user.pk), + "attributes": { + "firstName": user.first_name, + "lastName": user.last_name, + "email": user.email, }, } ], - 'links': { - 'first': 'http://testserver/identities?page%5Bnumber%5D=1', - 'last': 'http://testserver/identities?page%5Bnumber%5D=2', - 'next': 'http://testserver/identities?page%5Bnumber%5D=2', - 'prev': None + "links": { + "first": "http://testserver/identities?page%5Bnumber%5D=1", + "last": "http://testserver/identities?page%5Bnumber%5D=2", + "next": "http://testserver/identities?page%5Bnumber%5D=2", + "prev": None, }, - 'meta': { - 'pagination': { - 'page': 1, - 'pages': 2, - 'count': 2 - } - } + "meta": {"pagination": {"page": 1, "pages": 2, "count": 2}}, } assert expected == response.json() def test_options_format_field_names(db, client): - response = client.options(reverse('author-list')) + response = client.options(reverse("author-list")) assert response.status_code == status.HTTP_200_OK - data = response.json()['data'] - expected_keys = {'name', 'email', 'bio', 'entries', 'firstEntry', 'type', - 'comments', 'secrets', 'defaults'} - assert expected_keys == data['actions']['POST'].keys() + data = response.json()["data"] + expected_keys = { + "name", + "email", + "bio", + "entries", + "firstEntry", + "type", + "comments", + "secrets", + "defaults", + } + assert expected_keys == data["actions"]["POST"].keys() diff --git a/example/tests/test_generic_validation.py b/example/tests/test_generic_validation.py index ff2aff4b..99d2d09a 100644 --- a/example/tests/test_generic_validation.py +++ b/example/tests/test_generic_validation.py @@ -10,7 +10,7 @@ class GenericValidationTest(TestBase): def setUp(self): super(GenericValidationTest, self).setUp() - self.url = reverse('user-validation', kwargs={'pk': self.miles.pk}) + self.url = reverse("user-validation", kwargs={"pk": self.miles.pk}) def test_generic_validation_error(self): """ @@ -20,14 +20,14 @@ def test_generic_validation_error(self): self.assertEqual(response.status_code, 400) expected = { - 'errors': [{ - 'status': '400', - 'source': { - 'pointer': '/data' - }, - 'detail': 'Oh nohs!', - 'code': 'invalid', - }] + "errors": [ + { + "status": "400", + "source": {"pointer": "/data"}, + "detail": "Oh nohs!", + "code": "invalid", + } + ] } assert expected == response.json() diff --git a/example/tests/test_generic_viewset.py b/example/tests/test_generic_viewset.py index ba315740..a07eb610 100644 --- a/example/tests/test_generic_viewset.py +++ b/example/tests/test_generic_viewset.py @@ -13,17 +13,17 @@ def test_default_rest_framework_behavior(self): """ This is more of an example really, showing default behavior """ - url = reverse('user-default', kwargs={'pk': self.miles.pk}) + url = reverse("user-default", kwargs={"pk": self.miles.pk}) response = 
self.client.get(url) self.assertEqual(200, response.status_code) expected = { - 'id': 2, - 'first_name': 'Miles', - 'last_name': 'Davis', - 'email': 'miles@example.com' + "id": 2, + "first_name": "Miles", + "last_name": "Davis", + "email": "miles@example.com", } assert expected == response.json() @@ -33,21 +33,21 @@ def test_ember_expected_renderer(self): The :class:`UserEmber` ViewSet has the ``resource_name`` of 'data' so that should be the key in the JSON response. """ - url = reverse('user-manual-resource-name', kwargs={'pk': self.miles.pk}) + url = reverse("user-manual-resource-name", kwargs={"pk": self.miles.pk}) - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): response = self.client.get(url) self.assertEqual(200, response.status_code) expected = { - 'data': { - 'type': 'data', - 'id': '2', - 'attributes': { - 'first-name': 'Miles', - 'last-name': 'Davis', - 'email': 'miles@example.com' - } + "data": { + "type": "data", + "id": "2", + "attributes": { + "first-name": "Miles", + "last-name": "Davis", + "email": "miles@example.com", + }, } } @@ -58,34 +58,38 @@ def test_default_validation_exceptions(self): Default validation exceptions should conform to json api spec """ expected = { - 'errors': [ + "errors": [ { - 'status': '400', - 'source': { - 'pointer': '/data/attributes/email', + "status": "400", + "source": { + "pointer": "/data/attributes/email", }, - 'detail': 'Enter a valid email address.', - 'code': 'invalid', + "detail": "Enter a valid email address.", + "code": "invalid", }, { - 'status': '400', - 'source': { - 'pointer': '/data/attributes/first-name', + "status": "400", + "source": { + "pointer": "/data/attributes/first-name", }, - 'detail': 'There\'s a problem with first name', - 'code': 'invalid', - } + "detail": "There's a problem with first name", + "code": "invalid", + }, ] } - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): - response = self.client.post('/identities', { - 'data': { - 'type': 'users', - 'attributes': { - 'email': 'bar', 'first_name': 'alajflajaljalajlfjafljalj' + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): + response = self.client.post( + "/identities", + { + "data": { + "type": "users", + "attributes": { + "email": "bar", + "first_name": "alajflajaljalajlfjafljalj", + }, } - } - }) + }, + ) assert expected == response.json() @@ -94,30 +98,34 @@ def test_custom_validation_exceptions(self): Exceptions should be able to be formatted manually """ expected = { - 'errors': [ + "errors": [ { - 'status': '400', - 'source': { - 'pointer': '/data/attributes/email', + "status": "400", + "source": { + "pointer": "/data/attributes/email", }, - 'detail': 'Enter a valid email address.', - 'code': 'invalid', + "detail": "Enter a valid email address.", + "code": "invalid", }, { - 'id': 'armageddon101', - 'detail': 'Hey! You need a last name!', - 'meta': 'something', - 'source': {'pointer': '/data/attributes/lastName'} + "id": "armageddon101", + "detail": "Hey! 
You need a last name!", + "meta": "something", + "source": {"pointer": "/data/attributes/lastName"}, }, ] } - response = self.client.post('/identities', { - 'data': { - 'type': 'users', - 'attributes': { - 'email': 'bar', 'last_name': 'alajflajaljalajlfjafljalj' + response = self.client.post( + "/identities", + { + "data": { + "type": "users", + "attributes": { + "email": "bar", + "last_name": "alajflajaljalajlfjafljalj", + }, } - } - }) + }, + ) assert expected == response.json() diff --git a/example/tests/test_model_viewsets.py b/example/tests/test_model_viewsets.py index 1ce8336d..9d4a610f 100644 --- a/example/tests/test_model_viewsets.py +++ b/example/tests/test_model_viewsets.py @@ -15,46 +15,41 @@ class ModelViewSetTests(TestBase): [, [^/]+)/$>] """ - list_url = reverse('user-list') + + list_url = reverse("user-list") def setUp(self): super(ModelViewSetTests, self).setUp() - self.detail_url = reverse('user-detail', kwargs={'pk': self.miles.pk}) + self.detail_url = reverse("user-detail", kwargs={"pk": self.miles.pk}) def test_key_in_list_result(self): """ Ensure the result has a 'user' key since that is the name of the model """ - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): response = self.client.get(self.list_url) self.assertEqual(response.status_code, 200) user = get_user_model().objects.all()[0] expected = { - 'data': [ + "data": [ { - 'type': 'users', - 'id': encoding.force_str(user.pk), - 'attributes': { - 'first-name': user.first_name, - 'last-name': user.last_name, - 'email': user.email + "type": "users", + "id": encoding.force_str(user.pk), + "attributes": { + "first-name": user.first_name, + "last-name": user.last_name, + "email": user.email, }, } ], - 'links': { - 'first': 'http://testserver/identities?page%5Bnumber%5D=1', - 'last': 'http://testserver/identities?page%5Bnumber%5D=2', - 'next': 'http://testserver/identities?page%5Bnumber%5D=2', - 'prev': None + "links": { + "first": "http://testserver/identities?page%5Bnumber%5D=1", + "last": "http://testserver/identities?page%5Bnumber%5D=2", + "next": "http://testserver/identities?page%5Bnumber%5D=2", + "prev": None, }, - 'meta': { - 'pagination': { - 'page': 1, - 'pages': 2, - 'count': 2 - } - } + "meta": {"pagination": {"page": 1, "pages": 2, "count": 2}}, } assert expected == response.json() @@ -63,36 +58,30 @@ def test_page_two_in_list_result(self): """ Ensure that the second page is reachable and is the correct data. 
""" - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): - response = self.client.get(self.list_url, {'page[number]': 2}) + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): + response = self.client.get(self.list_url, {"page[number]": 2}) self.assertEqual(response.status_code, 200) user = get_user_model().objects.all()[1] expected = { - 'data': [ + "data": [ { - 'type': 'users', - 'id': encoding.force_str(user.pk), - 'attributes': { - 'first-name': user.first_name, - 'last-name': user.last_name, - 'email': user.email + "type": "users", + "id": encoding.force_str(user.pk), + "attributes": { + "first-name": user.first_name, + "last-name": user.last_name, + "email": user.email, }, } ], - 'links': { - 'first': 'http://testserver/identities?page%5Bnumber%5D=1', - 'last': 'http://testserver/identities?page%5Bnumber%5D=2', - 'next': None, - 'prev': 'http://testserver/identities?page%5Bnumber%5D=1' + "links": { + "first": "http://testserver/identities?page%5Bnumber%5D=1", + "last": "http://testserver/identities?page%5Bnumber%5D=2", + "next": None, + "prev": "http://testserver/identities?page%5Bnumber%5D=1", }, - 'meta': { - 'pagination': { - 'page': 2, - 'pages': 2, - 'count': 2 - } - } + "meta": {"pagination": {"page": 2, "pages": 2, "count": 2}}, } assert expected == response.json() @@ -103,45 +92,39 @@ def test_page_range_in_list_result(self): tests pluralization as two objects means it converts ``user`` to ``users``. """ - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): - response = self.client.get(self.list_url, {'page[size]': 2}) + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): + response = self.client.get(self.list_url, {"page[size]": 2}) self.assertEqual(response.status_code, 200) users = get_user_model().objects.all() expected = { - 'data': [ + "data": [ { - 'type': 'users', - 'id': encoding.force_str(users[0].pk), - 'attributes': { - 'first-name': users[0].first_name, - 'last-name': users[0].last_name, - 'email': users[0].email + "type": "users", + "id": encoding.force_str(users[0].pk), + "attributes": { + "first-name": users[0].first_name, + "last-name": users[0].last_name, + "email": users[0].email, }, }, { - 'type': 'users', - 'id': encoding.force_str(users[1].pk), - 'attributes': { - 'first-name': users[1].first_name, - 'last-name': users[1].last_name, - 'email': users[1].email + "type": "users", + "id": encoding.force_str(users[1].pk), + "attributes": { + "first-name": users[1].first_name, + "last-name": users[1].last_name, + "email": users[1].email, }, - } + }, ], - 'links': { - 'first': 'http://testserver/identities?page%5Bnumber%5D=1&page%5Bsize%5D=2', - 'last': 'http://testserver/identities?page%5Bnumber%5D=1&page%5Bsize%5D=2', - 'next': None, - 'prev': None + "links": { + "first": "http://testserver/identities?page%5Bnumber%5D=1&page%5Bsize%5D=2", + "last": "http://testserver/identities?page%5Bnumber%5D=1&page%5Bsize%5D=2", + "next": None, + "prev": None, }, - 'meta': { - 'pagination': { - 'page': 1, - 'pages': 1, - 'count': 2 - } - } + "meta": {"pagination": {"page": 1, "pages": 1, "count": 2}}, } assert expected == response.json() @@ -150,18 +133,18 @@ def test_key_in_detail_result(self): """ Ensure the result has a 'user' key. 
""" - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): response = self.client.get(self.detail_url) self.assertEqual(response.status_code, 200) expected = { - 'data': { - 'type': 'users', - 'id': encoding.force_str(self.miles.pk), - 'attributes': { - 'first-name': self.miles.first_name, - 'last-name': self.miles.last_name, - 'email': self.miles.email + "data": { + "type": "users", + "id": encoding.force_str(self.miles.pk), + "attributes": { + "first-name": self.miles.first_name, + "last-name": self.miles.last_name, + "email": self.miles.email, }, } } @@ -173,12 +156,7 @@ def test_patch_requires_id(self): Verify that 'id' is required to be passed in an update request. """ data = { - 'data': { - 'type': 'users', - 'attributes': { - 'first-name': 'DifferentName' - } - } + "data": {"type": "users", "attributes": {"first-name": "DifferentName"}} } response = self.client.patch(self.detail_url, data=data) @@ -190,12 +168,10 @@ def test_patch_requires_correct_id(self): Verify that 'id' is the same then in url """ data = { - 'data': { - 'type': 'users', - 'id': self.miles.pk + 1, - 'attributes': { - 'first-name': 'DifferentName' - } + "data": { + "type": "users", + "id": self.miles.pk + 1, + "attributes": {"first-name": "DifferentName"}, } } @@ -207,36 +183,34 @@ def test_key_in_post(self): """ Ensure a key is in the post. """ - self.client.login(username='miles', password='pw') + self.client.login(username="miles", password="pw") data = { - 'data': { - 'type': 'users', - 'id': encoding.force_str(self.miles.pk), - 'attributes': { - 'first-name': self.miles.first_name, - 'last-name': self.miles.last_name, - 'email': 'miles@trumpet.org' + "data": { + "type": "users", + "id": encoding.force_str(self.miles.pk), + "attributes": { + "first-name": self.miles.first_name, + "last-name": self.miles.last_name, + "email": "miles@trumpet.org", }, } } - with override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize'): + with override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize"): response = self.client.put(self.detail_url, data=data) assert data == response.json() # is it updated? self.assertEqual( - get_user_model().objects.get(pk=self.miles.pk).email, - 'miles@trumpet.org') + get_user_model().objects.get(pk=self.miles.pk).email, "miles@trumpet.org" + ) def test_404_error_pointer(self): - self.client.login(username='miles', password='pw') - not_found_url = reverse('user-detail', kwargs={'pk': 12345}) + self.client.login(username="miles", password="pw") + not_found_url = reverse("user-detail", kwargs={"pk": 12345}) errors = { - 'errors': [ - {'detail': 'Not found.', 'status': '404', 'code': 'not_found'} - ] + "errors": [{"detail": "Not found.", "status": "404", "code": "not_found"}] } response = self.client.get(not_found_url) @@ -250,18 +224,13 @@ def test_patch_allow_field_type(author, author_type_factory, client): Verify that type field may be updated. 
""" author_type = author_type_factory() - url = reverse('author-detail', args=[author.id]) + url = reverse("author-detail", args=[author.id]) data = { - 'data': { - 'id': author.id, - 'type': 'authors', - 'relationships': { - 'data': { - 'id': author_type.id, - 'type': 'author-type' - } - } + "data": { + "id": author.id, + "type": "authors", + "relationships": {"data": {"id": author_type.id, "type": "author-type"}}, } } diff --git a/example/tests/test_openapi.py b/example/tests/test_openapi.py index e7a2b6ca..d32a9367 100644 --- a/example/tests/test_openapi.py +++ b/example/tests/test_openapi.py @@ -23,14 +23,11 @@ def create_view_with_kw(view_cls, method, request, initkwargs): def test_path_without_parameters(snapshot): - path = '/authors/' - method = 'GET' + path = "/authors/" + method = "GET" view = create_view_with_kw( - views.AuthorViewSet, - method, - create_request(path), - {'get': 'list'} + views.AuthorViewSet, method, create_request(path), {"get": "list"} ) inspector = AutoSchema() inspector.view = view @@ -40,14 +37,11 @@ def test_path_without_parameters(snapshot): def test_path_with_id_parameter(snapshot): - path = '/authors/{id}/' - method = 'GET' + path = "/authors/{id}/" + method = "GET" view = create_view_with_kw( - views.AuthorViewSet, - method, - create_request(path), - {'get': 'retrieve'} + views.AuthorViewSet, method, create_request(path), {"get": "retrieve"} ) inspector = AutoSchema() inspector.view = view @@ -57,14 +51,11 @@ def test_path_with_id_parameter(snapshot): def test_post_request(snapshot): - method = 'POST' - path = '/authors/' + method = "POST" + path = "/authors/" view = create_view_with_kw( - views.AuthorViewSet, - method, - create_request(path), - {'post': 'create'} + views.AuthorViewSet, method, create_request(path), {"post": "create"} ) inspector = AutoSchema() inspector.view = view @@ -74,14 +65,11 @@ def test_post_request(snapshot): def test_patch_request(snapshot): - method = 'PATCH' - path = '/authors/{id}' + method = "PATCH" + path = "/authors/{id}" view = create_view_with_kw( - views.AuthorViewSet, - method, - create_request(path), - {'patch': 'update'} + views.AuthorViewSet, method, create_request(path), {"patch": "update"} ) inspector = AutoSchema() inspector.view = view @@ -91,14 +79,11 @@ def test_patch_request(snapshot): def test_delete_request(snapshot): - method = 'DELETE' - path = '/authors/{id}' + method = "DELETE" + path = "/authors/{id}" view = create_view_with_kw( - views.AuthorViewSet, - method, - create_request(path), - {'delete': 'delete'} + views.AuthorViewSet, method, create_request(path), {"delete": "delete"} ) inspector = AutoSchema() inspector.view = view @@ -107,22 +92,25 @@ def test_delete_request(snapshot): snapshot.assert_match(json.dumps(operation, indent=2, sort_keys=True)) -@override_settings(REST_FRAMEWORK={ - 'DEFAULT_SCHEMA_CLASS': 'rest_framework_json_api.schemas.openapi.AutoSchema'}) +@override_settings( + REST_FRAMEWORK={ + "DEFAULT_SCHEMA_CLASS": "rest_framework_json_api.schemas.openapi.AutoSchema" + } +) def test_schema_construction(): """Construction of the top level dictionary.""" patterns = [ - re_path('^authors/?$', views.AuthorViewSet.as_view({'get': 'list'})), + re_path("^authors/?$", views.AuthorViewSet.as_view({"get": "list"})), ] generator = SchemaGenerator(patterns=patterns) - request = create_request('/') + request = create_request("/") schema = generator.get_schema(request=request) - assert 'openapi' in schema - assert 'info' in schema - assert 'paths' in schema - assert 'components' in schema + 
assert "openapi" in schema + assert "info" in schema + assert "paths" in schema + assert "components" in schema def test_schema_related_serializers(): @@ -135,15 +123,15 @@ def test_schema_related_serializers(): and confirm that the schema for the related field is properly rendered """ generator = SchemaGenerator() - request = create_request('/') + request = create_request("/") schema = generator.get_schema(request=request) # make sure the path's relationship and related {related_field}'s got expanded - assert '/authors/{id}/relationships/{related_field}' in schema['paths'] - assert '/authors/{id}/comments/' in schema['paths'] - assert '/authors/{id}/entries/' in schema['paths'] - assert '/authors/{id}/first_entry/' in schema['paths'] - first_get = schema['paths']['/authors/{id}/first_entry/']['get']['responses']['200'] - first_schema = first_get['content']['application/vnd.api+json']['schema'] - first_props = first_schema['properties']['data'] - assert '$ref' in first_props - assert first_props['$ref'] == '#/components/schemas/Entry' + assert "/authors/{id}/relationships/{related_field}" in schema["paths"] + assert "/authors/{id}/comments/" in schema["paths"] + assert "/authors/{id}/entries/" in schema["paths"] + assert "/authors/{id}/first_entry/" in schema["paths"] + first_get = schema["paths"]["/authors/{id}/first_entry/"]["get"]["responses"]["200"] + first_schema = first_get["content"]["application/vnd.api+json"]["schema"] + first_props = first_schema["properties"]["data"] + assert "$ref" in first_props + assert first_props["$ref"] == "#/components/schemas/Entry" diff --git a/example/tests/test_parsers.py b/example/tests/test_parsers.py index 38d7c6d7..b83f70a7 100644 --- a/example/tests/test_parsers.py +++ b/example/tests/test_parsers.py @@ -14,47 +14,41 @@ class TestJSONParser(TestCase): - def setUp(self): class MockRequest(object): - def __init__(self): - self.method = 'GET' + self.method = "GET" request = MockRequest() - self.parser_context = {'request': request, 'kwargs': {}, 'view': 'BlogViewSet'} + self.parser_context = {"request": request, "kwargs": {}, "view": "BlogViewSet"} data = { - 'data': { - 'id': 123, - 'type': 'Blog', - 'attributes': { - 'json-value': {'JsonKey': 'JsonValue'} - }, + "data": { + "id": 123, + "type": "Blog", + "attributes": {"json-value": {"JsonKey": "JsonValue"}}, }, - 'meta': { - 'random_key': 'random_value' - } + "meta": {"random_key": "random_value"}, } self.string = json.dumps(data) - @override_settings(JSON_API_FORMAT_FIELD_NAMES='dasherize') + @override_settings(JSON_API_FORMAT_FIELD_NAMES="dasherize") def test_parse_include_metadata_format_field_names(self): parser = JSONParser() - stream = BytesIO(self.string.encode('utf-8')) + stream = BytesIO(self.string.encode("utf-8")) data = parser.parse(stream, None, self.parser_context) - self.assertEqual(data['_meta'], {'random_key': 'random_value'}) - self.assertEqual(data['json_value'], {'JsonKey': 'JsonValue'}) + self.assertEqual(data["_meta"], {"random_key": "random_value"}) + self.assertEqual(data["json_value"], {"JsonKey": "JsonValue"}) def test_parse_invalid_data(self): parser = JSONParser() string = json.dumps([]) - stream = BytesIO(string.encode('utf-8')) + stream = BytesIO(string.encode("utf-8")) with self.assertRaises(ParseError): parser.parse(stream, None, self.parser_context) @@ -62,16 +56,18 @@ def test_parse_invalid_data(self): def test_parse_invalid_data_key(self): parser = JSONParser() - string = json.dumps({ - 'data': [{ - 'id': 123, - 'type': 'Blog', - 'attributes': { - 'json-value': 
{'JsonKey': 'JsonValue'} - }, - }] - }) - stream = BytesIO(string.encode('utf-8')) + string = json.dumps( + { + "data": [ + { + "id": 123, + "type": "Blog", + "attributes": {"json-value": {"JsonKey": "JsonValue"}}, + } + ] + } + ) + stream = BytesIO(string.encode("utf-8")) with self.assertRaises(ParseError): parser.parse(stream, None, self.parser_context) @@ -84,7 +80,7 @@ def __init__(self, response_dict): @property def pk(self): - return self.id if hasattr(self, 'id') else None + return self.id if hasattr(self, "id") else None class DummySerializer(serializers.Serializer): @@ -95,7 +91,7 @@ class DummySerializer(serializers.Serializer): class DummyAPIView(views.APIView): parser_classes = [JSONParser] renderer_classes = [JSONRenderer] - resource_name = 'dummy' + resource_name = "dummy" def patch(self, request, *args, **kwargs): serializer = DummySerializer(DummyDTO(request.data)) @@ -103,28 +99,25 @@ def patch(self, request, *args, **kwargs): urlpatterns = [ - path('repeater', DummyAPIView.as_view(), name='repeater'), + path("repeater", DummyAPIView.as_view(), name="repeater"), ] class TestParserOnAPIView(APITestCase): - def setUp(self): class MockRequest(object): def __init__(self): - self.method = 'PATCH' + self.method = "PATCH" request = MockRequest() # To be honest view string isn't resolved into actual view - self.parser_context = {'request': request, 'kwargs': {}, 'view': 'DummyAPIView'} + self.parser_context = {"request": request, "kwargs": {}, "view": "DummyAPIView"} self.data = { - 'data': { - 'id': 123, - 'type': 'strs', - 'attributes': { - 'body': 'hello' - }, + "data": { + "id": 123, + "type": "strs", + "attributes": {"body": "hello"}, } } @@ -133,20 +126,20 @@ def __init__(self): def test_patch_doesnt_raise_attribute_error(self): parser = JSONParser() - stream = BytesIO(self.string.encode('utf-8')) + stream = BytesIO(self.string.encode("utf-8")) data = parser.parse(stream, None, self.parser_context) - assert data['id'] == 123 - assert data['body'] == 'hello' + assert data["id"] == 123 + assert data["body"] == "hello" @override_settings(ROOT_URLCONF=__name__) def test_patch_request(self): - url = reverse('repeater') + url = reverse("repeater") data = self.data - data['data']['type'] = 'dummy' + data["data"]["type"] = "dummy" response = self.client.patch(url, data=data) data = response.json() - assert data['data']['id'] == str(123) - assert data['data']['attributes']['body'] == 'hello' + assert data["data"]["id"] == str(123) + assert data["data"]["attributes"]["body"] == "hello" diff --git a/example/tests/test_performance.py b/example/tests/test_performance.py index ac8b5956..e42afada 100644 --- a/example/tests/test_performance.py +++ b/example/tests/test_performance.py @@ -7,45 +7,49 @@ class PerformanceTestCase(APITestCase): def setUp(self): - self.author = Author.objects.create(name='Super powerful superhero', email='i.am@lost.com') - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") - self.other_blog = Blog.objects.create(name='Other blog', tagline="It's another blog") + self.author = Author.objects.create( + name="Super powerful superhero", email="i.am@lost.com" + ) + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") + self.other_blog = Blog.objects.create( + name="Other blog", tagline="It's another blog" + ) self.first_entry = Entry.objects.create( blog=self.blog, - headline='headline one', - body_text='body_text two', + headline="headline one", + body_text="body_text two", pub_date=timezone.now(), mod_date=timezone.now(), 
n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) self.second_entry = Entry.objects.create( blog=self.blog, - headline='headline two', - body_text='body_text one', + headline="headline two", + body_text="body_text one", pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=1 + rating=1, ) self.comment = Comment.objects.create(entry=self.first_entry) CommentFactory.create_batch(50) def test_query_count_no_includes(self): - """ We expect a simple list view to issue only two queries. + """We expect a simple list view to issue only two queries. 1. The number of results in the set (e.g. a COUNT query), only necessary because we're using PageNumberPagination 2. The SELECT query for the set """ with self.assertNumQueries(2): - response = self.client.get('/comments?page[size]=25') - self.assertEqual(len(response.data['results']), 25) + response = self.client.get("/comments?page[size]=25") + self.assertEqual(len(response.data["results"]), 25) def test_query_count_include_author(self): - """ We expect a list view with an include have three queries: + """We expect a list view with an include have three queries: 1. Primary resource COUNT query 2. Primary resource SELECT @@ -54,15 +58,15 @@ def test_query_count_include_author(self): 5. Entries prefetched """ with self.assertNumQueries(5): - response = self.client.get('/comments?include=author&page[size]=25') - self.assertEqual(len(response.data['results']), 25) + response = self.client.get("/comments?include=author&page[size]=25") + self.assertEqual(len(response.data["results"]), 25) def test_query_select_related_entry(self): - """ We expect a list view with an include have two queries: + """We expect a list view with an include have two queries: 1. Primary resource COUNT query 2. 
Primary resource SELECT + SELECT RELATED writer(author) and bio """ with self.assertNumQueries(2): - response = self.client.get('/comments?include=writer&page[size]=25') - self.assertEqual(len(response.data['results']), 25) + response = self.client.get("/comments?include=writer&page[size]=25") + self.assertEqual(len(response.data["results"]), 25) diff --git a/example/tests/test_relations.py b/example/tests/test_relations.py index ef1dfb02..b83bbef4 100644 --- a/example/tests/test_relations.py +++ b/example/tests/test_relations.py @@ -10,7 +10,7 @@ from rest_framework_json_api.relations import ( HyperlinkedRelatedField, ResourceRelatedField, - SerializerMethodHyperlinkedRelatedField + SerializerMethodHyperlinkedRelatedField, ) from rest_framework_json_api.utils import format_resource_type @@ -21,76 +21,66 @@ class TestResourceRelatedField(TestBase): - def setUp(self): super(TestResourceRelatedField, self).setUp() - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") self.entry = Entry.objects.create( blog=self.blog, - headline='headline', - body_text='body_text', + headline="headline", + body_text="body_text", pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) for i in range(1, 6): - name = 'some_author{}'.format(i) + name = "some_author{}".format(i) self.entry.authors.add( - Author.objects.create(name=name, email='{}@example.org'.format(name)) + Author.objects.create(name=name, email="{}@example.org".format(name)) ) self.comment = Comment.objects.create( entry=self.entry, - body='testing one two three', - author=Author.objects.first() + body="testing one two three", + author=Author.objects.first(), ) def test_data_in_correct_format_when_instantiated_with_blog_object(self): - serializer = BlogFKSerializer(instance={'blog': self.blog}) + serializer = BlogFKSerializer(instance={"blog": self.blog}) - expected_data = { - 'type': format_resource_type('Blog'), - 'id': str(self.blog.id) - } + expected_data = {"type": format_resource_type("Blog"), "id": str(self.blog.id)} - actual_data = serializer.data['blog'] + actual_data = serializer.data["blog"] self.assertEqual(actual_data, expected_data) def test_data_in_correct_format_when_instantiated_with_entry_object(self): - serializer = EntryFKSerializer(instance={'entry': self.entry}) + serializer = EntryFKSerializer(instance={"entry": self.entry}) expected_data = { - 'type': format_resource_type('Entry'), - 'id': str(self.entry.id) + "type": format_resource_type("Entry"), + "id": str(self.entry.id), } - actual_data = serializer.data['entry'] + actual_data = serializer.data["entry"] self.assertEqual(actual_data, expected_data) def test_deserialize_primitive_data_blog(self): - serializer = BlogFKSerializer(data={ - 'blog': { - 'type': format_resource_type('Blog'), - 'id': str(self.blog.id) + serializer = BlogFKSerializer( + data={ + "blog": {"type": format_resource_type("Blog"), "id": str(self.blog.id)} } - } ) self.assertTrue(serializer.is_valid()) - self.assertEqual(serializer.validated_data['blog'], self.blog) + self.assertEqual(serializer.validated_data["blog"], self.blog) def test_validation_fails_for_wrong_type(self): with self.assertRaises(Conflict) as cm: - serializer = BlogFKSerializer(data={ - 'blog': { - 'type': 'Entries', - 'id': str(self.blog.id) - } - } + serializer = BlogFKSerializer( + data={"blog": {"type": "Entries", "id": str(self.blog.id)}} ) serializer.is_valid() the_exception = 
cm.exception @@ -99,150 +89,156 @@ def test_validation_fails_for_wrong_type(self): def test_serialize_many_to_many_relation(self): serializer = EntryModelSerializer(instance=self.entry) - type_string = format_resource_type('Author') - author_pks = Author.objects.values_list('pk', flat=True) - expected_data = [{'type': type_string, 'id': str(pk)} for pk in author_pks] + type_string = format_resource_type("Author") + author_pks = Author.objects.values_list("pk", flat=True) + expected_data = [{"type": type_string, "id": str(pk)} for pk in author_pks] - self.assertEqual( - serializer.data['authors'], - expected_data - ) + self.assertEqual(serializer.data["authors"], expected_data) def test_deserialize_many_to_many_relation(self): - type_string = format_resource_type('Author') - author_pks = Author.objects.values_list('pk', flat=True) - authors = [{'type': type_string, 'id': pk} for pk in author_pks] + type_string = format_resource_type("Author") + author_pks = Author.objects.values_list("pk", flat=True) + authors = [{"type": type_string, "id": pk} for pk in author_pks] - serializer = EntryModelSerializer(data={'authors': authors, 'comments': []}) + serializer = EntryModelSerializer(data={"authors": authors, "comments": []}) self.assertTrue(serializer.is_valid()) - self.assertEqual(len(serializer.validated_data['authors']), Author.objects.count()) - for author in serializer.validated_data['authors']: + self.assertEqual( + len(serializer.validated_data["authors"]), Author.objects.count() + ) + for author in serializer.validated_data["authors"]: self.assertIsInstance(author, Author) def test_read_only(self): serializer = EntryModelSerializer( - data={'authors': [], 'comments': [{'type': 'Comments', 'id': 2}]} + data={"authors": [], "comments": [{"type": "Comments", "id": 2}]} ) serializer.is_valid(raise_exception=True) - self.assertNotIn('comments', serializer.validated_data) + self.assertNotIn("comments", serializer.validated_data) def test_invalid_resource_id_object(self): - comment = {'body': 'testing 123', 'entry': {'type': 'entry'}, 'author': {'id': '5'}} + comment = { + "body": "testing 123", + "entry": {"type": "entry"}, + "author": {"id": "5"}, + } serializer = CommentSerializer(data=comment) assert not serializer.is_valid() assert serializer.errors == { - 'author': ["Invalid resource identifier object: missing 'type' attribute"], - 'entry': ["Invalid resource identifier object: missing 'id' attribute"] + "author": ["Invalid resource identifier object: missing 'type' attribute"], + "entry": ["Invalid resource identifier object: missing 'id' attribute"], } class TestHyperlinkedFieldBase(TestBase): - def setUp(self): super(TestHyperlinkedFieldBase, self).setUp() - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") self.entry = Entry.objects.create( blog=self.blog, - headline='headline', - body_text='body_text', + headline="headline", + body_text="body_text", pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) self.comment = Comment.objects.create( entry=self.entry, - body='testing one two three', + body="testing one two three", ) - self.request = RequestFactory().get(reverse('entry-detail', kwargs={'pk': self.entry.pk})) - self.view = EntryViewSet(request=self.request, kwargs={'entry_pk': self.entry.id}) + self.request = RequestFactory().get( + reverse("entry-detail", kwargs={"pk": self.entry.pk}) + ) + self.view = EntryViewSet( + 
request=self.request, kwargs={"entry_pk": self.entry.id} + ) class TestHyperlinkedRelatedField(TestHyperlinkedFieldBase): - def test_single_hyperlinked_related_field(self): field = HyperlinkedRelatedField( - related_link_view_name='entry-blog', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-blog", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", read_only=True, ) - field._context = {'request': self.request, 'view': self.view} - field.field_name = 'blog' + field._context = {"request": self.request, "view": self.view} + field.field_name = "blog" self.assertRaises(NotImplementedError, field.to_representation, self.entry) self.assertRaises(SkipField, field.get_attribute, self.entry) links_expected = { - 'self': 'http://testserver/entries/{}/relationships/blog'.format(self.entry.pk), - 'related': 'http://testserver/entries/{}/blog'.format(self.entry.pk) + "self": "http://testserver/entries/{}/relationships/blog".format( + self.entry.pk + ), + "related": "http://testserver/entries/{}/blog".format(self.entry.pk), } got = field.get_links(self.entry) self.assertEqual(got, links_expected) def test_many_hyperlinked_related_field(self): field = HyperlinkedRelatedField( - related_link_view_name='entry-comments', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-comments", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", read_only=True, - many=True + many=True, ) - field._context = {'request': self.request, 'view': self.view} - field.field_name = 'comments' + field._context = {"request": self.request, "view": self.view} + field.field_name = "comments" - self.assertRaises(NotImplementedError, field.to_representation, self.entry.comments.all()) + self.assertRaises( + NotImplementedError, field.to_representation, self.entry.comments.all() + ) self.assertRaises(SkipField, field.get_attribute, self.entry) links_expected = { - 'self': 'http://testserver/entries/{}/relationships/comments'.format(self.entry.pk), - 'related': 'http://testserver/entries/{}/comments'.format(self.entry.pk) + "self": "http://testserver/entries/{}/relationships/comments".format( + self.entry.pk + ), + "related": "http://testserver/entries/{}/comments".format(self.entry.pk), } got = field.child_relation.get_links(self.entry) self.assertEqual(got, links_expected) class TestSerializerMethodHyperlinkedRelatedField(TestHyperlinkedFieldBase): - def test_single_serializer_method_hyperlinked_related_field(self): serializer = EntryModelSerializerWithHyperLinks( - instance=self.entry, - context={ - 'request': self.request, - 'view': self.view - } + instance=self.entry, context={"request": self.request, "view": self.view} ) - field = serializer.fields['blog'] + field = serializer.fields["blog"] self.assertRaises(NotImplementedError, field.to_representation, self.entry) self.assertRaises(SkipField, field.get_attribute, self.entry) expected = { - 'self': 'http://testserver/entries/{}/relationships/blog'.format(self.entry.pk), - 'related': 'http://testserver/entries/{}/blog'.format(self.entry.pk) + "self": "http://testserver/entries/{}/relationships/blog".format( + self.entry.pk + ), + "related": "http://testserver/entries/{}/blog".format(self.entry.pk), } got = field.get_links(self.entry) self.assertEqual(got, expected) def test_many_serializer_method_hyperlinked_related_field(self): serializer = EntryModelSerializerWithHyperLinks( - 
instance=self.entry, - context={ - 'request': self.request, - 'view': self.view - } + instance=self.entry, context={"request": self.request, "view": self.view} ) - field = serializer.fields['comments'] + field = serializer.fields["comments"] self.assertRaises(NotImplementedError, field.to_representation, self.entry) self.assertRaises(SkipField, field.get_attribute, self.entry) expected = { - 'self': 'http://testserver/entries/{}/relationships/comments'.format(self.entry.pk), - 'related': 'http://testserver/entries/{}/comments'.format(self.entry.pk) + "self": "http://testserver/entries/{}/relationships/comments".format( + self.entry.pk + ), + "related": "http://testserver/entries/{}/comments".format(self.entry.pk), } got = field.get_links(self.entry) self.assertEqual(got, expected) @@ -281,26 +277,29 @@ class EntryModelSerializer(serializers.ModelSerializer): class Meta: model = Entry - fields = ('authors', 'comments') + fields = ("authors", "comments") class EntryModelSerializerWithHyperLinks(serializers.ModelSerializer): blog = SerializerMethodHyperlinkedRelatedField( - related_link_view_name='entry-blog', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-blog", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", many=True, ) comments = SerializerMethodHyperlinkedRelatedField( - related_link_view_name='entry-comments', - related_link_url_kwarg='entry_pk', - self_link_view_name='entry-relationships', + related_link_view_name="entry-comments", + related_link_url_kwarg="entry_pk", + self_link_view_name="entry-relationships", many=True, ) class Meta: model = Entry - fields = ('blog', 'comments',) + fields = ( + "blog", + "comments", + ) def get_blog(self, obj): return obj.blog diff --git a/example/tests/test_serializers.py b/example/tests/test_serializers.py index de97dcf8..b3b1dc2d 100644 --- a/example/tests/test_serializers.py +++ b/example/tests/test_serializers.py @@ -11,7 +11,7 @@ DateField, ModelSerializer, ResourceIdentifierObjectSerializer, - empty + empty, ) from rest_framework_json_api.utils import format_resource_type @@ -25,37 +25,40 @@ class TestResourceIdentifierObjectSerializer(TestCase): def setUp(self): - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") now = timezone.now() self.entry = Entry.objects.create( blog=self.blog, - headline='headline', - body_text='body_text', + headline="headline", + body_text="body_text", pub_date=now.date(), mod_date=now.date(), n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) for i in range(1, 6): - name = 'some_author{}'.format(i) + name = "some_author{}".format(i) self.entry.authors.add( - Author.objects.create(name=name, email='{}@example.org'.format(name)) + Author.objects.create(name=name, email="{}@example.org".format(name)) ) def test_forward_relationship_not_loaded_when_not_included(self): - to_representation_method = 'example.serializers.TaggedItemSerializer.to_representation' + to_representation_method = ( + "example.serializers.TaggedItemSerializer.to_representation" + ) with mock.patch(to_representation_method) as mocked_serializer: + class EntrySerializer(ModelSerializer): blog = BlogSerializer() class Meta: model = Entry - fields = '__all__' + fields = "__all__" - request_without_includes = Request(request_factory.get('/')) - serializer = EntrySerializer(context={'request': request_without_includes}) + request_without_includes = 
Request(request_factory.get("/")) + serializer = EntrySerializer(context={"request": request_without_includes}) serializer.to_representation(self.entry) mocked_serializer.assert_not_called() @@ -66,62 +69,74 @@ class EntrySerializer(ModelSerializer): class Meta: model = Entry - fields = '__all__' + fields = "__all__" - request_without_includes = Request(request_factory.get('/')) - serializer = EntrySerializer(context={'request': request_without_includes}) + request_without_includes = Request(request_factory.get("/")) + serializer = EntrySerializer(context={"request": request_without_includes}) result = serializer.to_representation(self.entry) # Remove non deterministic fields - result.pop('created_at') - result.pop('modified_at') + result.pop("created_at") + result.pop("modified_at") expected = dict( [ - ('id', 1), - ('blog', dict([ - ('name', 'Some Blog'), - ('tags', []), - ('copyright', 2020), - ('url', 'http://testserver/blogs/1') - ])), - ('headline', 'headline'), - ('body_text', 'body_text'), - ('pub_date', DateField().to_representation(self.entry.pub_date)), - ('mod_date', DateField().to_representation(self.entry.mod_date)), - ('n_comments', 0), - ('n_pingbacks', 0), - ('rating', 3), - ('authors', + ("id", 1), + ( + "blog", + dict( + [ + ("name", "Some Blog"), + ("tags", []), + ("copyright", 2020), + ("url", "http://testserver/blogs/1"), + ] + ), + ), + ("headline", "headline"), + ("body_text", "body_text"), + ("pub_date", DateField().to_representation(self.entry.pub_date)), + ("mod_date", DateField().to_representation(self.entry.mod_date)), + ("n_comments", 0), + ("n_pingbacks", 0), + ("rating", 3), + ( + "authors", [ - dict([('type', 'authors'), ('id', '1')]), - dict([('type', 'authors'), ('id', '2')]), - dict([('type', 'authors'), ('id', '3')]), - dict([('type', 'authors'), ('id', '4')]), - dict([('type', 'authors'), ('id', '5')])])]) + dict([("type", "authors"), ("id", "1")]), + dict([("type", "authors"), ("id", "2")]), + dict([("type", "authors"), ("id", "3")]), + dict([("type", "authors"), ("id", "4")]), + dict([("type", "authors"), ("id", "5")]), + ], + ), + ] + ) self.assertDictEqual(expected, result) def test_data_in_correct_format_when_instantiated_with_blog_object(self): serializer = ResourceIdentifierObjectSerializer(instance=self.blog) - expected_data = {'type': format_resource_type('Blog'), 'id': str(self.blog.id)} + expected_data = {"type": format_resource_type("Blog"), "id": str(self.blog.id)} assert serializer.data == expected_data def test_data_in_correct_format_when_instantiated_with_entry_object(self): serializer = ResourceIdentifierObjectSerializer(instance=self.entry) - expected_data = {'type': format_resource_type('Entry'), 'id': str(self.entry.id)} + expected_data = { + "type": format_resource_type("Entry"), + "id": str(self.entry.id), + } assert serializer.data == expected_data def test_deserialize_primitive_data_blog(self): - initial_data = { - 'type': format_resource_type('Blog'), - 'id': str(self.blog.id) - } - serializer = ResourceIdentifierObjectSerializer(data=initial_data, model_class=Blog) + initial_data = {"type": format_resource_type("Blog"), "id": str(self.blog.id)} + serializer = ResourceIdentifierObjectSerializer( + data=initial_data, model_class=Blog + ) self.assertTrue(serializer.is_valid(), msg=serializer.errors) assert serializer.validated_data == self.blog @@ -131,29 +146,28 @@ def test_deserialize_primitive_data_blog_with_unexisting_pk(self): self.blog.delete() assert not Blog.objects.filter(id=unexisting_pk).exists() - initial_data = { - 
'type': format_resource_type('Blog'), - 'id': str(unexisting_pk) - } - serializer = ResourceIdentifierObjectSerializer(data=initial_data, model_class=Blog) + initial_data = {"type": format_resource_type("Blog"), "id": str(unexisting_pk)} + serializer = ResourceIdentifierObjectSerializer( + data=initial_data, model_class=Blog + ) self.assertFalse(serializer.is_valid()) - self.assertEqual(serializer.errors[0].code, 'does_not_exist') + self.assertEqual(serializer.errors[0].code, "does_not_exist") def test_data_in_correct_format_when_instantiated_with_queryset(self): qs = Author.objects.all() serializer = ResourceIdentifierObjectSerializer(instance=qs, many=True) - type_string = format_resource_type('Author') - author_pks = Author.objects.values_list('pk', flat=True) - expected_data = [{'type': type_string, 'id': str(pk)} for pk in author_pks] + type_string = format_resource_type("Author") + author_pks = Author.objects.values_list("pk", flat=True) + expected_data = [{"type": type_string, "id": str(pk)} for pk in author_pks] assert serializer.data == expected_data def test_deserialize_many(self): - type_string = format_resource_type('Author') - author_pks = Author.objects.values_list('pk', flat=True) - initial_data = [{'type': type_string, 'id': str(pk)} for pk in author_pks] + type_string = format_resource_type("Author") + author_pks = Author.objects.values_list("pk", flat=True) + initial_data = [{"type": type_string, "id": str(pk)} for pk in author_pks] serializer = ResourceIdentifierObjectSerializer( data=initial_data, model_class=Author, many=True @@ -170,33 +184,20 @@ def test_model_serializer_with_implicit_fields(self, comment, client): "data": { "type": "comments", "id": str(comment.pk), - "attributes": { - "body": comment.body - }, + "attributes": {"body": comment.body}, "relationships": { - "entry": { - "data": { - "type": "entries", - "id": str(comment.entry.pk) - } - }, + "entry": {"data": {"type": "entries", "id": str(comment.entry.pk)}}, "author": { - "data": { - "type": "authors", - "id": str(comment.author.pk) - } + "data": {"type": "authors", "id": str(comment.author.pk)} }, "writer": { - "data": { - "type": "writers", - "id": str(comment.author.pk) - } + "data": {"type": "writers", "id": str(comment.author.pk)} }, - } + }, } } - response = client.get(reverse("comment-detail", kwargs={'pk': comment.pk})) + response = client.get(reverse("comment-detail", kwargs={"pk": comment.pk})) assert response.status_code == 200 assert expected == response.json() diff --git a/example/tests/test_sideload_resources.py b/example/tests/test_sideload_resources.py index 69641af7..5ca96afe 100644 --- a/example/tests/test_sideload_resources.py +++ b/example/tests/test_sideload_resources.py @@ -13,7 +13,8 @@ class SideloadResourceTest(TestBase): """ Test that sideloading resources returns expected output. """ - url = reverse('user-posts') + + url = reverse("user-posts") def test_get_sideloaded_data(self): """ @@ -21,9 +22,9 @@ def test_get_sideloaded_data(self): do not return a single root key. 
""" response = self.client.get(self.url) - content = json.loads(response.content.decode('utf8')) + content = json.loads(response.content.decode("utf8")) self.assertEqual( sorted(content.keys()), - [encoding.force_str('identities'), - encoding.force_str('posts')]) + [encoding.force_str("identities"), encoding.force_str("posts")], + ) diff --git a/example/tests/test_views.py b/example/tests/test_views.py index 25eeca89..4b494b52 100644 --- a/example/tests/test_views.py +++ b/example/tests/test_views.py @@ -16,142 +16,164 @@ from example.factories import AuthorFactory, CommentFactory, EntryFactory from example.models import Author, Blog, Comment, Entry -from example.serializers import AuthorBioSerializer, AuthorTypeSerializer, EntrySerializer +from example.serializers import ( + AuthorBioSerializer, + AuthorTypeSerializer, + EntrySerializer, +) from example.tests import TestBase from example.views import AuthorViewSet, BlogViewSet class TestRelationshipView(APITestCase): def setUp(self): - self.author = Author.objects.create(name='Super powerful superhero', email='i.am@lost.com') - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") - self.other_blog = Blog.objects.create(name='Other blog', tagline="It's another blog") + self.author = Author.objects.create( + name="Super powerful superhero", email="i.am@lost.com" + ) + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") + self.other_blog = Blog.objects.create( + name="Other blog", tagline="It's another blog" + ) self.first_entry = Entry.objects.create( blog=self.blog, - headline='headline one', - body_text='body_text two', + headline="headline one", + body_text="body_text two", pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) self.second_entry = Entry.objects.create( blog=self.blog, - headline='headline two', - body_text='body_text one', + headline="headline two", + body_text="body_text one", pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=1 + rating=1, ) self.first_comment = Comment.objects.create( entry=self.first_entry, body="This entry is cool", author=None ) self.second_comment = Comment.objects.create( - entry=self.second_entry, - body="This entry is not cool", - author=self.author + entry=self.second_entry, body="This entry is not cool", author=self.author ) def test_get_entry_relationship_blog(self): url = reverse( - 'entry-relationships', kwargs={'pk': self.first_entry.id, 'related_field': 'blog'} + "entry-relationships", + kwargs={"pk": self.first_entry.id, "related_field": "blog"}, ) response = self.client.get(url) - expected_data = {'type': format_resource_type('Blog'), 'id': str(self.first_entry.blog.id)} + expected_data = { + "type": format_resource_type("Blog"), + "id": str(self.first_entry.blog.id), + } assert response.data == expected_data def test_get_entry_relationship_invalid_field(self): response = self.client.get( - '/entries/{}/relationships/invalid_field'.format(self.first_entry.id) + "/entries/{}/relationships/invalid_field".format(self.first_entry.id) ) assert response.status_code == 404 def test_get_blog_relationship_entry_set(self): - response = self.client.get('/blogs/{}/relationships/entry_set'.format(self.blog.id)) - expected_data = [{'type': format_resource_type('Entry'), 'id': str(self.first_entry.id)}, - {'type': format_resource_type('Entry'), 'id': str(self.second_entry.id)}] + response = self.client.get( + "/blogs/{}/relationships/entry_set".format(self.blog.id) + ) + 
expected_data = [ + {"type": format_resource_type("Entry"), "id": str(self.first_entry.id)}, + {"type": format_resource_type("Entry"), "id": str(self.second_entry.id)}, + ] assert response.data == expected_data def test_put_entry_relationship_blog_returns_405(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) response = self.client.put(url, data={}) assert response.status_code == 405 def test_patch_invalid_entry_relationship_blog_returns_400(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) - response = self.client.patch(url, data={'data': {'invalid': ''}}) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) + response = self.client.patch(url, data={"data": {"invalid": ""}}) assert response.status_code == 400 def test_relationship_view_errors_format(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) - response = self.client.patch(url, data={'data': {'invalid': ''}}) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) + response = self.client.patch(url, data={"data": {"invalid": ""}}) assert response.status_code == 400 - result = json.loads(response.content.decode('utf-8')) + result = json.loads(response.content.decode("utf-8")) - assert 'data' not in result - assert 'errors' in result + assert "data" not in result + assert "errors" in result def test_get_empty_to_one_relationship(self): - url = '/comments/{}/relationships/author'.format(self.first_entry.id) + url = "/comments/{}/relationships/author".format(self.first_entry.id) response = self.client.get(url) expected_data = None assert response.data == expected_data def test_get_to_many_relationship_self_link(self): - url = '/authors/{}/relationships/comments'.format(self.author.id) + url = "/authors/{}/relationships/comments".format(self.author.id) response = self.client.get(url) expected_data = { - 'links': {'self': 'http://testserver/authors/1/relationships/comments'}, - 'data': [{'id': str(self.second_comment.id), 'type': format_resource_type('Comment')}] + "links": {"self": "http://testserver/authors/1/relationships/comments"}, + "data": [ + { + "id": str(self.second_comment.id), + "type": format_resource_type("Comment"), + } + ], } - assert json.loads(response.content.decode('utf-8')) == expected_data + assert json.loads(response.content.decode("utf-8")) == expected_data def test_patch_to_one_relationship(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) request_data = { - 'data': {'type': format_resource_type('Blog'), 'id': str(self.other_blog.id)} + "data": { + "type": format_resource_type("Blog"), + "id": str(self.other_blog.id), + } } response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data == request_data['data'] + assert response.data == request_data["data"] response = self.client.get(url) - assert response.data == request_data['data'] + assert response.data == request_data["data"] def test_patch_one_to_many_relationship(self): - url = '/blogs/{}/relationships/entry_set'.format(self.first_entry.id) + url = "/blogs/{}/relationships/entry_set".format(self.first_entry.id) request_data = { - 'data': [{'type': format_resource_type('Entry'), 'id': str(self.first_entry.id)}, ] + "data": [ + {"type": format_resource_type("Entry"), "id": str(self.first_entry.id)}, + ] } response = 
self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data == request_data['data'] + assert response.data == request_data["data"] response = self.client.get(url) - assert response.data == request_data['data'] + assert response.data == request_data["data"] # retry a second time should end up with same result response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data == request_data['data'] + assert response.data == request_data["data"] response = self.client.get(url) - assert response.data == request_data['data'] + assert response.data == request_data["data"] def test_patch_one_to_many_relaitonship_with_none(self): - url = '/blogs/{}/relationships/entry_set'.format(self.first_entry.id) - request_data = { - 'data': None - } + url = "/blogs/{}/relationships/entry_set".format(self.first_entry.id) + request_data = {"data": None} response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() assert response.data == [] @@ -160,103 +182,127 @@ def test_patch_one_to_many_relaitonship_with_none(self): assert response.data == [] def test_patch_many_to_many_relationship(self): - url = '/entries/{}/relationships/authors'.format(self.first_entry.id) + url = "/entries/{}/relationships/authors".format(self.first_entry.id) request_data = { - 'data': [ - { - 'type': format_resource_type('Author'), - 'id': str(self.author.id) - }, + "data": [ + {"type": format_resource_type("Author"), "id": str(self.author.id)}, ] } response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data == request_data['data'] + assert response.data == request_data["data"] response = self.client.get(url) - assert response.data == request_data['data'] + assert response.data == request_data["data"] # retry a second time should end up with same result response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data == request_data['data'] + assert response.data == request_data["data"] response = self.client.get(url) - assert response.data == request_data['data'] + assert response.data == request_data["data"] def test_post_to_one_relationship_should_fail(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) request_data = { - 'data': {'type': format_resource_type('Blog'), 'id': str(self.other_blog.id)} + "data": { + "type": format_resource_type("Blog"), + "id": str(self.other_blog.id), + } } response = self.client.post(url, data=request_data) assert response.status_code == 405, response.content.decode() def test_post_to_many_relationship_with_no_change(self): - url = '/entries/{}/relationships/comments'.format(self.first_entry.id) + url = "/entries/{}/relationships/comments".format(self.first_entry.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(self.first_comment.id)}, ] + "data": [ + { + "type": format_resource_type("Comment"), + "id": str(self.first_comment.id), + }, + ] } response = self.client.post(url, data=request_data) assert response.status_code == 204, response.content.decode() assert len(response.rendered_content) == 0, response.rendered_content.decode() def test_post_to_many_relationship_with_change(self): - url = 
'/entries/{}/relationships/comments'.format(self.first_entry.id) + url = "/entries/{}/relationships/comments".format(self.first_entry.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(self.second_comment.id)}, ] + "data": [ + { + "type": format_resource_type("Comment"), + "id": str(self.second_comment.id), + }, + ] } response = self.client.post(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert request_data['data'][0] in response.data + assert request_data["data"][0] in response.data def test_delete_to_one_relationship_should_fail(self): - url = '/entries/{}/relationships/blog'.format(self.first_entry.id) + url = "/entries/{}/relationships/blog".format(self.first_entry.id) request_data = { - 'data': {'type': format_resource_type('Blog'), 'id': str(self.other_blog.id)} + "data": { + "type": format_resource_type("Blog"), + "id": str(self.other_blog.id), + } } response = self.client.delete(url, data=request_data) assert response.status_code == 405, response.content.decode() def test_delete_relationship_overriding_with_none(self): - url = '/comments/{}'.format(self.second_comment.id) + url = "/comments/{}".format(self.second_comment.id) request_data = { - 'data': { - 'type': 'comments', - 'id': self.second_comment.id, - 'relationships': { - 'author': { - 'data': None - } - } + "data": { + "type": "comments", + "id": self.second_comment.id, + "relationships": {"author": {"data": None}}, } } response = self.client.patch(url, data=request_data) assert response.status_code == 200, response.content.decode() - assert response.data['author'] is None + assert response.data["author"] is None def test_delete_to_many_relationship_with_no_change(self): - url = '/entries/{}/relationships/comments'.format(self.first_entry.id) + url = "/entries/{}/relationships/comments".format(self.first_entry.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(self.second_comment.id)}, ] + "data": [ + { + "type": format_resource_type("Comment"), + "id": str(self.second_comment.id), + }, + ] } response = self.client.delete(url, data=request_data) assert response.status_code == 204, response.content.decode() assert len(response.rendered_content) == 0, response.rendered_content.decode() def test_delete_one_to_many_relationship_with_not_null_constraint(self): - url = '/entries/{}/relationships/comments'.format(self.first_entry.id) + url = "/entries/{}/relationships/comments".format(self.first_entry.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(self.first_comment.id)}, ] + "data": [ + { + "type": format_resource_type("Comment"), + "id": str(self.first_comment.id), + }, + ] } response = self.client.delete(url, data=request_data) assert response.status_code == 409, response.content.decode() def test_delete_to_many_relationship_with_change(self): - url = '/authors/{}/relationships/comments'.format(self.author.id) + url = "/authors/{}/relationships/comments".format(self.author.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(self.second_comment.id)}, ] + "data": [ + { + "type": format_resource_type("Comment"), + "id": str(self.second_comment.id), + }, + ] } response = self.client.delete(url, data=request_data) assert response.status_code == 200, response.content.decode() @@ -265,21 +311,19 @@ def test_new_comment_data_patch_to_many_relationship(self): entry = EntryFactory(blog=self.blog, authors=(self.author,)) comment = CommentFactory(entry=entry) - 
url = '/authors/{}/relationships/comments'.format(self.author.id) + url = "/authors/{}/relationships/comments".format(self.author.id) request_data = { - 'data': [{'type': format_resource_type('Comment'), 'id': str(comment.id)}, ] + "data": [ + {"type": format_resource_type("Comment"), "id": str(comment.id)}, + ] } previous_response = { - 'data': [ - {'type': 'comments', - 'id': str(self.second_comment.id) - } - ], - 'links': { - 'self': 'http://testserver/authors/{}/relationships/comments'.format( + "data": [{"type": "comments", "id": str(self.second_comment.id)}], + "links": { + "self": "http://testserver/authors/{}/relationships/comments".format( self.author.id ) - } + }, } response = self.client.get(url) @@ -287,16 +331,12 @@ def test_new_comment_data_patch_to_many_relationship(self): assert response.json() == previous_response new_patched_response = { - 'data': [ - {'type': 'comments', - 'id': str(comment.id) - } - ], - 'links': { - 'self': 'http://testserver/authors/{}/relationships/comments'.format( + "data": [{"type": "comments", "id": str(comment.id)}], + "links": { + "self": "http://testserver/authors/{}/relationships/comments".format( self.author.id ) - } + }, } response = self.client.patch(url, data=request_data) @@ -307,21 +347,19 @@ def test_new_comment_data_patch_to_many_relationship(self): def test_options_entry_relationship_blog(self): url = reverse( - 'entry-relationships', kwargs={'pk': self.first_entry.id, 'related_field': 'blog'} + "entry-relationships", + kwargs={"pk": self.first_entry.id, "related_field": "blog"}, ) response = self.client.options(url) expected_data = { "data": { "name": "Entry Relationship", "description": "", - "renders": [ - "application/vnd.api+json", - "text/html" - ], + "renders": ["application/vnd.api+json", "text/html"], "parses": [ "application/vnd.api+json", "application/x-www-form-urlencoded", - "multipart/form-data" + "multipart/form-data", ], "allowed_methods": [ "GET", @@ -329,99 +367,103 @@ def test_options_entry_relationship_blog(self): "PATCH", "DELETE", "HEAD", - "OPTIONS" + "OPTIONS", ], - "actions": { - "POST": {} - } + "actions": {"POST": {}}, } } assert response.json() == expected_data class TestRelatedMixin(APITestCase): - def setUp(self): self.author = AuthorFactory() def _get_view(self, kwargs): factory = APIRequestFactory() - request = Request(factory.get('', content_type='application/vnd.api+json')) + request = Request(factory.get("", content_type="application/vnd.api+json")) return AuthorViewSet(request=request, kwargs=kwargs) def test_get_related_field_name(self): - kwargs = {'pk': self.author.id, 'related_field': 'bio'} + kwargs = {"pk": self.author.id, "related_field": "bio"} view = self._get_view(kwargs) got = view.get_related_field_name() - self.assertEqual(got, kwargs['related_field']) + self.assertEqual(got, kwargs["related_field"]) def test_get_related_instance_serializer_field(self): - kwargs = {'pk': self.author.id, 'related_field': 'bio'} + kwargs = {"pk": self.author.id, "related_field": "bio"} view = self._get_view(kwargs) got = view.get_related_instance() self.assertEqual(got, self.author.bio) def test_get_related_instance_model_field(self): - kwargs = {'pk': self.author.id, 'related_field': 'id'} + kwargs = {"pk": self.author.id, "related_field": "id"} view = self._get_view(kwargs) got = view.get_related_instance() self.assertEqual(got, self.author.id) def test_get_related_serializer_class(self): - kwargs = {'pk': self.author.id, 'related_field': 'bio'} + kwargs = {"pk": self.author.id, "related_field": "bio"} 
view = self._get_view(kwargs) got = view.get_related_serializer_class() self.assertEqual(got, AuthorBioSerializer) def test_get_related_serializer_class_many(self): - kwargs = {'pk': self.author.id, 'related_field': 'entries'} + kwargs = {"pk": self.author.id, "related_field": "entries"} view = self._get_view(kwargs) got = view.get_related_serializer_class() self.assertEqual(got, EntrySerializer) def test_get_serializer_comes_from_included_serializers(self): - kwargs = {'pk': self.author.id, 'related_field': 'type'} + kwargs = {"pk": self.author.id, "related_field": "type"} view = self._get_view(kwargs) related_serializers = view.serializer_class.related_serializers - delattr(view.serializer_class, 'related_serializers') + delattr(view.serializer_class, "related_serializers") got = view.get_related_serializer_class() self.assertEqual(got, AuthorTypeSerializer) view.serializer_class.related_serializers = related_serializers def test_get_related_serializer_class_raises_error(self): - kwargs = {'pk': self.author.id, 'related_field': 'unknown'} + kwargs = {"pk": self.author.id, "related_field": "unknown"} view = self._get_view(kwargs) self.assertRaises(NotFound, view.get_related_serializer_class) def test_retrieve_related_single_reverse_lookup(self): - url = reverse('author-related', kwargs={'pk': self.author.pk, 'related_field': 'bio'}) + url = reverse( + "author-related", kwargs={"pk": self.author.pk, "related_field": "bio"} + ) resp = self.client.get(url) expected = { - 'data': { - 'type': 'authorBios', 'id': str(self.author.bio.id), - 'relationships': { - 'author': {'data': {'type': 'authors', 'id': str(self.author.id)}}, - 'metadata': {'data': {'id': str(self.author.bio.metadata.id), - 'type': 'authorBioMetadata'}} - }, - 'attributes': { - 'body': str(self.author.bio.body) + "data": { + "type": "authorBios", + "id": str(self.author.bio.id), + "relationships": { + "author": {"data": {"type": "authors", "id": str(self.author.id)}}, + "metadata": { + "data": { + "id": str(self.author.bio.metadata.id), + "type": "authorBioMetadata", + } + }, }, + "attributes": {"body": str(self.author.bio.body)}, } } self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), expected) def test_retrieve_related_single(self): - url = reverse('author-related', kwargs={'pk': self.author.type.pk, 'related_field': 'type'}) + url = reverse( + "author-related", + kwargs={"pk": self.author.type.pk, "related_field": "type"}, + ) resp = self.client.get(url) expected = { - 'data': { - 'type': 'authorTypes', 'id': str(self.author.type.id), - 'attributes': { - 'name': str(self.author.type.name) - }, + "data": { + "type": "authorTypes", + "id": str(self.author.type.id), + "attributes": {"name": str(self.author.type.name)}, } } self.assertEqual(resp.status_code, 200) @@ -429,211 +471,219 @@ def test_retrieve_related_single(self): def test_retrieve_related_many(self): entry = EntryFactory(authors=self.author) - url = reverse('author-related', kwargs={'pk': self.author.pk, 'related_field': 'entries'}) + url = reverse( + "author-related", kwargs={"pk": self.author.pk, "related_field": "entries"} + ) resp = self.client.get(url) self.assertEqual(resp.status_code, 200) - self.assertTrue(isinstance(resp.json()['data'], list)) - self.assertEqual(len(resp.json()['data']), 1) - self.assertEqual(resp.json()['data'][0]['id'], str(entry.id)) + self.assertTrue(isinstance(resp.json()["data"], list)) + self.assertEqual(len(resp.json()["data"]), 1) + self.assertEqual(resp.json()["data"][0]["id"], str(entry.id)) def 
test_retrieve_related_many_hyperlinked(self): comment = CommentFactory(author=self.author) - url = reverse('author-related', kwargs={'pk': self.author.pk, 'related_field': 'comments'}) + url = reverse( + "author-related", kwargs={"pk": self.author.pk, "related_field": "comments"} + ) resp = self.client.get(url) self.assertEqual(resp.status_code, 200) - self.assertTrue(isinstance(resp.json()['data'], list)) - self.assertEqual(len(resp.json()['data']), 1) - self.assertEqual(resp.json()['data'][0]['id'], str(comment.id)) + self.assertTrue(isinstance(resp.json()["data"], list)) + self.assertEqual(len(resp.json()["data"]), 1) + self.assertEqual(resp.json()["data"][0]["id"], str(comment.id)) def test_retrieve_related_None(self): - kwargs = {'pk': self.author.pk, 'related_field': 'first_entry'} - url = reverse('author-related', kwargs=kwargs) + kwargs = {"pk": self.author.pk, "related_field": "first_entry"} + url = reverse("author-related", kwargs=kwargs) resp = self.client.get(url) self.assertEqual(resp.status_code, 200) - self.assertEqual(resp.json(), {'data': None}) + self.assertEqual(resp.json(), {"data": None}) class TestValidationErrorResponses(TestBase): def test_if_returns_error_on_empty_post(self): - view = BlogViewSet.as_view({'post': 'create'}) + view = BlogViewSet.as_view({"post": "create"}) response = self._get_create_response("{}", view) self.assertEqual(400, response.status_code) - expected = [{ - 'detail': 'Received document does not contain primary data', - 'status': '400', - 'source': {'pointer': '/data'}, - 'code': 'parse_error', - }] + expected = [ + { + "detail": "Received document does not contain primary data", + "status": "400", + "source": {"pointer": "/data"}, + "code": "parse_error", + } + ] self.assertEqual(expected, response.data) def test_if_returns_error_on_missing_form_data_post(self): - view = BlogViewSet.as_view({'post': 'create'}) - response = self._get_create_response('{"data":{"attributes":{},"type":"blogs"}}', view) + view = BlogViewSet.as_view({"post": "create"}) + response = self._get_create_response( + '{"data":{"attributes":{},"type":"blogs"}}', view + ) self.assertEqual(400, response.status_code) - expected = [{ - 'status': '400', - 'detail': 'This field is required.', - 'source': {'pointer': '/data/attributes/name'}, - 'code': 'required', - }] + expected = [ + { + "status": "400", + "detail": "This field is required.", + "source": {"pointer": "/data/attributes/name"}, + "code": "required", + } + ] self.assertEqual(expected, response.data) def test_if_returns_error_on_bad_endpoint_name(self): - view = BlogViewSet.as_view({'post': 'create'}) - response = self._get_create_response('{"data":{"attributes":{},"type":"bad"}}', view) + view = BlogViewSet.as_view({"post": "create"}) + response = self._get_create_response( + '{"data":{"attributes":{},"type":"bad"}}', view + ) self.assertEqual(409, response.status_code) - expected = [{ - 'detail': ( - "The resource object's type (bad) is not the type that constitute the collection " - "represented by the endpoint (blogs)." - ), - 'source': {'pointer': '/data'}, - 'status': '409', - 'code': 'error', - }] + expected = [ + { + "detail": ( + "The resource object's type (bad) is not the type that constitute the collection " + "represented by the endpoint (blogs)." 
+ ), + "source": {"pointer": "/data"}, + "status": "409", + "code": "error", + } + ] self.assertEqual(expected, response.data) def _get_create_response(self, data, view): factory = RequestFactory() - request = factory.post('/', data, content_type='application/vnd.api+json') - user = self.create_user('user', 'pass') + request = factory.post("/", data, content_type="application/vnd.api+json") + user = self.create_user("user", "pass") force_authenticate(request, user) return view(request) class TestModelViewSet(TestBase): def setUp(self): - self.author = Author.objects.create(name='Super powerful superhero', email='i.am@lost.com') - self.blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") + self.author = Author.objects.create( + name="Super powerful superhero", email="i.am@lost.com" + ) + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") def test_no_content_response(self): - url = '/blogs/{}'.format(self.blog.pk) + url = "/blogs/{}".format(self.blog.pk) response = self.client.delete(url) assert response.status_code == 204, response.rendered_content.decode() assert len(response.rendered_content) == 0, response.rendered_content.decode() class TestBlogViewSet(APITestCase): - def setUp(self): - self.blog = Blog.objects.create( - name='Some Blog', - tagline="It's a blog" - ) + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") self.entry = Entry.objects.create( blog=self.blog, - headline='headline one', - body_text='body_text two', + headline="headline one", + body_text="body_text two", ) def test_get_object_gives_correct_blog(self): - url = reverse('entry-blog', kwargs={'entry_pk': self.entry.id}) + url = reverse("entry-blog", kwargs={"entry_pk": self.entry.id}) resp = self.client.get(url) expected = { - 'data': { - 'attributes': {'name': self.blog.name}, - 'id': '{}'.format(self.blog.id), - 'links': {'self': 'http://testserver/blogs/{}'.format(self.blog.id)}, - 'meta': {'copyright': datetime.now().year}, - 'relationships': {'tags': {'data': [], 'meta': {'count': 0}}}, - 'type': 'blogs' + "data": { + "attributes": {"name": self.blog.name}, + "id": "{}".format(self.blog.id), + "links": {"self": "http://testserver/blogs/{}".format(self.blog.id)}, + "meta": {"copyright": datetime.now().year}, + "relationships": {"tags": {"data": [], "meta": {"count": 0}}}, + "type": "blogs", }, - 'meta': {'apiDocs': '/docs/api/blogs'} + "meta": {"apiDocs": "/docs/api/blogs"}, } got = resp.json() self.assertEqual(got, expected) class TestEntryViewSet(APITestCase): - def setUp(self): - self.blog = Blog.objects.create( - name='Some Blog', - tagline="It's a blog" - ) + self.blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") self.first_entry = Entry.objects.create( blog=self.blog, - headline='headline two', - body_text='body_text two', + headline="headline two", + body_text="body_text two", ) self.second_entry = Entry.objects.create( blog=self.blog, - headline='headline two', - body_text='body_text two', + headline="headline two", + body_text="body_text two", ) self.maxDiff = None def test_get_object_gives_correct_entry(self): - url = reverse('entry-featured', kwargs={'entry_pk': self.first_entry.id}) + url = reverse("entry-featured", kwargs={"entry_pk": self.first_entry.id}) resp = self.client.get(url) expected = { - 'data': { - 'attributes': { - 'bodyText': self.second_entry.body_text, - 'headline': self.second_entry.headline, - 'modDate': self.second_entry.mod_date, - 'pubDate': self.second_entry.pub_date + "data": { + "attributes": { + 
"bodyText": self.second_entry.body_text, + "headline": self.second_entry.headline, + "modDate": self.second_entry.mod_date, + "pubDate": self.second_entry.pub_date, }, - 'id': '{}'.format(self.second_entry.id), - 'meta': {'bodyFormat': 'text'}, - 'relationships': { - 'authors': {'data': [], 'meta': {'count': 0}}, - 'blog': { - 'data': { - 'id': '{}'.format(self.second_entry.blog_id), - 'type': 'blogs' + "id": "{}".format(self.second_entry.id), + "meta": {"bodyFormat": "text"}, + "relationships": { + "authors": {"data": [], "meta": {"count": 0}}, + "blog": { + "data": { + "id": "{}".format(self.second_entry.blog_id), + "type": "blogs", } }, - 'blogHyperlinked': { - 'links': { - 'related': 'http://testserver/entries/{}' - '/blog'.format(self.second_entry.id), - 'self': 'http://testserver/entries/{}' - '/relationships/blog_hyperlinked'.format(self.second_entry.id) + "blogHyperlinked": { + "links": { + "related": "http://testserver/entries/{}" + "/blog".format(self.second_entry.id), + "self": "http://testserver/entries/{}" + "/relationships/blog_hyperlinked".format( + self.second_entry.id + ), } }, - 'comments': { - 'data': [], - 'meta': {'count': 0} - }, - 'commentsHyperlinked': { - 'links': { - 'related': 'http://testserver/entries/{}' - '/comments'.format(self.second_entry.id), - 'self': 'http://testserver/entries/{}/relationships' - '/comments_hyperlinked'.format(self.second_entry.id) + "comments": {"data": [], "meta": {"count": 0}}, + "commentsHyperlinked": { + "links": { + "related": "http://testserver/entries/{}" + "/comments".format(self.second_entry.id), + "self": "http://testserver/entries/{}/relationships" + "/comments_hyperlinked".format(self.second_entry.id), } }, - 'featuredHyperlinked': { - 'links': { - 'related': 'http://testserver/entries/{}' - '/featured'.format(self.second_entry.id), - 'self': 'http://testserver/entries/{}/relationships' - '/featured_hyperlinked'.format(self.second_entry.id) + "featuredHyperlinked": { + "links": { + "related": "http://testserver/entries/{}" + "/featured".format(self.second_entry.id), + "self": "http://testserver/entries/{}/relationships" + "/featured_hyperlinked".format(self.second_entry.id), } }, - 'suggested': { - 'data': [{'id': '1', 'type': 'entries'}], - 'links': { - 'related': 'http://testserver/entries/{}' - '/suggested/'.format(self.second_entry.id), - 'self': 'http://testserver/entries/{}' - '/relationships/suggested'.format(self.second_entry.id) - } + "suggested": { + "data": [{"id": "1", "type": "entries"}], + "links": { + "related": "http://testserver/entries/{}" + "/suggested/".format(self.second_entry.id), + "self": "http://testserver/entries/{}" + "/relationships/suggested".format(self.second_entry.id), + }, }, - 'suggestedHyperlinked': { - 'links': { - 'related': 'http://testserver/entries/{}' - '/suggested/'.format(self.second_entry.id), - 'self': 'http://testserver/entries/{}/relationships' - '/suggested_hyperlinked'.format(self.second_entry.id) + "suggestedHyperlinked": { + "links": { + "related": "http://testserver/entries/{}" + "/suggested/".format(self.second_entry.id), + "self": "http://testserver/entries/{}/relationships" + "/suggested_hyperlinked".format(self.second_entry.id), } }, - 'tags': {'data': [], 'meta': {'count': 0}}}, - 'type': 'posts' + "tags": {"data": [], "meta": {"count": 0}}, + }, + "type": "posts", } } got = resp.json() @@ -643,74 +693,75 @@ def test_get_object_gives_correct_entry(self): class BasicAuthorSerializer(serializers.ModelSerializer): class Meta: model = Author - fields = ('name',) + fields 
= ("name",) class ReadOnlyViewSetWithCustomActions(views.ReadOnlyModelViewSet): queryset = Author.objects.all() serializer_class = BasicAuthorSerializer - @action(detail=False, methods=['get', 'post', 'patch', 'delete']) + @action(detail=False, methods=["get", "post", "patch", "delete"]) def group_action(self, request): return Response(status=status.HTTP_204_NO_CONTENT) - @action(detail=True, methods=['get', 'post', 'patch', 'delete']) + @action(detail=True, methods=["get", "post", "patch", "delete"]) def item_action(self, request, pk): return Response(status=status.HTTP_204_NO_CONTENT) class TestReadonlyModelViewSet(TestBase): """ - Test if ReadOnlyModelViewSet allows to have custom actions with POST, PATCH, DELETE methods + Test if ReadOnlyModelViewSet allows to have custom actions with POST, PATCH, DELETE methods """ + factory = RequestFactory() viewset_class = ReadOnlyViewSetWithCustomActions - media_type = 'application/vnd.api+json' + media_type = "application/vnd.api+json" def test_group_action_allows_get(self): - view = self.viewset_class.as_view({'get': 'group_action'}) - request = self.factory.get('/') + view = self.viewset_class.as_view({"get": "group_action"}) + request = self.factory.get("/") response = view(request) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_group_action_allows_post(self): - view = self.viewset_class.as_view({'post': 'group_action'}) - request = self.factory.post('/', '{}', content_type=self.media_type) + view = self.viewset_class.as_view({"post": "group_action"}) + request = self.factory.post("/", "{}", content_type=self.media_type) response = view(request) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_group_action_allows_patch(self): - view = self.viewset_class.as_view({'patch': 'group_action'}) - request = self.factory.patch('/', '{}', content_type=self.media_type) + view = self.viewset_class.as_view({"patch": "group_action"}) + request = self.factory.patch("/", "{}", content_type=self.media_type) response = view(request) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_group_action_allows_delete(self): - view = self.viewset_class.as_view({'delete': 'group_action'}) - request = self.factory.delete('/', '{}', content_type=self.media_type) + view = self.viewset_class.as_view({"delete": "group_action"}) + request = self.factory.delete("/", "{}", content_type=self.media_type) response = view(request) self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_item_action_allows_get(self): - view = self.viewset_class.as_view({'get': 'item_action'}) - request = self.factory.get('/') - response = view(request, pk='1') + view = self.viewset_class.as_view({"get": "item_action"}) + request = self.factory.get("/") + response = view(request, pk="1") self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_item_action_allows_post(self): - view = self.viewset_class.as_view({'post': 'item_action'}) - request = self.factory.post('/', '{}', content_type=self.media_type) - response = view(request, pk='1') + view = self.viewset_class.as_view({"post": "item_action"}) + request = self.factory.post("/", "{}", content_type=self.media_type) + response = view(request, pk="1") self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_item_action_allows_patch(self): - view = self.viewset_class.as_view({'patch': 'item_action'}) - request = self.factory.patch('/', '{}', content_type=self.media_type) - response = view(request, pk='1') + view = 
self.viewset_class.as_view({"patch": "item_action"}) + request = self.factory.patch("/", "{}", content_type=self.media_type) + response = view(request, pk="1") self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) def test_item_action_allows_delete(self): - view = self.viewset_class.as_view({'delete': 'item_action'}) - request = self.factory.delete('/', '{}', content_type=self.media_type) - response = view(request, pk='1') + view = self.viewset_class.as_view({"delete": "item_action"}) + request = self.factory.delete("/", "{}", content_type=self.media_type) + response = view(request, pk="1") self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code) diff --git a/example/tests/unit/test_default_drf_serializers.py b/example/tests/unit/test_default_drf_serializers.py index 3233ce84..e8c78df8 100644 --- a/example/tests/unit/test_default_drf_serializers.py +++ b/example/tests/unit/test_default_drf_serializers.py @@ -15,7 +15,7 @@ class RelatedModelSerializer(ModelSerializer): class Meta: model = Comment - fields = ('id',) + fields = ("id",) class DummyTestSerializer(ModelSerializer): @@ -23,16 +23,19 @@ class DummyTestSerializer(ModelSerializer): This serializer is a simple compound document serializer which includes only a single embedded relation """ - related_models = RelatedModelSerializer(source='comments', many=True, read_only=True) + + related_models = RelatedModelSerializer( + source="comments", many=True, read_only=True + ) json_field = SerializerMethodField() def get_json_field(self, entry): - return {'JsonKey': 'JsonValue'} + return {"JsonKey": "JsonValue"} class Meta: model = Entry - fields = ('related_models', 'json_field') + fields = ("related_models", "json_field") # views @@ -44,9 +47,7 @@ class DummyTestViewSet(viewsets.ModelViewSet): def render_dummy_test_serialized_view(view_class): serializer = DummyTestSerializer(instance=Entry()) renderer = JSONRenderer() - return renderer.render( - serializer.data, - renderer_context={'view': view_class()}) + return renderer.render(serializer.data, renderer_context={"view": view_class()}) # tests @@ -54,32 +55,28 @@ def test_simple_reverse_relation_included_renderer(): """ Test renderer when a single reverse fk relation is passed. 
""" - rendered = render_dummy_test_serialized_view( - DummyTestViewSet) + rendered = render_dummy_test_serialized_view(DummyTestViewSet) assert rendered def test_render_format_field_names(settings): """Test that json field is kept untouched.""" - settings.JSON_API_FORMAT_FIELD_NAMES = 'dasherize' + settings.JSON_API_FORMAT_FIELD_NAMES = "dasherize" rendered = render_dummy_test_serialized_view(DummyTestViewSet) result = json.loads(rendered.decode()) - assert result['data']['attributes']['json-field'] == {'JsonKey': 'JsonValue'} + assert result["data"]["attributes"]["json-field"] == {"JsonKey": "JsonValue"} @pytest.mark.django_db def test_blog_create(client): - url = reverse('drf-entry-blog-list') + url = reverse("drf-entry-blog-list") name = "Dummy Name" request_data = { - 'data': { - 'attributes': {'name': name}, - 'type': 'blogs' - }, + "data": {"attributes": {"name": name}, "type": "blogs"}, } resp = client.post(url, request_data) @@ -94,14 +91,14 @@ def test_blog_create(client): blog = blog.first() expected = { - 'data': { - 'attributes': {'name': blog.name, 'tags': []}, - 'id': '{}'.format(blog.id), - 'links': {'self': 'http://testserver/blogs/{}'.format(blog.id)}, - 'meta': {'copyright': datetime.now().year}, - 'type': 'blogs' + "data": { + "attributes": {"name": blog.name, "tags": []}, + "id": "{}".format(blog.id), + "links": {"self": "http://testserver/blogs/{}".format(blog.id)}, + "meta": {"copyright": datetime.now().year}, + "type": "blogs", }, - 'meta': {'apiDocs': '/docs/api/blogs'} + "meta": {"apiDocs": "/docs/api/blogs"}, } assert resp.status_code == 201 @@ -111,17 +108,17 @@ def test_blog_create(client): @pytest.mark.django_db def test_get_object_gives_correct_blog(client, blog, entry): - url = reverse('drf-entry-blog-detail', kwargs={'entry_pk': entry.id}) + url = reverse("drf-entry-blog-detail", kwargs={"entry_pk": entry.id}) resp = client.get(url) expected = { - 'data': { - 'attributes': {'name': blog.name, 'tags': []}, - 'id': '{}'.format(blog.id), - 'links': {'self': 'http://testserver/blogs/{}'.format(blog.id)}, - 'meta': {'copyright': datetime.now().year}, - 'type': 'blogs' + "data": { + "attributes": {"name": blog.name, "tags": []}, + "id": "{}".format(blog.id), + "links": {"self": "http://testserver/blogs/{}".format(blog.id)}, + "meta": {"copyright": datetime.now().year}, + "type": "blogs", }, - 'meta': {'apiDocs': '/docs/api/blogs'} + "meta": {"apiDocs": "/docs/api/blogs"}, } got = resp.json() assert got == expected @@ -130,20 +127,20 @@ def test_get_object_gives_correct_blog(client, blog, entry): @pytest.mark.django_db def test_get_object_patches_correct_blog(client, blog, entry): - url = reverse('drf-entry-blog-detail', kwargs={'entry_pk': entry.id}) + url = reverse("drf-entry-blog-detail", kwargs={"entry_pk": entry.id}) new_name = blog.name + " update" assert not new_name == blog.name request_data = { - 'data': { - 'attributes': {'name': new_name}, - 'id': '{}'.format(blog.id), - 'links': {'self': 'http://testserver/blogs/{}'.format(blog.id)}, - 'meta': {'copyright': datetime.now().year}, - 'relationships': {'tags': {'data': []}}, - 'type': 'blogs' + "data": { + "attributes": {"name": new_name}, + "id": "{}".format(blog.id), + "links": {"self": "http://testserver/blogs/{}".format(blog.id)}, + "meta": {"copyright": datetime.now().year}, + "relationships": {"tags": {"data": []}}, + "type": "blogs", }, - 'meta': {'apiDocs': '/docs/api/blogs'} + "meta": {"apiDocs": "/docs/api/blogs"}, } resp = client.patch(url, data=request_data) @@ -151,14 +148,14 @@ def 
test_get_object_patches_correct_blog(client, blog, entry): assert resp.status_code == 200 expected = { - 'data': { - 'attributes': {'name': new_name, 'tags': []}, - 'id': '{}'.format(blog.id), - 'links': {'self': 'http://testserver/blogs/{}'.format(blog.id)}, - 'meta': {'copyright': datetime.now().year}, - 'type': 'blogs' + "data": { + "attributes": {"name": new_name, "tags": []}, + "id": "{}".format(blog.id), + "links": {"self": "http://testserver/blogs/{}".format(blog.id)}, + "meta": {"copyright": datetime.now().year}, + "type": "blogs", }, - 'meta': {'apiDocs': '/docs/api/blogs'} + "meta": {"apiDocs": "/docs/api/blogs"}, } got = resp.json() assert got == expected @@ -167,7 +164,7 @@ def test_get_object_patches_correct_blog(client, blog, entry): @pytest.mark.django_db def test_get_object_deletes_correct_blog(client, entry): - url = reverse('drf-entry-blog-detail', kwargs={'entry_pk': entry.id}) + url = reverse("drf-entry-blog-detail", kwargs={"entry_pk": entry.id}) resp = client.delete(url) @@ -176,37 +173,29 @@ def test_get_object_deletes_correct_blog(client, entry): @pytest.mark.django_db def test_get_entry_list_with_blogs(client, entry): - url = reverse('drf-entry-suggested', kwargs={'entry_pk': entry.id}) + url = reverse("drf-entry-suggested", kwargs={"entry_pk": entry.id}) resp = client.get(url) got = resp.json() expected = { - 'links': { - 'first': 'http://testserver/drf-entries/1/suggested/?page%5Bnumber%5D=1', - 'last': 'http://testserver/drf-entries/1/suggested/?page%5Bnumber%5D=1', - 'next': None, - 'prev': None, + "links": { + "first": "http://testserver/drf-entries/1/suggested/?page%5Bnumber%5D=1", + "last": "http://testserver/drf-entries/1/suggested/?page%5Bnumber%5D=1", + "next": None, + "prev": None, }, - 'data': [ + "data": [ { - 'type': 'entries', - 'id': '1', - 'attributes': { - 'tags': [], + "type": "entries", + "id": "1", + "attributes": { + "tags": [], }, - 'links': { - 'self': 'http://testserver/drf-blogs/1' - } + "links": {"self": "http://testserver/drf-blogs/1"}, } ], - 'meta': { - 'pagination': { - 'page': 1, - 'pages': 1, - 'count': 1 - } - } + "meta": {"pagination": {"page": 1, "pages": 1, "count": 1}}, } assert resp.status_code == 200 diff --git a/example/tests/unit/test_factories.py b/example/tests/unit/test_factories.py index 8acc9e74..ac9d7b2a 100644 --- a/example/tests/unit/test_factories.py +++ b/example/tests/unit/test_factories.py @@ -17,25 +17,31 @@ def test_model_instance(blog): def test_multiple_blog(blog_factory): - another_blog = blog_factory(name='Cool Blog') - new_blog = blog_factory(name='Awesome Blog') + another_blog = blog_factory(name="Cool Blog") + new_blog = blog_factory(name="Awesome Blog") - assert another_blog.name == 'Cool Blog' - assert new_blog.name == 'Awesome Blog' + assert another_blog.name == "Cool Blog" + assert new_blog.name == "Awesome Blog" def test_factories_with_relations(author_factory, entry_factory): author = author_factory(name="Joel Spolsky") entry = entry_factory( - headline=("The Absolute Minimum Every Software Developer" - "Absolutely, Positively Must Know About Unicode " - "and Character Sets (No Excuses!)"), - blog__name='Joel on Software', authors=(author, )) - - assert entry.blog.name == 'Joel on Software' - assert entry.headline == ("The Absolute Minimum Every Software Developer" - "Absolutely, Positively Must Know About Unicode " - "and Character Sets (No Excuses!)") + headline=( + "The Absolute Minimum Every Software Developer" + "Absolutely, Positively Must Know About Unicode " + "and Character Sets (No 
Excuses!)" + ), + blog__name="Joel on Software", + authors=(author,), + ) + + assert entry.blog.name == "Joel on Software" + assert entry.headline == ( + "The Absolute Minimum Every Software Developer" + "Absolutely, Positively Must Know About Unicode " + "and Character Sets (No Excuses!)" + ) assert entry.authors.all().count() == 1 - assert entry.authors.all()[0].name == 'Joel Spolsky' + assert entry.authors.all()[0].name == "Joel Spolsky" diff --git a/example/tests/unit/test_filter_schema_params.py b/example/tests/unit/test_filter_schema_params.py index 2044c467..9c78dc61 100644 --- a/example/tests/unit/test_filter_schema_params.py +++ b/example/tests/unit/test_filter_schema_params.py @@ -7,12 +7,16 @@ class DummyEntryViewSet(EntryViewSet): - filter_backends = (dja_filters.QueryParameterValidationFilter, dja_filters.OrderingFilter, - backends.DjangoFilterBackend, drf_filters.SearchFilter) + filter_backends = ( + dja_filters.QueryParameterValidationFilter, + dja_filters.OrderingFilter, + backends.DjangoFilterBackend, + drf_filters.SearchFilter, + ) filterset_fields = { - 'id': ('exact',), - 'headline': ('exact', 'contains'), - 'blog__name': ('contains', ), + "id": ("exact",), + "headline": ("exact", "contains"), + "blog__name": ("contains",), } def __init__(self, **kwargs): @@ -28,38 +32,63 @@ def test_filters_get_schema_params(): # list of tuples: (filter, expected result) filters = [ (dja_filters.QueryParameterValidationFilter, []), - (backends.DjangoFilterBackend, [ - { - 'name': 'filter[id]', 'required': False, 'in': 'query', - 'description': 'id', 'schema': {'type': 'string'} - }, - { - 'name': 'filter[headline]', 'required': False, 'in': 'query', - 'description': 'headline', 'schema': {'type': 'string'} - }, - { - 'name': 'filter[headline.contains]', 'required': False, 'in': 'query', - 'description': 'headline__contains', 'schema': {'type': 'string'} - }, - { - 'name': 'filter[blog.name.contains]', 'required': False, 'in': 'query', - 'description': 'blog__name__contains', 'schema': {'type': 'string'} - }, - ]), - (dja_filters.OrderingFilter, [ - { - 'name': 'sort', 'required': False, 'in': 'query', - 'description': 'Which field to use when ordering the results.', - 'schema': {'type': 'string'} - } - ]), - (drf_filters.SearchFilter, [ - { - 'name': 'filter[search]', 'required': False, 'in': 'query', - 'description': 'A search term.', - 'schema': {'type': 'string'} - } - ]), + ( + backends.DjangoFilterBackend, + [ + { + "name": "filter[id]", + "required": False, + "in": "query", + "description": "id", + "schema": {"type": "string"}, + }, + { + "name": "filter[headline]", + "required": False, + "in": "query", + "description": "headline", + "schema": {"type": "string"}, + }, + { + "name": "filter[headline.contains]", + "required": False, + "in": "query", + "description": "headline__contains", + "schema": {"type": "string"}, + }, + { + "name": "filter[blog.name.contains]", + "required": False, + "in": "query", + "description": "blog__name__contains", + "schema": {"type": "string"}, + }, + ], + ), + ( + dja_filters.OrderingFilter, + [ + { + "name": "sort", + "required": False, + "in": "query", + "description": "Which field to use when ordering the results.", + "schema": {"type": "string"}, + } + ], + ), + ( + drf_filters.SearchFilter, + [ + { + "name": "filter[search]", + "required": False, + "in": "query", + "description": "A search term.", + "schema": {"type": "string"}, + } + ], + ), ] view = DummyEntryViewSet() @@ -71,7 +100,7 @@ def test_filters_get_schema_params(): continue # py35: 
the result list/dict ordering isn't guaranteed for res_item in result: - assert 'name' in res_item + assert "name" in res_item for exp_item in expected: - if res_item['name'] == exp_item['name']: + if res_item["name"] == exp_item["name"]: assert res_item == exp_item diff --git a/example/tests/unit/test_pagination.py b/example/tests/unit/test_pagination.py index aeb5f87e..45042939 100644 --- a/example/tests/unit/test_pagination.py +++ b/example/tests/unit/test_pagination.py @@ -21,7 +21,7 @@ class ExamplePagination(pagination.JsonApiLimitOffsetPagination): self.pagination = ExamplePagination() self.queryset = range(1, 101) - self.base_url = 'http://testserver/' + self.base_url = "http://testserver/" def paginate_queryset(self, request): return list(self.pagination.paginate_queryset(self.queryset, request)) @@ -31,7 +31,7 @@ def get_paginated_content(self, queryset): return response.data def get_test_request(self, arguments): - return Request(factory.get('/', arguments)) + return Request(factory.get("/", arguments)) def test_valid_offset_limit(self): """ @@ -44,34 +44,48 @@ def test_valid_offset_limit(self): next_offset = 15 prev_offset = 5 - request = self.get_test_request({ - self.pagination.limit_query_param: limit, - self.pagination.offset_query_param: offset - }) - base_url = replace_query_param(self.base_url, self.pagination.limit_query_param, limit) - last_url = replace_query_param(base_url, self.pagination.offset_query_param, last_offset) + request = self.get_test_request( + { + self.pagination.limit_query_param: limit, + self.pagination.offset_query_param: offset, + } + ) + base_url = replace_query_param( + self.base_url, self.pagination.limit_query_param, limit + ) + last_url = replace_query_param( + base_url, self.pagination.offset_query_param, last_offset + ) first_url = base_url - next_url = replace_query_param(base_url, self.pagination.offset_query_param, next_offset) - prev_url = replace_query_param(base_url, self.pagination.offset_query_param, prev_offset) + next_url = replace_query_param( + base_url, self.pagination.offset_query_param, next_offset + ) + prev_url = replace_query_param( + base_url, self.pagination.offset_query_param, prev_offset + ) queryset = self.paginate_queryset(request) content = self.get_paginated_content(queryset) next_offset = offset + limit expected_content = { - 'results': list(range(offset + 1, next_offset + 1)), - 'links': OrderedDict([ - ('first', first_url), - ('last', last_url), - ('next', next_url), - ('prev', prev_url), - ]), - 'meta': { - 'pagination': OrderedDict([ - ('count', count), - ('limit', limit), - ('offset', offset), - ]) - } + "results": list(range(offset + 1, next_offset + 1)), + "links": OrderedDict( + [ + ("first", first_url), + ("last", last_url), + ("next", next_url), + ("prev", prev_url), + ] + ), + "meta": { + "pagination": OrderedDict( + [ + ("count", count), + ("limit", limit), + ("offset", offset), + ] + ) + }, } assert queryset == list(range(offset + 1, next_offset + 1)) diff --git a/example/tests/unit/test_renderer_class_methods.py b/example/tests/unit/test_renderer_class_methods.py index 7a9230d3..c599abbd 100644 --- a/example/tests/unit/test_renderer_class_methods.py +++ b/example/tests/unit/test_renderer_class_methods.py @@ -11,34 +11,36 @@ class ResourceSerializer(serializers.ModelSerializer): version = serializers.SerializerMethodField() def get_version(self, obj): - return '1.0.0' + return "1.0.0" class Meta: - fields = ('username',) - meta_fields = ('version',) + fields = ("username",) + meta_fields = 
("version",) model = get_user_model() def test_build_json_resource_obj(): resource = { - 'pk': 1, - 'username': 'Alice', + "pk": 1, + "username": "Alice", } - serializer = ResourceSerializer(data={'username': 'Alice'}) + serializer = ResourceSerializer(data={"username": "Alice"}) serializer.is_valid() resource_instance = serializer.save() output = { - 'type': 'user', - 'id': '1', - 'attributes': { - 'username': 'Alice' - }, + "type": "user", + "id": "1", + "attributes": {"username": "Alice"}, } - assert JSONRenderer.build_json_resource_obj( - serializer.fields, resource, resource_instance, 'user') == output + assert ( + JSONRenderer.build_json_resource_obj( + serializer.fields, resource, resource_instance, "user" + ) + == output + ) def test_can_override_methods(): @@ -46,20 +48,18 @@ def test_can_override_methods(): Make sure extract_attributes and extract_relationships can be overriden. """ resource = { - 'pk': 1, - 'username': 'Alice', + "pk": 1, + "username": "Alice", } - serializer = ResourceSerializer(data={'username': 'Alice'}) + serializer = ResourceSerializer(data={"username": "Alice"}) serializer.is_valid() resource_instance = serializer.save() output = { - 'type': 'user', - 'id': '1', - 'attributes': { - 'username': 'Alice' - }, + "type": "user", + "id": "1", + "attributes": {"username": "Alice"}, } class CustomRenderer(JSONRenderer): @@ -78,35 +78,37 @@ def extract_relationships(cls, fields, resource, resource_instance): fields, resource, resource_instance ) - assert CustomRenderer.build_json_resource_obj( - serializer.fields, resource, resource_instance, 'user') == output + assert ( + CustomRenderer.build_json_resource_obj( + serializer.fields, resource, resource_instance, "user" + ) + == output + ) assert CustomRenderer.extract_attributes_was_overriden assert CustomRenderer.extract_relationships_was_overriden def test_extract_attributes(): fields = { - 'id': serializers.Field(), - 'username': serializers.Field(), - 'deleted': serializers.ReadOnlyField(), - } - resource = {'id': 1, 'deleted': None, 'username': 'jerel'} - expected = { - 'username': 'jerel', - 'deleted': None + "id": serializers.Field(), + "username": serializers.Field(), + "deleted": serializers.ReadOnlyField(), } - assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted(expected), ( - 'Regular fields should be extracted' - ) + resource = {"id": 1, "deleted": None, "username": "jerel"} + expected = {"username": "jerel", "deleted": None} + assert sorted(JSONRenderer.extract_attributes(fields, resource)) == sorted( + expected + ), "Regular fields should be extracted" assert sorted(JSONRenderer.extract_attributes(fields, {})) == sorted( - {'username': ''}), 'Should not extract read_only fields on empty serializer' + {"username": ""} + ), "Should not extract read_only fields on empty serializer" def test_extract_meta(): - serializer = ResourceSerializer(data={'username': 'jerel', 'version': '1.0.0'}) + serializer = ResourceSerializer(data={"username": "jerel", "version": "1.0.0"}) serializer.is_valid() expected = { - 'version': '1.0.0', + "version": "1.0.0", } assert JSONRenderer.extract_meta(serializer, serializer.data) == expected @@ -114,33 +116,27 @@ def test_extract_meta(): class ExtractRootMetaResourceSerializer(ResourceSerializer): def get_root_meta(self, resource, many): if many: - return { - 'foo': 'meta-many-value' - } + return {"foo": "meta-many-value"} else: - return { - 'foo': 'meta-value' - } + return {"foo": "meta-value"} class 
InvalidExtractRootMetaResourceSerializer(ResourceSerializer): def get_root_meta(self, resource, many): - return 'not a dict' + return "not a dict" def test_extract_root_meta(): serializer = ExtractRootMetaResourceSerializer() expected = { - 'foo': 'meta-value', + "foo": "meta-value", } assert JSONRenderer.extract_root_meta(serializer, {}) == expected def test_extract_root_meta_many(): serializer = ExtractRootMetaResourceSerializer(many=True) - expected = { - 'foo': 'meta-many-value' - } + expected = {"foo": "meta-many-value"} assert JSONRenderer.extract_root_meta(serializer, {}) == expected diff --git a/example/tests/unit/test_renderers.py b/example/tests/unit/test_renderers.py index 034cc45f..47d37b5b 100644 --- a/example/tests/unit/test_renderers.py +++ b/example/tests/unit/test_renderers.py @@ -11,40 +11,41 @@ # serializers class RelatedModelSerializer(serializers.ModelSerializer): - blog = serializers.ReadOnlyField(source='entry.blog') + blog = serializers.ReadOnlyField(source="entry.blog") class Meta: model = Comment - fields = ('id', 'blog') + fields = ("id", "blog") class DummyTestSerializer(serializers.ModelSerializer): - ''' + """ This serializer is a simple compound document serializer which includes only a single embedded relation - ''' + """ + related_models = RelatedModelSerializer( - source='comments', many=True, read_only=True) + source="comments", many=True, read_only=True + ) json_field = serializers.SerializerMethodField() def get_json_field(self, entry): - return {'JsonKey': 'JsonValue'} + return {"JsonKey": "JsonValue"} class Meta: model = Entry - fields = ('related_models', 'json_field') + fields = ("related_models", "json_field") class JSONAPIMeta: - included_resources = ('related_models',) + included_resources = ("related_models",) class EntryDRFSerializers(serializers.ModelSerializer): - class Meta: model = Entry - fields = ('headline', 'body_text') - read_only_fields = ('tags',) + fields = ("headline", "body_text") + read_only_fields = ("tags",) class CommentWithNestedFieldsSerializer(serializers.ModelSerializer): @@ -52,7 +53,7 @@ class CommentWithNestedFieldsSerializer(serializers.ModelSerializer): class Meta: model = Comment - exclude = ('created_at', 'modified_at', 'author') + exclude = ("created_at", "modified_at", "author") # fields = ('entry', 'body', 'author',) @@ -61,7 +62,7 @@ class AuthorWithNestedFieldsSerializer(serializers.ModelSerializer): class Meta: model = Author - fields = ('name', 'email', 'comments') + fields = ("name", "email", "comments") # views @@ -78,59 +79,54 @@ class ReadOnlyDummyTestViewSet(views.ReadOnlyModelViewSet): class AuthorWithNestedFieldsViewSet(views.ModelViewSet): queryset = Author.objects.all() serializer_class = AuthorWithNestedFieldsSerializer - resource_name = 'authors' + resource_name = "authors" def render_dummy_test_serialized_view(view_class, instance): serializer = view_class.serializer_class(instance=instance) renderer = JSONRenderer() - return renderer.render( - serializer.data, - renderer_context={'view': view_class()}) + return renderer.render(serializer.data, renderer_context={"view": view_class()}) def test_simple_reverse_relation_included_renderer(): - ''' + """ Test renderer when a single reverse fk relation is passed. 
- ''' - rendered = render_dummy_test_serialized_view( - DummyTestViewSet, Entry()) + """ + rendered = render_dummy_test_serialized_view(DummyTestViewSet, Entry()) assert rendered def test_simple_reverse_relation_included_read_only_viewset(): - rendered = render_dummy_test_serialized_view( - ReadOnlyDummyTestViewSet, Entry()) + rendered = render_dummy_test_serialized_view(ReadOnlyDummyTestViewSet, Entry()) assert rendered def test_render_format_field_names(settings): """Test that json field is kept untouched.""" - settings.JSON_API_FORMAT_FIELD_NAMES = 'dasherize' + settings.JSON_API_FORMAT_FIELD_NAMES = "dasherize" rendered = render_dummy_test_serialized_view(DummyTestViewSet, Entry()) result = json.loads(rendered.decode()) - assert result['data']['attributes']['json-field'] == {'JsonKey': 'JsonValue'} + assert result["data"]["attributes"]["json-field"] == {"JsonKey": "JsonValue"} def test_writeonly_not_in_response(): """Test that writeonly fields are not shown in list response""" class WriteonlyTestSerializer(serializers.ModelSerializer): - '''Serializer for testing the absence of write_only fields''' + """Serializer for testing the absence of write_only fields""" + comments = serializers.ResourceRelatedField( - many=True, - write_only=True, - queryset=Comment.objects.all() + many=True, write_only=True, queryset=Comment.objects.all() ) rating = serializers.IntegerField(write_only=True) class Meta: model = Entry - fields = ('comments', 'rating') + fields = ("comments", "rating") class WriteOnlyDummyTestViewSet(views.ReadOnlyModelViewSet): queryset = Entry.objects.all() @@ -139,8 +135,8 @@ class WriteOnlyDummyTestViewSet(views.ReadOnlyModelViewSet): rendered = render_dummy_test_serialized_view(WriteOnlyDummyTestViewSet, Entry()) result = json.loads(rendered.decode()) - assert 'rating' not in result['data']['attributes'] - assert 'relationships' not in result['data'] + assert "rating" not in result["data"]["attributes"] + assert "relationships" not in result["data"] def test_render_empty_relationship_reverse_lookup(): @@ -149,7 +145,7 @@ def test_render_empty_relationship_reverse_lookup(): class EmptyRelationshipSerializer(serializers.ModelSerializer): class Meta: model = Author - fields = ('bio', ) + fields = ("bio",) class EmptyRelationshipViewSet(views.ReadOnlyModelViewSet): queryset = Author.objects.all() @@ -157,9 +153,9 @@ class EmptyRelationshipViewSet(views.ReadOnlyModelViewSet): rendered = render_dummy_test_serialized_view(EmptyRelationshipViewSet, Author()) result = json.loads(rendered.decode()) - assert 'relationships' in result['data'] - assert 'bio' in result['data']['relationships'] - assert result['data']['relationships']['bio'] == {'data': None} + assert "relationships" in result["data"] + assert "bio" in result["data"]["relationships"] + assert result["data"]["relationships"]["bio"] == {"data": None} @pytest.mark.django_db @@ -167,32 +163,30 @@ def test_extract_relation_instance(comment): serializer = RelatedModelSerializer(instance=comment) got = JSONRenderer.extract_relation_instance( - field=serializer.fields['blog'], resource_instance=comment + field=serializer.fields["blog"], resource_instance=comment ) assert got == comment.entry.blog def test_render_serializer_as_attribute(db): # setting up - blog = Blog.objects.create(name='Some Blog', tagline="It's a blog") + blog = Blog.objects.create(name="Some Blog", tagline="It's a blog") entry = Entry.objects.create( blog=blog, - headline='headline', - body_text='body_text', + headline="headline", + body_text="body_text", 
pub_date=timezone.now(), mod_date=timezone.now(), n_comments=0, n_pingbacks=0, - rating=3 + rating=3, ) - author = Author.objects.create(name='some_author', email='some_author@example.org') + author = Author.objects.create(name="some_author", email="some_author@example.org") entry.authors.add(author) Comment.objects.create( - entry=entry, - body='testing one two three', - author=Author.objects.first() + entry=entry, body="testing one two three", author=Author.objects.first() ) rendered = render_dummy_test_serialized_view(AuthorWithNestedFieldsViewSet, author) @@ -209,13 +203,13 @@ def test_render_serializer_as_attribute(db): { "id": 1, "entry": { - 'headline': 'headline', - 'body_text': 'body_text', + "headline": "headline", + "body_text": "body_text", }, - "body": "testing one two three" + "body": "testing one two three", } - ] - } + ], + }, } } assert expected == result diff --git a/example/tests/unit/test_serializer_method_field.py b/example/tests/unit/test_serializer_method_field.py index 89e18295..37e74ce6 100644 --- a/example/tests/unit/test_serializer_method_field.py +++ b/example/tests/unit/test_serializer_method_field.py @@ -13,28 +13,27 @@ class BlogSerializer(serializers.ModelSerializer): class Meta: model = Blog - fields = ['one_entry'] + fields = ["one_entry"] def get_one_entry(self, instance): return Entry(id=100) serializer = BlogSerializer(instance=Blog()) - assert serializer.data['one_entry']['id'] == '100' + assert serializer.data["one_entry"]["id"] == "100" def test_method_name_custom(): class BlogSerializer(serializers.ModelSerializer): one_entry = SerializerMethodResourceRelatedField( - model=Entry, - method_name='get_custom_entry' + model=Entry, method_name="get_custom_entry" ) class Meta: model = Blog - fields = ['one_entry'] + fields = ["one_entry"] def get_custom_entry(self, instance): return Entry(id=100) serializer = BlogSerializer(instance=Blog()) - assert serializer.data['one_entry']['id'] == '100' + assert serializer.data["one_entry"]["id"] == "100" diff --git a/example/tests/unit/test_settings.py b/example/tests/unit/test_settings.py index e6b82a24..666eae4a 100644 --- a/example/tests/unit/test_settings.py +++ b/example/tests/unit/test_settings.py @@ -13,5 +13,5 @@ def test_settings_default(): def test_settings_override(settings): - settings.JSON_API_FORMAT_FIELD_NAMES = 'dasherize' - assert json_api_settings.FORMAT_FIELD_NAMES == 'dasherize' + settings.JSON_API_FORMAT_FIELD_NAMES = "dasherize" + assert json_api_settings.FORMAT_FIELD_NAMES == "dasherize" diff --git a/example/urls.py b/example/urls.py index 9b882ce5..800b9f79 100644 --- a/example/urls.py +++ b/example/urls.py @@ -19,70 +19,95 @@ EntryViewSet, NonPaginatedEntryViewSet, ProjectTypeViewset, - ProjectViewset + ProjectViewset, ) router = routers.DefaultRouter(trailing_slash=False) -router.register(r'blogs', BlogViewSet) -router.register(r'entries', EntryViewSet) -router.register(r'nopage-entries', NonPaginatedEntryViewSet, 'nopage-entry') -router.register(r'authors', AuthorViewSet) -router.register(r'comments', CommentViewSet) -router.register(r'companies', CompanyViewset) -router.register(r'projects', ProjectViewset) -router.register(r'project-types', ProjectTypeViewset) +router.register(r"blogs", BlogViewSet) +router.register(r"entries", EntryViewSet) +router.register(r"nopage-entries", NonPaginatedEntryViewSet, "nopage-entry") +router.register(r"authors", AuthorViewSet) +router.register(r"comments", CommentViewSet) +router.register(r"companies", CompanyViewset) +router.register(r"projects", 
ProjectViewset)
+router.register(r"project-types", ProjectTypeViewset)
 
 urlpatterns = [
-    url(r'^', include(router.urls)),
-    url(r'^entries/(?P<entry_pk>[^/.]+)/suggested/$',
-        EntryViewSet.as_view({'get': 'list'}),
-        name='entry-suggested'
-        ),
-    url(r'entries/(?P<entry_pk>[^/.]+)/blog$',
-        BlogViewSet.as_view({'get': 'retrieve'}),
-        name='entry-blog'),
-    url(r'entries/(?P<entry_pk>[^/.]+)/comments$',
-        CommentViewSet.as_view({'get': 'list'}),
-        name='entry-comments'),
-    url(r'entries/(?P<entry_pk>[^/.]+)/authors$',
-        AuthorViewSet.as_view({'get': 'list'}),
-        name='entry-authors'),
-    url(r'entries/(?P<entry_pk>[^/.]+)/featured$',
-        EntryViewSet.as_view({'get': 'retrieve'}),
-        name='entry-featured'),
-
-    url(r'^authors/(?P<pk>[^/.]+)/(?P<related_field>\w+)/$',
-        AuthorViewSet.as_view({'get': 'retrieve_related'}),
-        name='author-related'),
-
-    url(r'^entries/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
+    url(r"^", include(router.urls)),
+    url(
+        r"^entries/(?P<entry_pk>[^/.]+)/suggested/$",
+        EntryViewSet.as_view({"get": "list"}),
+        name="entry-suggested",
+    ),
+    url(
+        r"entries/(?P<entry_pk>[^/.]+)/blog$",
+        BlogViewSet.as_view({"get": "retrieve"}),
+        name="entry-blog",
+    ),
+    url(
+        r"entries/(?P<entry_pk>[^/.]+)/comments$",
+        CommentViewSet.as_view({"get": "list"}),
+        name="entry-comments",
+    ),
+    url(
+        r"entries/(?P<entry_pk>[^/.]+)/authors$",
+        AuthorViewSet.as_view({"get": "list"}),
+        name="entry-authors",
+    ),
+    url(
+        r"entries/(?P<entry_pk>[^/.]+)/featured$",
+        EntryViewSet.as_view({"get": "retrieve"}),
+        name="entry-featured",
+    ),
+    url(
+        r"^authors/(?P<pk>[^/.]+)/(?P<related_field>\w+)/$",
+        AuthorViewSet.as_view({"get": "retrieve_related"}),
+        name="author-related",
+    ),
+    url(
+        r"^entries/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
         EntryRelationshipView.as_view(),
-        name='entry-relationships'),
-    url(r'^blogs/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
+        name="entry-relationships",
+    ),
+    url(
+        r"^blogs/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
         BlogRelationshipView.as_view(),
-        name='blog-relationships'),
-    url(r'^comments/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
+        name="blog-relationships",
+    ),
+    url(
+        r"^comments/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
         CommentRelationshipView.as_view(),
-        name='comment-relationships'),
-    url(r'^authors/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
+        name="comment-relationships",
+    ),
+    url(
+        r"^authors/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
         AuthorRelationshipView.as_view(),
-        name='author-relationships'),
-    path('openapi', get_schema_view(
-        title="Example API",
-        description="API for all things …",
-        version="1.0.0",
-        generator_class=SchemaGenerator
-    ), name='openapi-schema'),
-    path('swagger-ui/', TemplateView.as_view(
-        template_name='swagger-ui.html',
-        extra_context={'schema_url': 'openapi-schema'}
-    ), name='swagger-ui'),
+        name="author-relationships",
+    ),
+    path(
+        "openapi",
+        get_schema_view(
+            title="Example API",
+            description="API for all things …",
+            version="1.0.0",
+            generator_class=SchemaGenerator,
+        ),
+        name="openapi-schema",
+    ),
+    path(
+        "swagger-ui/",
+        TemplateView.as_view(
+            template_name="swagger-ui.html",
+            extra_context={"schema_url": "openapi-schema"},
+        ),
+        name="swagger-ui",
+    ),
 ]
 
 if settings.DEBUG:
     import debug_toolbar
 
     urlpatterns = [
-        url(r'^__debug__/', include(debug_toolbar.urls)),
+        url(r"^__debug__/", include(debug_toolbar.urls)),
     ] + urlpatterns
diff --git a/example/urls_test.py b/example/urls_test.py
index 44ae6f58..7e875936 100644
--- a/example/urls_test.py
+++ b/example/urls_test.py
@@ -18,73 +18,90 @@
     NoFiltersetEntryViewSet,
     NonPaginatedEntryViewSet,
     ProjectTypeViewset,
-    ProjectViewset
+    ProjectViewset,
 )
 
 router = routers.DefaultRouter(trailing_slash=False)
-router.register(r'blogs', BlogViewSet)
+router.register(r"blogs", BlogViewSet)
 # router to test default DRF blog functionalities
-router.register(r'drf-blogs', DRFBlogViewSet, 'drf-entry-blog')
-router.register(r'entries', EntryViewSet)
+router.register(r"drf-blogs", DRFBlogViewSet, "drf-entry-blog")
+router.register(r"entries", EntryViewSet)
 # these "flavors" of entries are used for various tests:
-router.register(r'nopage-entries', NonPaginatedEntryViewSet, 'nopage-entry')
-router.register(r'filterset-entries', FiltersetEntryViewSet, 'filterset-entry')
-router.register(r'nofilterset-entries', NoFiltersetEntryViewSet, 'nofilterset-entry')
-router.register(r'authors', AuthorViewSet)
-router.register(r'comments', CommentViewSet)
-router.register(r'companies', CompanyViewset)
-router.register(r'projects', ProjectViewset)
-router.register(r'project-types', ProjectTypeViewset)
+router.register(r"nopage-entries", NonPaginatedEntryViewSet, "nopage-entry")
+router.register(r"filterset-entries", FiltersetEntryViewSet, "filterset-entry")
+router.register(r"nofilterset-entries", NoFiltersetEntryViewSet, "nofilterset-entry")
+router.register(r"authors", AuthorViewSet)
+router.register(r"comments", CommentViewSet)
+router.register(r"companies", CompanyViewset)
+router.register(r"projects", ProjectViewset)
+router.register(r"project-types", ProjectTypeViewset)
 # for the old tests
-router.register(r'identities', Identity)
+router.register(r"identities", Identity)
 
 urlpatterns = [
     # old tests
-    re_path(r'identities/default/(?P<pk>\d+)$',
-            GenericIdentity.as_view(), name='user-default'),
-
-
-    re_path(r'^entries/(?P<entry_pk>[^/.]+)/blog$',
-            BlogViewSet.as_view({'get': 'retrieve'}),
-            name='entry-blog'
-            ),
-    re_path(r'^entries/(?P<entry_pk>[^/.]+)/comments$',
-            CommentViewSet.as_view({'get': 'list'}),
-            name='entry-comments'
-            ),
-    re_path(r'^entries/(?P<entry_pk>[^/.]+)/suggested/$',
-            EntryViewSet.as_view({'get': 'list'}),
-            name='entry-suggested'
-            ),
-    re_path(r'^drf-entries/(?P<entry_pk>[^/.]+)/suggested/$',
-            DRFEntryViewSet.as_view({'get': 'list'}),
-            name='drf-entry-suggested'
-            ),
-    re_path(r'entries/(?P<entry_pk>[^/.]+)/authors$',
-            AuthorViewSet.as_view({'get': 'list'}),
-            name='entry-authors'),
-    re_path(r'entries/(?P<entry_pk>[^/.]+)/featured$',
-            EntryViewSet.as_view({'get': 'retrieve'}),
-            name='entry-featured'),
-
-    re_path(r'^authors/(?P<pk>[^/.]+)/(?P<related_field>\w+)/$',
-            AuthorViewSet.as_view({'get': 'retrieve_related'}),
-            name='author-related'),
-
-    re_path(r'^entries/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
-            EntryRelationshipView.as_view(),
-            name='entry-relationships'),
-    re_path(r'^blogs/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
-            BlogRelationshipView.as_view(),
-            name='blog-relationships'),
-    re_path(r'^comments/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
-            CommentRelationshipView.as_view(),
-            name='comment-relationships'),
-    re_path(r'^authors/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$',
-            AuthorRelationshipView.as_view(),
-            name='author-relationships'),
+    re_path(
+        r"identities/default/(?P<pk>\d+)$",
+        GenericIdentity.as_view(),
+        name="user-default",
+    ),
+    re_path(
+        r"^entries/(?P<entry_pk>[^/.]+)/blog$",
+        BlogViewSet.as_view({"get": "retrieve"}),
+        name="entry-blog",
+    ),
+    re_path(
+        r"^entries/(?P<entry_pk>[^/.]+)/comments$",
+        CommentViewSet.as_view({"get": "list"}),
+        name="entry-comments",
+    ),
+    re_path(
+        r"^entries/(?P<entry_pk>[^/.]+)/suggested/$",
+        EntryViewSet.as_view({"get": "list"}),
+        name="entry-suggested",
+    ),
+    re_path(
+        r"^drf-entries/(?P<entry_pk>[^/.]+)/suggested/$",
+        DRFEntryViewSet.as_view({"get": "list"}),
+        name="drf-entry-suggested",
+    ),
+    re_path(
+        r"entries/(?P<entry_pk>[^/.]+)/authors$",
+        AuthorViewSet.as_view({"get": "list"}),
+        name="entry-authors",
+    ),
+    re_path(
+        r"entries/(?P<entry_pk>[^/.]+)/featured$",
+        EntryViewSet.as_view({"get": "retrieve"}),
+        name="entry-featured",
+    ),
+    re_path(
+        r"^authors/(?P<pk>[^/.]+)/(?P<related_field>\w+)/$",
+        AuthorViewSet.as_view({"get": "retrieve_related"}),
+        name="author-related",
+    ),
+    re_path(
+        r"^entries/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
+        EntryRelationshipView.as_view(),
+        name="entry-relationships",
+    ),
+    re_path(
+        r"^blogs/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
+        BlogRelationshipView.as_view(),
+        name="blog-relationships",
+    ),
+    re_path(
+        r"^comments/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
+        CommentRelationshipView.as_view(),
+        name="comment-relationships",
+    ),
+    re_path(
+        r"^authors/(?P<pk>[^/.]+)/relationships/(?P<related_field>\w+)$",
+        AuthorRelationshipView.as_view(),
+        name="author-relationships",
+    ),
 ]
 
 urlpatterns += router.urls
diff --git a/example/utils.py b/example/utils.py
index 65403038..1c9ef459 100644
--- a/example/utils.py
+++ b/example/utils.py
@@ -6,7 +6,7 @@ class BrowsableAPIRendererWithoutForms(BrowsableAPIRenderer):
 
     def get_context(self, *args, **kwargs):
         ctx = super().get_context(*args, **kwargs)
-        ctx['display_edit_forms'] = False
+        ctx["display_edit_forms"] = False
         return ctx
 
     def show_form_for_method(self, view, method, request, obj):
diff --git a/example/views.py b/example/views.py
index 99a54193..65bcb301 100644
--- a/example/views.py
+++ b/example/views.py
@@ -8,7 +8,10 @@
 import rest_framework_json_api.parsers
 import rest_framework_json_api.renderers
 from rest_framework_json_api.django_filters import DjangoFilterBackend
-from rest_framework_json_api.filters import OrderingFilter, QueryParameterValidationFilter
+from rest_framework_json_api.filters import (
+    OrderingFilter,
+    QueryParameterValidationFilter,
+)
 from rest_framework_json_api.pagination import JsonApiPageNumberPagination
 from rest_framework_json_api.utils import format_drf_errors
 from rest_framework_json_api.views import ModelViewSet, RelationshipView
@@ -25,7 +28,7 @@
     EntryDRFSerializers,
     EntrySerializer,
     ProjectSerializer,
-    ProjectTypeSerializer
+    ProjectTypeSerializer,
 )
 
 HTTP_422_UNPROCESSABLE_ENTITY = 422
@@ -36,7 +39,7 @@ class BlogViewSet(ModelViewSet):
     serializer_class = BlogSerializer
 
     def get_object(self):
-        entry_pk = self.kwargs.get('entry_pk', None)
+        entry_pk = self.kwargs.get("entry_pk", None)
         if entry_pk is not None:
             return Entry.objects.get(id=entry_pk).blog
 
@@ -46,7 +49,7 @@ def get_object(self):
 class DRFBlogViewSet(ModelViewSet):
     queryset = Blog.objects.all()
     serializer_class = BlogDRFSerializer
-    lookup_url_kwarg = 'entry_pk'
+    lookup_url_kwarg = "entry_pk"
 
     def get_object(self):
         entry_pk = self.kwargs.get(self.lookup_url_kwarg, None)
@@ -62,6 +65,7 @@ class JsonApiViewSet(ModelViewSet):
     within a class.
     It allows using DRF-jsonapi alongside vanilla DRF API views.
""" + parser_classes = [ rest_framework_json_api.parsers.JSONParser, rest_framework.parsers.FormParser, @@ -92,14 +96,14 @@ class BlogCustomViewSet(JsonApiViewSet): class EntryViewSet(ModelViewSet): queryset = Entry.objects.all() - resource_name = 'posts' + resource_name = "posts" def get_serializer_class(self): return EntrySerializer def get_object(self): # Handle featured - entry_pk = self.kwargs.get('entry_pk', None) + entry_pk = self.kwargs.get("entry_pk", None) if entry_pk is not None: return Entry.objects.exclude(pk=entry_pk).first() @@ -109,7 +113,7 @@ def get_object(self): class DRFEntryViewSet(ModelViewSet): queryset = Entry.objects.all() serializer_class = EntryDRFSerializers - lookup_url_kwarg = 'entry_pk' + lookup_url_kwarg = "entry_pk" def get_object(self): # Handle featured @@ -128,30 +132,42 @@ class NonPaginatedEntryViewSet(EntryViewSet): pagination_class = NoPagination # override the default filter backends in order to test QueryParameterValidationFilter without # breaking older usage of non-standard query params like `page_size`. - filter_backends = (QueryParameterValidationFilter, OrderingFilter, - DjangoFilterBackend, SearchFilter) - ordering_fields = ('headline', 'body_text', 'blog__name', 'blog__id') - rels = ('exact', 'iexact', - 'contains', 'icontains', - 'gt', 'gte', 'lt', 'lte', - 'in', 'regex', 'isnull',) + filter_backends = ( + QueryParameterValidationFilter, + OrderingFilter, + DjangoFilterBackend, + SearchFilter, + ) + ordering_fields = ("headline", "body_text", "blog__name", "blog__id") + rels = ( + "exact", + "iexact", + "contains", + "icontains", + "gt", + "gte", + "lt", + "lte", + "in", + "regex", + "isnull", + ) filterset_fields = { - 'id': ('exact', 'in'), - 'headline': rels, - 'body_text': rels, - 'blog__name': rels, - 'blog__tagline': rels, + "id": ("exact", "in"), + "headline": rels, + "body_text": rels, + "blog__name": rels, + "blog__tagline": rels, } - search_fields = ('headline', 'body_text', 'blog__name', 'blog__tagline') + search_fields = ("headline", "body_text", "blog__name", "blog__tagline") class EntryFilter(filters.FilterSet): - bname = filters.CharFilter(field_name="blog__name", - lookup_expr="exact") + bname = filters.CharFilter(field_name="blog__name", lookup_expr="exact") authors__id = filters.ModelMultipleChoiceFilter( - field_name='authors', - to_field_name='id', + field_name="authors", + to_field_name="id", conjoined=True, # to "and" the ids queryset=Author.objects.all(), ) @@ -159,10 +175,10 @@ class EntryFilter(filters.FilterSet): class Meta: model = Entry fields = { - 'id': ('exact',), - 'headline': ('exact',), - 'body_text': ('exact',), - 'authors__id': ('in',), + "id": ("exact",), + "headline": ("exact",), + "body_text": ("exact",), + "authors__id": ("in",), } @@ -170,16 +186,21 @@ class FiltersetEntryViewSet(EntryViewSet): """ like above but use filterset_class instead of filterset_fields """ + pagination_class = NoPagination filterset_fields = None filterset_class = EntryFilter - filter_backends = (QueryParameterValidationFilter, DjangoFilterBackend,) + filter_backends = ( + QueryParameterValidationFilter, + DjangoFilterBackend, + ) class NoFiltersetEntryViewSet(EntryViewSet): """ like above but no filtersets """ + pagination_class = NoPagination filterset_fields = None filterset_class = None @@ -189,7 +210,8 @@ class AuthorViewSet(ModelViewSet): queryset = Author.objects.all() serializer_classes = { "list": AuthorListSerializer, - "retrieve": AuthorDetailSerializer} + "retrieve": AuthorDetailSerializer, + } serializer_class = 
AuthorSerializer # fallback def get_serializer_class(self): @@ -202,16 +224,14 @@ def get_serializer_class(self): class CommentViewSet(ModelViewSet): queryset = Comment.objects.all() serializer_class = CommentSerializer - select_for_includes = { - 'writer': ['author__bio'] - } + select_for_includes = {"writer": ["author__bio"]} prefetch_for_includes = { - '__all__': [], - 'author': ['author__bio', 'author__entries'], + "__all__": [], + "author": ["author__bio", "author__entries"], } def get_queryset(self, *args, **kwargs): - entry_pk = self.kwargs.get('entry_pk', None) + entry_pk = self.kwargs.get("entry_pk", None) if entry_pk is not None: return self.queryset.filter(entry_id=entry_pk) @@ -224,7 +244,7 @@ class CompanyViewset(ModelViewSet): class ProjectViewset(ModelViewSet): - queryset = Project.objects.all().order_by('pk') + queryset = Project.objects.all().order_by("pk") serializer_class = ProjectSerializer @@ -247,4 +267,4 @@ class CommentRelationshipView(RelationshipView): class AuthorRelationshipView(RelationshipView): queryset = Author.objects.all() - self_link_view_name = 'author-relationships' + self_link_view_name = "author-relationships" diff --git a/rest_framework_json_api/__init__.py b/rest_framework_json_api/__init__.py index f059f020..4c4f115d 100644 --- a/rest_framework_json_api/__init__.py +++ b/rest_framework_json_api/__init__.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- -__title__ = 'djangorestframework-jsonapi' -__version__ = '4.0.0' -__author__ = '' -__license__ = 'BSD' -__copyright__ = '' +__title__ = "djangorestframework-jsonapi" +__version__ = "4.0.0" +__author__ = "" +__license__ = "BSD" +__copyright__ = "" # Version synonym VERSION = __version__ diff --git a/rest_framework_json_api/django_filters/backends.py b/rest_framework_json_api/django_filters/backends.py index 814a79f3..bb24756c 100644 --- a/rest_framework_json_api/django_filters/backends.py +++ b/rest_framework_json_api/django_filters/backends.py @@ -54,6 +54,7 @@ class DjangoFilterBackend(DjangoFilterBackend): keyword. The default is "search" unless overriden but it's used here just to make sure we don't complain about it being an invalid filter. """ + search_param = api_settings.SEARCH_PARAM # Make this regex check for 'filter' as well as 'filter[...]' @@ -63,7 +64,9 @@ class DjangoFilterBackend(DjangoFilterBackend): # regex `\w` matches [a-zA-Z0-9_]. # TODO: U+0080 and above allowed but not recommended. Leave them out for now.e # Also, ' ' (space) is allowed within a member name but not recommended. - filter_regex = re.compile(r'^filter(?P<ldelim>\[?)(?P<assoc>[\w\.\-]*)(?P<rdelim>\]?$)') + filter_regex = re.compile( + r"^filter(?P<ldelim>\[?)(?P<assoc>[\w\.\-]*)(?P<rdelim>\]?$)" + ) def _validate_filter(self, keys, filterset_class): """ @@ -74,7 +77,7 @@ def _validate_filter(self, keys, filterset_class): :raises ValidationError: if key not in FilterSet keys or no FilterSet. """ for k in keys: - if ((not filterset_class) or (k not in filterset_class.base_filters)): + if (not filterset_class) or (k not in filterset_class.base_filters): raise ValidationError("invalid filter[{}]".format(k)) def get_filterset(self, request, queryset, view): @@ -86,7 +89,7 @@ def get_filterset(self, request, queryset, view): # TODO: .base_filters vs.
.filters attr (not always present) filterset_class = self.get_filterset_class(view, queryset) kwargs = self.get_filterset_kwargs(request, queryset, view) - self._validate_filter(kwargs.pop('filter_keys'), filterset_class) + self._validate_filter(kwargs.pop("filter_keys"), filterset_class) if filterset_class is None: return None return filterset_class(**kwargs) @@ -103,24 +106,29 @@ def get_filterset_kwargs(self, request, queryset, view): data = request.query_params.copy() for qp, val in request.query_params.lists(): m = self.filter_regex.match(qp) - if m and (not m.groupdict()['assoc'] or - m.groupdict()['ldelim'] != '[' or m.groupdict()['rdelim'] != ']'): + if m and ( + not m.groupdict()["assoc"] + or m.groupdict()["ldelim"] != "[" + or m.groupdict()["rdelim"] != "]" + ): raise ValidationError("invalid query parameter: {}".format(qp)) if m and qp != self.search_param: if not all(val): - raise ValidationError("missing value for query parameter {}".format(qp)) + raise ValidationError( + "missing value for query parameter {}".format(qp) + ) # convert jsonapi relationship path to Django ORM's __ notation - key = m.groupdict()['assoc'].replace('.', '__') + key = m.groupdict()["assoc"].replace(".", "__") # undo JSON_API_FORMAT_FIELD_NAMES conversion: - key = format_value(key, 'underscore') + key = format_value(key, "underscore") data.setlist(key, val) filter_keys.append(key) del data[qp] return { - 'data': data, - 'queryset': queryset, - 'request': request, - 'filter_keys': filter_keys, + "data": data, + "queryset": queryset, + "request": request, + "filter_keys": filter_keys, } def get_schema_operation_parameters(self, view): @@ -133,6 +141,6 @@ def get_schema_operation_parameters(self, view): """ result = super(DjangoFilterBackend, self).get_schema_operation_parameters(view) for res in result: - if 'name' in res: - res['name'] = 'filter[{}]'.format(res['name']).replace('__', '.') + if "name" in res: + res["name"] = "filter[{}]".format(res["name"]).replace("__", ".") return result diff --git a/rest_framework_json_api/exceptions.py b/rest_framework_json_api/exceptions.py index 938a0c77..05f56756 100644 --- a/rest_framework_json_api/exceptions.py +++ b/rest_framework_json_api/exceptions.py @@ -8,7 +8,8 @@ def rendered_with_json_api(view): from rest_framework_json_api.renderers import JSONRenderer - for renderer_class in getattr(view, 'renderer_classes', []): + + for renderer_class in getattr(view, "renderer_classes", []): if issubclass(renderer_class, JSONRenderer): return True return False @@ -29,7 +30,7 @@ def exception_handler(exc, context): return response # Use regular DRF format if not rendered by DRF JSON API and not uniform - is_json_api_view = rendered_with_json_api(context['view']) + is_json_api_view = rendered_with_json_api(context["view"]) is_uniform = json_api_settings.UNIFORM_EXCEPTIONS if not is_json_api_view and not is_uniform: return response @@ -46,4 +47,4 @@ def exception_handler(exc, context): class Conflict(exceptions.APIException): status_code = status.HTTP_409_CONFLICT - default_detail = _('Conflict.') + default_detail = _("Conflict.") diff --git a/rest_framework_json_api/filters.py b/rest_framework_json_api/filters.py index eafcdb76..06f7667e 100644 --- a/rest_framework_json_api/filters.py +++ b/rest_framework_json_api/filters.py @@ -18,9 +18,10 @@ class OrderingFilter(OrderingFilter): Also applies DJA format_value() to convert (e.g. camelcase) to underscore. 
(See JSON_API_FORMAT_FIELD_NAMES in docs/usage.md) """ + #: override :py:attr:`rest_framework.filters.OrderingFilter.ordering_param` #: with JSON:API-compliant query parameter name. - ordering_param = 'sort' + ordering_param = "sort" def remove_invalid_fields(self, queryset, fields, view, request): """ @@ -31,16 +32,21 @@ def remove_invalid_fields(self, queryset, fields, view, request): :raises ValidationError: if a sort field is invalid. """ valid_fields = [ - item[0] for item in self.get_valid_fields(queryset, view, - {'request': request}) + item[0] + for item in self.get_valid_fields(queryset, view, {"request": request}) ] bad_terms = [ - term for term in fields - if format_value(term.replace(".", "__").lstrip('-'), "underscore") not in valid_fields + term + for term in fields + if format_value(term.replace(".", "__").lstrip("-"), "underscore") + not in valid_fields ] if bad_terms: - raise ValidationError('invalid sort parameter{}: {}'.format( - ('s' if len(bad_terms) > 1 else ''), ','.join(bad_terms))) + raise ValidationError( + "invalid sort parameter{}: {}".format( + ("s" if len(bad_terms) > 1 else ""), ",".join(bad_terms) + ) + ) # this looks like it duplicates code above, but we want the ValidationError to report # the actual parameter supplied while we want the fields passed to the super() to # be correctly rewritten. @@ -48,14 +54,16 @@ def remove_invalid_fields(self, queryset, fields, view, request): underscore_fields = [] for item in fields: item_rewritten = item.replace(".", "__") - if item_rewritten.startswith('-'): + if item_rewritten.startswith("-"): underscore_fields.append( - '-' + format_value(item_rewritten.lstrip('-'), "underscore")) + "-" + format_value(item_rewritten.lstrip("-"), "underscore") + ) else: underscore_fields.append(format_value(item_rewritten, "underscore")) return super(OrderingFilter, self).remove_invalid_fields( - queryset, underscore_fields, view, request) + queryset, underscore_fields, view, request + ) class QueryParameterValidationFilter(BaseFilterBackend): @@ -68,9 +76,12 @@ class QueryParameterValidationFilter(BaseFilterBackend): override :py:attr:`query_regex` adding the new parameters. Make sure to comply with the rules at http://jsonapi.org/format/#query-parameters. 
""" + #: compiled regex that matches the allowed http://jsonapi.org/format/#query-parameters: #: `sort` and `include` stand alone; `filter`, `fields`, and `page` have []'s - query_regex = re.compile(r'^(sort|include)$|^(?Pfilter|fields|page)(\[[\w\.\-]+\])?$') + query_regex = re.compile( + r"^(sort|include)$|^(?Pfilter|fields|page)(\[[\w\.\-]+\])?$" + ) def validate_query_params(self, request): """ @@ -84,10 +95,14 @@ def validate_query_params(self, request): for qp in request.query_params.keys(): m = self.query_regex.match(qp) if not m: - raise ValidationError('invalid query parameter: {}'.format(qp)) - if not m.group('type') == 'filter' and len(request.query_params.getlist(qp)) > 1: + raise ValidationError("invalid query parameter: {}".format(qp)) + if ( + not m.group("type") == "filter" + and len(request.query_params.getlist(qp)) > 1 + ): raise ValidationError( - 'repeated query parameter not allowed: {}'.format(qp)) + "repeated query parameter not allowed: {}".format(qp) + ) def filter_queryset(self, request, queryset, view): """ diff --git a/rest_framework_json_api/metadata.py b/rest_framework_json_api/metadata.py index ef3356fe..a48af532 100644 --- a/rest_framework_json_api/metadata.py +++ b/rest_framework_json_api/metadata.py @@ -17,56 +17,65 @@ class JSONAPIMetadata(SimpleMetadata): There are not any formalized standards for `OPTIONS` responses for us to base this on. """ - type_lookup = ClassLookupDict({ - serializers.Field: 'GenericField', - serializers.RelatedField: 'Relationship', - serializers.BooleanField: 'Boolean', - serializers.NullBooleanField: 'Boolean', - serializers.CharField: 'String', - serializers.URLField: 'URL', - serializers.EmailField: 'Email', - serializers.RegexField: 'Regex', - serializers.SlugField: 'Slug', - serializers.IntegerField: 'Integer', - serializers.FloatField: 'Float', - serializers.DecimalField: 'Decimal', - serializers.DateField: 'Date', - serializers.DateTimeField: 'DateTime', - serializers.TimeField: 'Time', - serializers.ChoiceField: 'Choice', - serializers.MultipleChoiceField: 'MultipleChoice', - serializers.FileField: 'File', - serializers.ImageField: 'Image', - serializers.ListField: 'List', - serializers.DictField: 'Dict', - serializers.Serializer: 'Serializer', - }) + + type_lookup = ClassLookupDict( + { + serializers.Field: "GenericField", + serializers.RelatedField: "Relationship", + serializers.BooleanField: "Boolean", + serializers.NullBooleanField: "Boolean", + serializers.CharField: "String", + serializers.URLField: "URL", + serializers.EmailField: "Email", + serializers.RegexField: "Regex", + serializers.SlugField: "Slug", + serializers.IntegerField: "Integer", + serializers.FloatField: "Float", + serializers.DecimalField: "Decimal", + serializers.DateField: "Date", + serializers.DateTimeField: "DateTime", + serializers.TimeField: "Time", + serializers.ChoiceField: "Choice", + serializers.MultipleChoiceField: "MultipleChoice", + serializers.FileField: "File", + serializers.ImageField: "Image", + serializers.ListField: "List", + serializers.DictField: "Dict", + serializers.Serializer: "Serializer", + } + ) try: - relation_type_lookup = ClassLookupDict({ - related.ManyToManyDescriptor: 'ManyToMany', - related.ReverseManyToOneDescriptor: 'OneToMany', - related.ForwardManyToOneDescriptor: 'ManyToOne', - }) + relation_type_lookup = ClassLookupDict( + { + related.ManyToManyDescriptor: "ManyToMany", + related.ReverseManyToOneDescriptor: "OneToMany", + related.ForwardManyToOneDescriptor: "ManyToOne", + } + ) except AttributeError: - 
relation_type_lookup = ClassLookupDict({ - related.ManyRelatedObjectsDescriptor: 'ManyToMany', - related.ReverseManyRelatedObjectsDescriptor: 'ManyToMany', - related.ForeignRelatedObjectsDescriptor: 'OneToMany', - related.ReverseSingleRelatedObjectDescriptor: 'ManyToOne', - }) + relation_type_lookup = ClassLookupDict( + { + related.ManyRelatedObjectsDescriptor: "ManyToMany", + related.ReverseManyRelatedObjectsDescriptor: "ManyToMany", + related.ForeignRelatedObjectsDescriptor: "OneToMany", + related.ReverseSingleRelatedObjectDescriptor: "ManyToOne", + } + ) def determine_metadata(self, request, view): metadata = OrderedDict() - metadata['name'] = view.get_view_name() - metadata['description'] = view.get_view_description() - metadata['renders'] = [renderer.media_type for renderer in view.renderer_classes] - metadata['parses'] = [parser.media_type for parser in view.parser_classes] - metadata['allowed_methods'] = view.allowed_methods - if hasattr(view, 'get_serializer'): + metadata["name"] = view.get_view_name() + metadata["description"] = view.get_view_description() + metadata["renders"] = [ + renderer.media_type for renderer in view.renderer_classes + ] + metadata["parses"] = [parser.media_type for parser in view.parser_classes] + metadata["allowed_methods"] = view.allowed_methods + if hasattr(view, "get_serializer"): actions = self.determine_actions(request, view) if actions: - metadata['actions'] = actions + metadata["actions"] = actions return metadata def get_serializer_info(self, serializer): @@ -74,7 +83,7 @@ def get_serializer_info(self, serializer): Given an instance of a serializer, return a dictionary of metadata about its fields. """ - if hasattr(serializer, 'child'): + if hasattr(serializer, "child"): # If this is a `ListSerializer` then we want to examine the # underlying child serializer instance instead. 
serializer = serializer.child @@ -82,10 +91,12 @@ def get_serializer_info(self, serializer): # Remove the URL field if present serializer.fields.pop(api_settings.URL_FIELD_NAME, None) - return OrderedDict([ - (format_value(field_name), self.get_field_info(field)) - for field_name, field in serializer.fields.items() - ]) + return OrderedDict( + [ + (format_value(field_name), self.get_field_info(field)) + for field_name, field in serializer.fields.items() + ] + ) def get_field_info(self, field): """ @@ -96,13 +107,13 @@ def get_field_info(self, field): serializer = field.parent if isinstance(field, serializers.ManyRelatedField): - field_info['type'] = self.type_lookup[field.child_relation] + field_info["type"] = self.type_lookup[field.child_relation] else: - field_info['type'] = self.type_lookup[field] + field_info["type"] = self.type_lookup[field] try: - serializer_model = getattr(serializer.Meta, 'model') - field_info['relationship_type'] = self.relation_type_lookup[ + serializer_model = getattr(serializer.Meta, "model") + field_info["relationship_type"] = self.relation_type_lookup[ getattr(serializer_model, field.field_name) ] except KeyError: @@ -110,40 +121,51 @@ def get_field_info(self, field): except AttributeError: pass else: - field_info['relationship_resource'] = get_related_resource_type(field) + field_info["relationship_resource"] = get_related_resource_type(field) - field_info['required'] = getattr(field, 'required', False) + field_info["required"] = getattr(field, "required", False) attrs = [ - 'read_only', 'write_only', 'label', 'help_text', - 'min_length', 'max_length', - 'min_value', 'max_value', 'initial' + "read_only", + "write_only", + "label", + "help_text", + "min_length", + "max_length", + "min_value", + "max_value", + "initial", ] for attr in attrs: value = getattr(field, attr, None) - if value is not None and value != '': + if value is not None and value != "": field_info[attr] = force_str(value, strings_only=True) - if getattr(field, 'child', None): - field_info['child'] = self.get_field_info(field.child) - elif getattr(field, 'fields', None): - field_info['children'] = self.get_serializer_info(field) + if getattr(field, "child", None): + field_info["child"] = self.get_field_info(field.child) + elif getattr(field, "fields", None): + field_info["children"] = self.get_serializer_info(field) if ( - not field_info.get('read_only') and - not field_info.get('relationship_resource') and - hasattr(field, 'choices') + not field_info.get("read_only") + and not field_info.get("relationship_resource") + and hasattr(field, "choices") ): - field_info['choices'] = [ + field_info["choices"] = [ { - 'value': choice_value, - 'display_name': force_str(choice_name, strings_only=True) + "value": choice_value, + "display_name": force_str(choice_name, strings_only=True), } for choice_value, choice_name in field.choices.items() ] - if hasattr(serializer, 'included_serializers') and 'relationship_resource' in field_info: - field_info['allows_include'] = field.field_name in serializer.included_serializers + if ( + hasattr(serializer, "included_serializers") + and "relationship_resource" in field_info + ): + field_info["allows_include"] = ( + field.field_name in serializer.included_serializers + ) return field_info diff --git a/rest_framework_json_api/pagination.py b/rest_framework_json_api/pagination.py index 2e57b937..468f684c 100644 --- a/rest_framework_json_api/pagination.py +++ b/rest_framework_json_api/pagination.py @@ -12,14 +12,15 @@ class 
JsonApiPageNumberPagination(PageNumberPagination): """ A json-api compatible pagination format. """ - page_query_param = 'page[number]' - page_size_query_param = 'page[size]' + + page_query_param = "page[number]" + page_size_query_param = "page[size]" max_page_size = 100 def build_link(self, index): if not index: return None - url = self.request and self.request.build_absolute_uri() or '' + url = self.request and self.request.build_absolute_uri() or "" return replace_query_param(url, self.page_query_param, index) def get_paginated_response(self, data): @@ -31,22 +32,28 @@ def get_paginated_response(self, data): if self.page.has_previous(): previous = self.page.previous_page_number() - return Response({ - 'results': data, - 'meta': { - 'pagination': OrderedDict([ - ('page', self.page.number), - ('pages', self.page.paginator.num_pages), - ('count', self.page.paginator.count), - ]) - }, - 'links': OrderedDict([ - ('first', self.build_link(1)), - ('last', self.build_link(self.page.paginator.num_pages)), - ('next', self.build_link(next)), - ('prev', self.build_link(previous)) - ]) - }) + return Response( + { + "results": data, + "meta": { + "pagination": OrderedDict( + [ + ("page", self.page.number), + ("pages", self.page.paginator.num_pages), + ("count", self.page.paginator.count), + ] + ) + }, + "links": OrderedDict( + [ + ("first", self.build_link(1)), + ("last", self.build_link(self.page.paginator.num_pages)), + ("next", self.build_link(next)), + ("prev", self.build_link(previous)), + ] + ), + } + ) class JsonApiLimitOffsetPagination(LimitOffsetPagination): @@ -59,8 +66,9 @@ class JsonApiLimitOffsetPagination(LimitOffsetPagination): http://api.example.org/accounts/?page[offset]=400&page[limit]=100 """ - limit_query_param = 'page[limit]' - offset_query_param = 'page[offset]' + + limit_query_param = "page[limit]" + offset_query_param = "page[offset]" max_limit = 100 def get_last_link(self): @@ -85,19 +93,25 @@ def get_first_link(self): return remove_query_param(url, self.offset_query_param) def get_paginated_response(self, data): - return Response({ - 'results': data, - 'meta': { - 'pagination': OrderedDict([ - ('count', self.count), - ('limit', self.limit), - ('offset', self.offset), - ]) - }, - 'links': OrderedDict([ - ('first', self.get_first_link()), - ('last', self.get_last_link()), - ('next', self.get_next_link()), - ('prev', self.get_previous_link()) - ]) - }) + return Response( + { + "results": data, + "meta": { + "pagination": OrderedDict( + [ + ("count", self.count), + ("limit", self.limit), + ("offset", self.offset), + ] + ) + }, + "links": OrderedDict( + [ + ("first", self.get_first_link()), + ("last", self.get_last_link()), + ("next", self.get_next_link()), + ("prev", self.get_previous_link()), + ] + ), + } + ) diff --git a/rest_framework_json_api/parsers.py b/rest_framework_json_api/parsers.py index 88c4f522..e3315334 100644 --- a/rest_framework_json_api/parsers.py +++ b/rest_framework_json_api/parsers.py @@ -31,41 +31,44 @@ class JSONParser(parsers.JSONParser): We extract the attributes so that DRF serializers can work as normal. 
""" - media_type = 'application/vnd.api+json' + + media_type = "application/vnd.api+json" renderer_class = renderers.JSONRenderer @staticmethod def parse_attributes(data): - attributes = data.get('attributes') + attributes = data.get("attributes") uses_format_translation = json_api_settings.FORMAT_FIELD_NAMES if not attributes: return dict() elif uses_format_translation: # convert back to python/rest_framework's preferred underscore format - return utils.format_field_names(attributes, 'underscore') + return utils.format_field_names(attributes, "underscore") else: return attributes @staticmethod def parse_relationships(data): uses_format_translation = json_api_settings.FORMAT_FIELD_NAMES - relationships = data.get('relationships') + relationships = data.get("relationships") if not relationships: relationships = dict() elif uses_format_translation: # convert back to python/rest_framework's preferred underscore format - relationships = utils.format_field_names(relationships, 'underscore') + relationships = utils.format_field_names(relationships, "underscore") # Parse the relationships parsed_relationships = dict() for field_name, field_data in relationships.items(): - field_data = field_data.get('data') + field_data = field_data.get("data") if isinstance(field_data, dict) or field_data is None: parsed_relationships[field_name] = field_data elif isinstance(field_data, list): - parsed_relationships[field_name] = list(relation for relation in field_data) + parsed_relationships[field_name] = list( + relation for relation in field_data + ) return parsed_relationships @staticmethod @@ -75,9 +78,9 @@ def parse_metadata(result): it reads the `meta` content in the request body and returns it in a dictionary with a `_meta` top level key. """ - metadata = result.get('meta') + metadata = result.get("meta") if metadata: - return {'_meta': metadata} + return {"_meta": metadata} else: return {} @@ -89,79 +92,92 @@ def parse(self, stream, media_type=None, parser_context=None): stream, media_type=media_type, parser_context=parser_context ) - if not isinstance(result, dict) or 'data' not in result: - raise ParseError('Received document does not contain primary data') + if not isinstance(result, dict) or "data" not in result: + raise ParseError("Received document does not contain primary data") - data = result.get('data') - view = parser_context['view'] + data = result.get("data") + view = parser_context["view"] from rest_framework_json_api.views import RelationshipView + if isinstance(view, RelationshipView): # We skip parsing the object as JSONAPI Resource Identifier Object and not a regular # Resource Object if isinstance(data, list): for resource_identifier_object in data: if not ( - resource_identifier_object.get('id') and - resource_identifier_object.get('type') + resource_identifier_object.get("id") + and resource_identifier_object.get("type") ): raise ParseError( - 'Received data contains one or more malformed JSONAPI ' - 'Resource Identifier Object(s)' + "Received data contains one or more malformed JSONAPI " + "Resource Identifier Object(s)" ) - elif not (data.get('id') and data.get('type')): - raise ParseError('Received data is not a valid JSONAPI Resource Identifier Object') + elif not (data.get("id") and data.get("type")): + raise ParseError( + "Received data is not a valid JSONAPI Resource Identifier Object" + ) return data - request = parser_context.get('request') + request = parser_context.get("request") # Sanity check if not isinstance(data, dict): - raise ParseError('Received data is not a 
valid JSONAPI Resource Identifier Object') + raise ParseError( + "Received data is not a valid JSONAPI Resource Identifier Object" + ) # Check for inconsistencies - if request.method in ('PUT', 'POST', 'PATCH'): + if request.method in ("PUT", "POST", "PATCH"): resource_name = utils.get_resource_name( - parser_context, expand_polymorphic_types=True) + parser_context, expand_polymorphic_types=True + ) if isinstance(resource_name, str): - if data.get('type') != resource_name: + if data.get("type") != resource_name: raise exceptions.Conflict( "The resource object's type ({data_type}) is not the type that " "constitute the collection represented by the endpoint " "({resource_type}).".format( - data_type=data.get('type'), - resource_type=resource_name)) + data_type=data.get("type"), resource_type=resource_name + ) + ) else: - if data.get('type') not in resource_name: + if data.get("type") not in resource_name: raise exceptions.Conflict( "The resource object's type ({data_type}) is not the type that " "constitute the collection represented by the endpoint " "(one of [{resource_types}]).".format( - data_type=data.get('type'), - resource_types=", ".join(resource_name))) - if not data.get('id') and request.method in ('PATCH', 'PUT'): - raise ParseError("The resource identifier object must contain an 'id' member") - - if request.method in ('PATCH', 'PUT'): - lookup_url_kwarg = getattr(view, 'lookup_url_kwarg', None) or \ - getattr(view, 'lookup_field', None) - if lookup_url_kwarg and str(data.get('id')) != str(view.kwargs[lookup_url_kwarg]): + data_type=data.get("type"), + resource_types=", ".join(resource_name), + ) + ) + if not data.get("id") and request.method in ("PATCH", "PUT"): + raise ParseError( + "The resource identifier object must contain an 'id' member" + ) + + if request.method in ("PATCH", "PUT"): + lookup_url_kwarg = getattr(view, "lookup_url_kwarg", None) or getattr( + view, "lookup_field", None + ) + if lookup_url_kwarg and str(data.get("id")) != str( + view.kwargs[lookup_url_kwarg] + ): raise exceptions.Conflict( "The resource object's id ({data_id}) does not match url's " "lookup id ({url_id})".format( - data_id=data.get('id'), - url_id=view.kwargs[lookup_url_kwarg] + data_id=data.get("id"), url_id=view.kwargs[lookup_url_kwarg] ) ) # Construct the return data - serializer_class = getattr(view, 'serializer_class', None) - parsed_data = {'id': data.get('id')} if 'id' in data else {} + serializer_class = getattr(view, "serializer_class", None) + parsed_data = {"id": data.get("id")} if "id" in data else {} # `type` field needs to be allowed in none polymorphic serializers if serializer_class is not None: if issubclass(serializer_class, serializers.PolymorphicModelSerializer): - parsed_data['type'] = data.get('type') + parsed_data["type"] = data.get("type") parsed_data.update(self.parse_attributes(data)) parsed_data.update(self.parse_relationships(data)) parsed_data.update(self.parse_metadata(result)) diff --git a/rest_framework_json_api/relations.py b/rest_framework_json_api/relations.py index 95df1d48..2924cd56 100644 --- a/rest_framework_json_api/relations.py +++ b/rest_framework_json_api/relations.py @@ -18,14 +18,14 @@ get_included_serializers, get_resource_type_from_instance, get_resource_type_from_queryset, - get_resource_type_from_serializer + get_resource_type_from_serializer, ) LINKS_PARAMS = [ - 'self_link_view_name', - 'related_link_view_name', - 'related_link_lookup_field', - 'related_link_url_kwarg' + "self_link_view_name", + "related_link_view_name", + 
"related_link_lookup_field", + "related_link_url_kwarg", ] @@ -52,7 +52,7 @@ class ManyRelatedFieldWithNoData(SkipDataMixin, DRFManyRelatedField): class HyperlinkedMixin(object): self_link_view_name = None related_link_view_name = None - related_link_lookup_field = 'pk' + related_link_lookup_field = "pk" def __init__(self, self_link_view_name=None, related_link_view_name=None, **kwargs): if self_link_view_name is not None: @@ -61,10 +61,10 @@ def __init__(self, self_link_view_name=None, related_link_view_name=None, **kwar self.related_link_view_name = related_link_view_name self.related_link_lookup_field = kwargs.pop( - 'related_link_lookup_field', self.related_link_lookup_field + "related_link_lookup_field", self.related_link_lookup_field ) self.related_link_url_kwarg = kwargs.pop( - 'related_link_url_kwarg', self.related_link_lookup_field + "related_link_url_kwarg", self.related_link_lookup_field ) # We include this simply for dependency injection in tests. @@ -91,7 +91,7 @@ def get_url(self, name, view_name, kwargs, request): url = self.reverse(view_name, kwargs=kwargs, request=request) except NoReverseMatch: msg = ( - 'Could not resolve URL for hyperlinked relationship using ' + "Could not resolve URL for hyperlinked relationship using " 'view name "%s".' ) raise ImproperlyConfigured(msg % view_name) @@ -101,18 +101,26 @@ def get_url(self, name, view_name, kwargs, request): return Hyperlink(url, name) - def get_links(self, obj=None, lookup_field='pk'): - request = self.context.get('request', None) - view = self.context.get('view', None) + def get_links(self, obj=None, lookup_field="pk"): + request = self.context.get("request", None) + view = self.context.get("view", None) return_data = OrderedDict() - kwargs = {lookup_field: getattr(obj, lookup_field) if obj else view.kwargs[lookup_field]} + kwargs = { + lookup_field: getattr(obj, lookup_field) + if obj + else view.kwargs[lookup_field] + } self_kwargs = kwargs.copy() - self_kwargs.update({ - 'related_field': self.field_name if self.field_name else self.parent.field_name - }) - self_link = self.get_url('self', self.self_link_view_name, self_kwargs, request) + self_kwargs.update( + { + "related_field": self.field_name + if self.field_name + else self.parent.field_name + } + ) + self_link = self.get_url("self", self.self_link_view_name, self_kwargs, request) # Assuming RelatedField will be declared in two ways: # 1. url(r'^authors/(?P[^/.]+)/(?P\w+)/$', @@ -120,22 +128,25 @@ def get_links(self, obj=None, lookup_field='pk'): # 2. 
url(r'^authors/(?P[^/.]+)/bio/$', # AuthorBioViewSet.as_view({'get': 'retrieve'})) # So, if related_link_url_kwarg == 'pk' it will add 'related_field' parameter to reverse() - if self.related_link_url_kwarg == 'pk': + if self.related_link_url_kwarg == "pk": related_kwargs = self_kwargs else: - related_kwargs = {self.related_link_url_kwarg: kwargs[self.related_link_lookup_field]} + related_kwargs = { + self.related_link_url_kwarg: kwargs[self.related_link_lookup_field] + } - related_link = self.get_url('related', self.related_link_view_name, related_kwargs, request) + related_link = self.get_url( + "related", self.related_link_view_name, related_kwargs, request + ) if self_link: - return_data.update({'self': self_link}) + return_data.update({"self": self_link}) if related_link: - return_data.update({'related': related_link}) + return_data.update({"related": related_link}) return return_data class HyperlinkedRelatedField(HyperlinkedMixin, SkipDataMixin, RelatedField): - @classmethod def many_init(cls, *args, **kwargs): """ @@ -155,7 +166,7 @@ def many_init(cls, *args, **kwargs): kwargs['child'] = cls() return CustomManyRelatedField(*args, **kwargs) """ - list_kwargs = {'child_relation': cls(*args, **kwargs)} + list_kwargs = {"child_relation": cls(*args, **kwargs)} for key in kwargs: if key in MANY_RELATION_KWARGS: list_kwargs[key] = kwargs[key] @@ -166,25 +177,27 @@ class ResourceRelatedField(HyperlinkedMixin, PrimaryKeyRelatedField): _skip_polymorphic_optimization = True self_link_view_name = None related_link_view_name = None - related_link_lookup_field = 'pk' + related_link_lookup_field = "pk" default_error_messages = { - 'required': _('This field is required.'), - 'does_not_exist': _('Invalid pk "{pk_value}" - object does not exist.'), - 'incorrect_type': _( - 'Incorrect type. Expected resource identifier object, received {data_type}.' + "required": _("This field is required."), + "does_not_exist": _('Invalid pk "{pk_value}" - object does not exist.'), + "incorrect_type": _( + "Incorrect type. Expected resource identifier object, received {data_type}." ), - 'incorrect_relation_type': _( - 'Incorrect relation type. Expected {relation_type}, received {received_type}.' + "incorrect_relation_type": _( + "Incorrect relation type. Expected {relation_type}, received {received_type}." 
), - 'missing_type': _('Invalid resource identifier object: missing \'type\' attribute'), - 'missing_id': _('Invalid resource identifier object: missing \'id\' attribute'), - 'no_match': _('Invalid hyperlink - No URL match.'), + "missing_type": _( + "Invalid resource identifier object: missing 'type' attribute" + ), + "missing_id": _("Invalid resource identifier object: missing 'id' attribute"), + "no_match": _("Invalid hyperlink - No URL match."), } def __init__(self, **kwargs): # check for a model class that was passed in for the relation type - model = kwargs.pop('model', None) + model = kwargs.pop("model", None) if model: self.model = model @@ -213,9 +226,9 @@ def to_internal_value(self, data): data = json.loads(data) except ValueError: # show a useful error if they send a `pk` instead of resource object - self.fail('incorrect_type', data_type=type(data).__name__) + self.fail("incorrect_type", data_type=type(data).__name__) if not isinstance(data, dict): - self.fail('incorrect_type', data_type=type(data).__name__) + self.fail("incorrect_type", data_type=type(data).__name__) expected_relation_type = get_resource_type_from_queryset(self.get_queryset()) serializer_resource_type = self.get_resource_type_from_included_serializer() @@ -223,23 +236,23 @@ def to_internal_value(self, data): if serializer_resource_type is not None: expected_relation_type = serializer_resource_type - if 'type' not in data: - self.fail('missing_type') + if "type" not in data: + self.fail("missing_type") - if 'id' not in data: - self.fail('missing_id') + if "id" not in data: + self.fail("missing_id") - if data['type'] != expected_relation_type: + if data["type"] != expected_relation_type: self.conflict( - 'incorrect_relation_type', + "incorrect_relation_type", relation_type=expected_relation_type, - received_type=data['type'] + received_type=data["type"], ) - return super(ResourceRelatedField, self).to_internal_value(data['id']) + return super(ResourceRelatedField, self).to_internal_value(data["id"]) def to_representation(self, value): - if getattr(self, 'pk_field', None) is not None: + if getattr(self, "pk_field", None) is not None: pk = self.pk_field.to_representation(value.pk) else: pk = value.pk @@ -248,7 +261,7 @@ def to_representation(self, value): if resource_type is None or not self._skip_polymorphic_optimization: resource_type = get_resource_type_from_instance(value) - return OrderedDict([('type', resource_type), ('id', str(pk))]) + return OrderedDict([("type", resource_type), ("id", str(pk))]) def get_resource_type_from_included_serializer(self): """ @@ -262,7 +275,7 @@ def get_resource_type_from_included_serializer(self): # accept both singular and plural versions of field_name field_names = [ inflection.singularize(field_name), - inflection.pluralize(field_name) + inflection.pluralize(field_name), ] includes = get_included_serializers(parent) for field in field_names: @@ -272,7 +285,7 @@ def get_resource_type_from_included_serializer(self): return None def get_parent_serializer(self): - if hasattr(self.parent, 'parent') and self.is_serializer(self.parent.parent): + if hasattr(self.parent, "parent") and self.is_serializer(self.parent.parent): return self.parent.parent elif self.is_serializer(self.parent): return self.parent @@ -292,13 +305,12 @@ def get_choices(self, cutoff=None): if cutoff is not None: queryset = queryset[:cutoff] - return OrderedDict([ - ( - json.dumps(self.to_representation(item)), - self.display_value(item) - ) - for item in queryset - ]) + return OrderedDict( + [ + 
(json.dumps(self.to_representation(item)), self.display_value(item)) + for item in queryset + ] + ) class PolymorphicResourceRelatedField(ResourceRelatedField): @@ -309,10 +321,15 @@ class PolymorphicResourceRelatedField(ResourceRelatedField): """ _skip_polymorphic_optimization = False - default_error_messages = dict(ResourceRelatedField.default_error_messages, **{ - 'incorrect_relation_type': _('Incorrect relation type. Expected one of [{relation_type}], ' - 'received {received_type}.'), - }) + default_error_messages = dict( + ResourceRelatedField.default_error_messages, + **{ + "incorrect_relation_type": _( + "Incorrect relation type. Expected one of [{relation_type}], " + "received {received_type}." + ), + } + ) def __init__(self, polymorphic_serializer, *args, **kwargs): self.polymorphic_serializer = polymorphic_serializer @@ -327,34 +344,37 @@ def to_internal_value(self, data): data = json.loads(data) except ValueError: # show a useful error if they send a `pk` instead of resource object - self.fail('incorrect_type', data_type=type(data).__name__) + self.fail("incorrect_type", data_type=type(data).__name__) if not isinstance(data, dict): - self.fail('incorrect_type', data_type=type(data).__name__) + self.fail("incorrect_type", data_type=type(data).__name__) - if 'type' not in data: - self.fail('missing_type') + if "type" not in data: + self.fail("missing_type") - if 'id' not in data: - self.fail('missing_id') + if "id" not in data: + self.fail("missing_id") expected_relation_types = self.polymorphic_serializer.get_polymorphic_types() - if data['type'] not in expected_relation_types: - self.conflict('incorrect_relation_type', relation_type=", ".join( - expected_relation_types), received_type=data['type']) + if data["type"] not in expected_relation_types: + self.conflict( + "incorrect_relation_type", + relation_type=", ".join(expected_relation_types), + received_type=data["type"], + ) - return super(ResourceRelatedField, self).to_internal_value(data['id']) + return super(ResourceRelatedField, self).to_internal_value(data["id"]) class SerializerMethodFieldBase(Field): def __init__(self, method_name=None, **kwargs): self.method_name = method_name - kwargs['source'] = '*' - kwargs['read_only'] = True + kwargs["source"] = "*" + kwargs["read_only"] = True super().__init__(**kwargs) def bind(self, field_name, parent): - default_method_name = 'get_{field_name}'.format(field_name=field_name) + default_method_name = "get_{field_name}".format(field_name=field_name) if self.method_name is None: self.method_name = default_method_name super().bind(field_name, parent) @@ -364,40 +384,46 @@ def get_attribute(self, instance): return serializer_method(instance) -class ManySerializerMethodResourceRelatedField(SerializerMethodFieldBase, ResourceRelatedField): +class ManySerializerMethodResourceRelatedField( + SerializerMethodFieldBase, ResourceRelatedField +): def __init__(self, child_relation=None, *args, **kwargs): - assert child_relation is not None, '`child_relation` is a required argument.' + assert child_relation is not None, "`child_relation` is a required argument." 
self.child_relation = child_relation super().__init__(**kwargs) - self.child_relation.bind(field_name='', parent=self) + self.child_relation.bind(field_name="", parent=self) def to_representation(self, value): return [self.child_relation.to_representation(item) for item in value] -class SerializerMethodResourceRelatedField(SerializerMethodFieldBase, ResourceRelatedField): +class SerializerMethodResourceRelatedField( + SerializerMethodFieldBase, ResourceRelatedField +): """ Allows us to use serializer method RelatedFields with return querysets """ - many_kwargs = [*MANY_RELATION_KWARGS, *LINKS_PARAMS, 'method_name', 'model'] + many_kwargs = [*MANY_RELATION_KWARGS, *LINKS_PARAMS, "method_name", "model"] many_cls = ManySerializerMethodResourceRelatedField @classmethod def many_init(cls, *args, **kwargs): - list_kwargs = {'child_relation': cls(**kwargs)} + list_kwargs = {"child_relation": cls(**kwargs)} for key in kwargs: if key in cls.many_kwargs: list_kwargs[key] = kwargs[key] return cls.many_cls(**list_kwargs) -class ManySerializerMethodHyperlinkedRelatedField(SkipDataMixin, - ManySerializerMethodResourceRelatedField): +class ManySerializerMethodHyperlinkedRelatedField( + SkipDataMixin, ManySerializerMethodResourceRelatedField +): pass -class SerializerMethodHyperlinkedRelatedField(SkipDataMixin, - SerializerMethodResourceRelatedField): +class SerializerMethodHyperlinkedRelatedField( + SkipDataMixin, SerializerMethodResourceRelatedField +): many_cls = ManySerializerMethodHyperlinkedRelatedField diff --git a/rest_framework_json_api/renderers.py b/rest_framework_json_api/renderers.py index a9b8dd82..4733288f 100644 --- a/rest_framework_json_api/renderers.py +++ b/rest_framework_json_api/renderers.py @@ -17,7 +17,11 @@ import rest_framework_json_api from rest_framework_json_api import utils -from rest_framework_json_api.relations import HyperlinkedMixin, ResourceRelatedField, SkipDataMixin +from rest_framework_json_api.relations import ( + HyperlinkedMixin, + ResourceRelatedField, + SkipDataMixin, +) class JSONRenderer(renderers.JSONRenderer): @@ -44,8 +48,8 @@ class JSONRenderer(renderers.JSONRenderer): } """ - media_type = 'application/vnd.api+json' - format = 'vnd.api+json' + media_type = "application/vnd.api+json" + format = "vnd.api+json" @classmethod def extract_attributes(cls, fields, resource): @@ -55,15 +59,13 @@ def extract_attributes(cls, fields, resource): data = OrderedDict() for field_name, field in iter(fields.items()): # ID is always provided in the root of JSON API so remove it from attributes - if field_name == 'id': + if field_name == "id": continue # don't output a key for write only fields if fields[field_name].write_only: continue # Skip fields with relations - if isinstance( - field, (relations.RelatedField, relations.ManyRelatedField) - ): + if isinstance(field, (relations.RelatedField, relations.ManyRelatedField)): continue # Skip read_only attribute fields when `resource` is an empty @@ -75,9 +77,7 @@ def extract_attributes(cls, fields, resource): if fields[field_name].read_only: continue - data.update({ - field_name: resource.get(field_name) - }) + data.update({field_name: resource.get(field_name)}) return utils.format_field_names(data) @@ -123,67 +123,74 @@ def extract_relationships(cls, fields, resource, resource_instance): relation_data = list() # Don't try to query an empty relation - relation_queryset = relation_instance \ - if relation_instance is not None else list() + relation_queryset = ( + relation_instance if relation_instance is not None else list() + ) 
for related_object in relation_queryset: relation_data.append( - OrderedDict([ - ('type', relation_type), - ('id', encoding.force_str(related_object.pk)) - ]) + OrderedDict( + [ + ("type", relation_type), + ("id", encoding.force_str(related_object.pk)), + ] + ) ) - data.update({field_name: { - 'links': { - "related": resource.get(field_name)}, - 'data': relation_data, - 'meta': { - 'count': len(relation_data) + data.update( + { + field_name: { + "links": {"related": resource.get(field_name)}, + "data": relation_data, + "meta": {"count": len(relation_data)}, + } } - }}) + ) continue relation_data = {} if isinstance(field, HyperlinkedMixin): - field_links = field.get_links(resource_instance, field.related_link_lookup_field) - relation_data.update({'links': field_links} if field_links else dict()) + field_links = field.get_links( + resource_instance, field.related_link_lookup_field + ) + relation_data.update({"links": field_links} if field_links else dict()) data.update({field_name: relation_data}) - if isinstance(field, (ResourceRelatedField, )): + if isinstance(field, (ResourceRelatedField,)): if not isinstance(field, SkipDataMixin): - relation_data.update({'data': resource.get(field_name)}) + relation_data.update({"data": resource.get(field_name)}) data.update({field_name: relation_data}) continue if isinstance( - field, (relations.PrimaryKeyRelatedField, relations.HyperlinkedRelatedField) + field, + (relations.PrimaryKeyRelatedField, relations.HyperlinkedRelatedField), ): resolved, relation = utils.get_relation_instance( - resource_instance, '%s_id' % source, field.parent + resource_instance, "%s_id" % source, field.parent ) if not resolved: continue relation_id = relation if resource.get(field_name) else None relation_data = { - 'data': ( - OrderedDict([ - ('type', relation_type), ('id', encoding.force_str(relation_id)) - ]) - if relation_id is not None else None) + "data": ( + OrderedDict( + [ + ("type", relation_type), + ("id", encoding.force_str(relation_id)), + ] + ) + if relation_id is not None + else None + ) } - if ( - isinstance(field, relations.HyperlinkedRelatedField) and - resource.get(field_name) - ): + if isinstance( + field, relations.HyperlinkedRelatedField + ) and resource.get(field_name): relation_data.update( - { - 'links': { - 'related': resource.get(field_name) - } - } + {"links": {"related": resource.get(field_name)}} ) data.update({field_name: relation_data}) continue @@ -199,25 +206,20 @@ def extract_relationships(cls, fields, resource, resource_instance): if isinstance(resource.get(field_name), Iterable): relation_data.update( - { - 'meta': {'count': len(resource.get(field_name))} - } + {"meta": {"count": len(resource.get(field_name))}} ) if isinstance(field.child_relation, ResourceRelatedField): # special case for ResourceRelatedField - relation_data.update( - {'data': resource.get(field_name)} - ) + relation_data.update({"data": resource.get(field_name)}) if isinstance(field.child_relation, HyperlinkedMixin): field_links = field.child_relation.get_links( resource_instance, - field.child_relation.related_link_lookup_field + field.child_relation.related_link_lookup_field, ) relation_data.update( - {'links': field_links} - if field_links else dict() + {"links": field_links} if field_links else dict() ) data.update({field_name: relation_data}) @@ -226,22 +228,28 @@ def extract_relationships(cls, fields, resource, resource_instance): relation_data = list() for nested_resource_instance in relation_instance: nested_resource_instance_type = ( - relation_type or - 
utils.get_resource_type_from_instance(nested_resource_instance) + relation_type + or utils.get_resource_type_from_instance( + nested_resource_instance + ) ) - relation_data.append(OrderedDict([ - ('type', nested_resource_instance_type), - ('id', encoding.force_str(nested_resource_instance.pk)) - ])) - data.update({ - field_name: { - 'data': relation_data, - 'meta': { - 'count': len(relation_data) + relation_data.append( + OrderedDict( + [ + ("type", nested_resource_instance_type), + ("id", encoding.force_str(nested_resource_instance.pk)), + ] + ) + ) + data.update( + { + field_name: { + "data": relation_data, + "meta": {"count": len(relation_data)}, } } - }) + ) continue return utils.format_field_names(data) @@ -263,8 +271,9 @@ def extract_relation_instance(cls, field, resource_instance): return None @classmethod - def extract_included(cls, fields, resource, resource_instance, included_resources, - included_cache): + def extract_included( + cls, fields, resource, resource_instance, included_resources, included_cache + ): """ Adds related data to the top level included key when the request includes ?include=example,example_field2 @@ -277,7 +286,9 @@ def extract_included(cls, fields, resource, resource_instance, included_resource context = current_serializer.context included_serializers = utils.get_included_serializers(current_serializer) included_resources = copy.copy(included_resources) - included_resources = [inflection.underscore(value) for value in included_resources] + included_resources = [ + inflection.underscore(value) for value in included_resources + ] for field_name, field in iter(fields.items()): # Skip URL field @@ -295,12 +306,12 @@ def extract_included(cls, fields, resource, resource_instance, included_resource except ValueError: # Skip fields not in requested included resources # If no child field, directly continue with the next field - if field_name not in [node.split('.')[0] for node in included_resources]: + if field_name not in [ + node.split(".")[0] for node in included_resources + ]: continue - relation_instance = cls.extract_relation_instance( - field, resource_instance - ) + relation_instance = cls.extract_relation_instance(field, resource_instance) if isinstance(relation_instance, Manager): relation_instance = relation_instance.all() @@ -315,11 +326,14 @@ def extract_included(cls, fields, resource, resource_instance, included_resource if relation_instance is None or not serializer_data: continue - many = field._kwargs.get('child_relation', None) is not None + many = field._kwargs.get("child_relation", None) is not None if isinstance(field, ResourceRelatedField) and not many: - already_included = serializer_data['type'] in included_cache and \ - serializer_data['id'] in included_cache[serializer_data['type']] + already_included = ( + serializer_data["type"] in included_cache + and serializer_data["id"] + in included_cache[serializer_data["type"]] + ) if already_included: continue @@ -328,9 +342,11 @@ def extract_included(cls, fields, resource, resource_instance, included_resource field = serializer_class(relation_instance, many=many, context=context) serializer_data = field.data - new_included_resources = [key.replace('%s.' % field_name, '', 1) - for key in included_resources - if field_name == key.split('.')[0]] + new_included_resources = [ + key.replace("%s." 
% field_name, "", 1) + for key in included_resources + if field_name == key.split(".")[0] + ] if isinstance(field, ListSerializer): serializer = field.child @@ -342,8 +358,10 @@ def extract_included(cls, fields, resource, resource_instance, included_resource serializer_resource = serializer_data[position] nested_resource_instance = relation_queryset[position] resource_type = ( - relation_type or - utils.get_resource_type_from_instance(nested_resource_instance) + relation_type + or utils.get_resource_type_from_instance( + nested_resource_instance + ) ) serializer_fields = utils.get_serializer_fields( serializer.__class__( @@ -355,10 +373,11 @@ def extract_included(cls, fields, resource, resource_instance, included_resource serializer_resource, nested_resource_instance, resource_type, - getattr(serializer, '_poly_force_type_resolution', False) + getattr(serializer, "_poly_force_type_resolution", False), ) - included_cache[new_item['type']][new_item['id']] = \ - utils.format_field_names(new_item) + included_cache[new_item["type"]][ + new_item["id"] + ] = utils.format_field_names(new_item) cls.extract_included( serializer_fields, serializer_resource, @@ -378,11 +397,11 @@ def extract_included(cls, fields, resource, resource_instance, included_resource serializer_data, relation_instance, relation_type, - getattr(field, '_poly_force_type_resolution', False) - ) - included_cache[new_item['type']][new_item['id']] = utils.format_field_names( - new_item + getattr(field, "_poly_force_type_resolution", False), ) + included_cache[new_item["type"]][ + new_item["id"] + ] = utils.format_field_names(new_item) cls.extract_included( serializer_fields, serializer_data, @@ -397,16 +416,14 @@ def extract_meta(cls, serializer, resource): Gathers the data from serializer fields specified in meta_fields and adds it to the meta object. """ - if hasattr(serializer, 'child'): - meta = getattr(serializer.child, 'Meta', None) + if hasattr(serializer, "child"): + meta = getattr(serializer.child, "Meta", None) else: - meta = getattr(serializer, 'Meta', None) - meta_fields = getattr(meta, 'meta_fields', []) + meta = getattr(serializer, "Meta", None) + meta_fields = getattr(meta, "meta_fields", []) data = OrderedDict() for field_name in meta_fields: - data.update({ - field_name: resource.get(field_name) - }) + data.update({field_name: resource.get(field_name)}) return data @classmethod @@ -415,20 +432,26 @@ def extract_root_meta(cls, serializer, resource): Calls a `get_root_meta` function on a serializer, if it exists. """ many = False - if hasattr(serializer, 'child'): + if hasattr(serializer, "child"): many = True serializer = serializer.child data = {} - if getattr(serializer, 'get_root_meta', None): + if getattr(serializer, "get_root_meta", None): json_api_meta = serializer.get_root_meta(resource, many) - assert isinstance(json_api_meta, dict), 'get_root_meta must return a dict' + assert isinstance(json_api_meta, dict), "get_root_meta must return a dict" data.update(json_api_meta) return data @classmethod - def build_json_resource_obj(cls, fields, resource, resource_instance, resource_name, - force_type_resolution=False): + def build_json_resource_obj( + cls, + fields, + resource, + resource_instance, + resource_name, + force_type_resolution=False, + ): """ Builds the resource object (type, id, attributes) and extracts relationships. 
""" @@ -436,28 +459,34 @@ def build_json_resource_obj(cls, fields, resource, resource_instance, resource_n if force_type_resolution: resource_name = utils.get_resource_type_from_instance(resource_instance) resource_data = [ - ('type', resource_name), - ('id', encoding.force_str(resource_instance.pk) if resource_instance else None), - ('attributes', cls.extract_attributes(fields, resource)), + ("type", resource_name), + ( + "id", + encoding.force_str(resource_instance.pk) if resource_instance else None, + ), + ("attributes", cls.extract_attributes(fields, resource)), ] relationships = cls.extract_relationships(fields, resource, resource_instance) if relationships: - resource_data.append(('relationships', relationships)) + resource_data.append(("relationships", relationships)) # Add 'self' link if field is present and valid - if api_settings.URL_FIELD_NAME in resource and \ - isinstance(fields[api_settings.URL_FIELD_NAME], relations.RelatedField): - resource_data.append(('links', {'self': resource[api_settings.URL_FIELD_NAME]})) + if api_settings.URL_FIELD_NAME in resource and isinstance( + fields[api_settings.URL_FIELD_NAME], relations.RelatedField + ): + resource_data.append( + ("links", {"self": resource[api_settings.URL_FIELD_NAME]}) + ) return OrderedDict(resource_data) - def render_relationship_view(self, data, accepted_media_type=None, renderer_context=None): + def render_relationship_view( + self, data, accepted_media_type=None, renderer_context=None + ): # Special case for RelationshipView view = renderer_context.get("view", None) - render_data = OrderedDict([ - ('data', data) - ]) + render_data = OrderedDict([("data", data)]) links = view.get_links() if links: - render_data.update({'links': links}), + render_data.update({"links": links}), return super(JSONRenderer, self).render( render_data, accepted_media_type, renderer_context ) @@ -478,20 +507,23 @@ def render(self, data, accepted_media_type=None, renderer_context=None): resource_name = utils.get_resource_name(renderer_context) # If this is an error response, skip the rest. - if resource_name == 'errors': + if resource_name == "errors": return self.render_errors(data, accepted_media_type, renderer_context) # if response.status_code is 204 then the data to be rendered must # be None - response = renderer_context.get('response', None) + response = renderer_context.get("response", None) if response is not None and response.status_code == 204: return super(JSONRenderer, self).render( None, accepted_media_type, renderer_context ) from rest_framework_json_api.views import RelationshipView + if isinstance(view, RelationshipView): - return self.render_relationship_view(data, accepted_media_type, renderer_context) + return self.render_relationship_view( + data, accepted_media_type, renderer_context + ) # If `resource_name` is set to None then render default as the dev # wants to build the output format manually. 
@@ -502,15 +534,15 @@ def render(self, data, accepted_media_type=None, renderer_context=None): json_api_data = data # initialize json_api_meta with pagination meta or an empty dict - json_api_meta = data.get('meta', {}) if isinstance(data, dict) else {} + json_api_meta = data.get("meta", {}) if isinstance(data, dict) else {} included_cache = defaultdict(dict) - if data and 'results' in data: + if data and "results" in data: serializer_data = data["results"] else: serializer_data = data - serializer = getattr(serializer_data, 'serializer', None) + serializer = getattr(serializer_data, "serializer", None) included_resources = utils.get_included_resources(request, serializer) @@ -519,66 +551,92 @@ def render(self, data, accepted_media_type=None, renderer_context=None): # Extract root meta for any type of serializer json_api_meta.update(self.extract_root_meta(serializer, serializer_data)) - if getattr(serializer, 'many', False): + if getattr(serializer, "many", False): json_api_data = list() for position in range(len(serializer_data)): resource = serializer_data[position] # Get current resource - resource_instance = serializer.instance[position] # Get current instance - - if isinstance(serializer.child, rest_framework_json_api. - serializers.PolymorphicModelSerializer): - resource_serializer_class = serializer.child.\ - get_polymorphic_serializer_for_instance(resource_instance)( - context=serializer.child.context - ) + resource_instance = serializer.instance[ + position + ] # Get current instance + + if isinstance( + serializer.child, + rest_framework_json_api.serializers.PolymorphicModelSerializer, + ): + resource_serializer_class = ( + serializer.child.get_polymorphic_serializer_for_instance( + resource_instance + )(context=serializer.child.context) + ) else: resource_serializer_class = serializer.child fields = utils.get_serializer_fields(resource_serializer_class) force_type_resolution = getattr( - resource_serializer_class, '_poly_force_type_resolution', False) + resource_serializer_class, "_poly_force_type_resolution", False + ) json_resource_obj = self.build_json_resource_obj( - fields, resource, resource_instance, resource_name, force_type_resolution + fields, + resource, + resource_instance, + resource_name, + force_type_resolution, ) meta = self.extract_meta(serializer, resource) if meta: - json_resource_obj.update({'meta': utils.format_field_names(meta)}) + json_resource_obj.update( + {"meta": utils.format_field_names(meta)} + ) json_api_data.append(json_resource_obj) self.extract_included( - fields, resource, resource_instance, included_resources, included_cache + fields, + resource, + resource_instance, + included_resources, + included_cache, ) else: fields = utils.get_serializer_fields(serializer) - force_type_resolution = getattr(serializer, '_poly_force_type_resolution', False) + force_type_resolution = getattr( + serializer, "_poly_force_type_resolution", False + ) resource_instance = serializer.instance json_api_data = self.build_json_resource_obj( - fields, serializer_data, resource_instance, resource_name, force_type_resolution + fields, + serializer_data, + resource_instance, + resource_name, + force_type_resolution, ) meta = self.extract_meta(serializer, serializer_data) if meta: - json_api_data.update({'meta': utils.format_field_names(meta)}) + json_api_data.update({"meta": utils.format_field_names(meta)}) self.extract_included( - fields, serializer_data, resource_instance, included_resources, included_cache + fields, + serializer_data, + resource_instance, + 
included_resources, + included_cache, ) # Make sure we render data in a specific order render_data = OrderedDict() - if isinstance(data, dict) and data.get('links'): - render_data['links'] = data.get('links') + if isinstance(data, dict) and data.get("links"): + render_data["links"] = data.get("links") # format the api root link list - if view.__class__ and view.__class__.__name__ == 'APIRoot': - render_data['data'] = None - render_data['links'] = json_api_data + if view.__class__ and view.__class__.__name__ == "APIRoot": + render_data["data"] = None + render_data["links"] = json_api_data else: - render_data['data'] = json_api_data + render_data["data"] = json_api_data if included_cache: if isinstance(json_api_data, list): @@ -587,22 +645,23 @@ def render(self, data, accepted_media_type=None, renderer_context=None): objects = [json_api_data] for object in objects: - obj_type = object.get('type') - obj_id = object.get('id') - if obj_type in included_cache and \ - obj_id in included_cache[obj_type]: + obj_type = object.get("type") + obj_id = object.get("id") + if obj_type in included_cache and obj_id in included_cache[obj_type]: del included_cache[obj_type][obj_id] if not included_cache[obj_type]: del included_cache[obj_type] if included_cache: - render_data['included'] = list() + render_data["included"] = list() for included_type in sorted(included_cache.keys()): for included_id in sorted(included_cache[included_type].keys()): - render_data['included'].append(included_cache[included_type][included_id]) + render_data["included"].append( + included_cache[included_type][included_id] + ) if json_api_meta: - render_data['meta'] = utils.format_field_names(json_api_meta) + render_data["meta"] = utils.format_field_names(json_api_meta) return super(JSONRenderer, self).render( render_data, accepted_media_type, renderer_context @@ -610,21 +669,21 @@ def render(self, data, accepted_media_type=None, renderer_context=None): class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer): - template = 'rest_framework_json_api/api.html' - includes_template = 'rest_framework_json_api/includes.html' + template = "rest_framework_json_api/api.html" + includes_template = "rest_framework_json_api/includes.html" def get_context(self, data, accepted_media_type, renderer_context): context = super(BrowsableAPIRenderer, self).get_context( data, accepted_media_type, renderer_context ) - view = renderer_context['view'] + view = renderer_context["view"] - context['includes_form'] = self.get_includes_form(view) + context["includes_form"] = self.get_includes_form(view) return context @classmethod - def _get_included_serializers(cls, serializer, prefix='', already_seen=None): + def _get_included_serializers(cls, serializer, prefix="", already_seen=None): if not already_seen: already_seen = set() @@ -634,12 +693,15 @@ def _get_included_serializers(cls, serializer, prefix='', already_seen=None): included_serializers = [] already_seen.add(serializer) - for include, included_serializer in utils.get_included_serializers(serializer).items(): - included_serializers.append(f'{prefix}{include}') + for include, included_serializer in utils.get_included_serializers( + serializer + ).items(): + included_serializers.append(f"{prefix}{include}") included_serializers.extend( cls._get_included_serializers( - included_serializer, f'{prefix}{include}.', - already_seen=already_seen + included_serializer, + f"{prefix}{include}.", + already_seen=already_seen, ) ) @@ -651,9 +713,9 @@ def get_includes_form(self, view): except AttributeError: return - 
if not hasattr(serializer_class, 'included_serializers'): + if not hasattr(serializer_class, "included_serializers"): return template = loader.get_template(self.includes_template) - context = {'elements': self._get_included_serializers(serializer_class)} + context = {"elements": self._get_included_serializers(serializer_class)} return template.render(context) diff --git a/rest_framework_json_api/schemas/openapi.py b/rest_framework_json_api/schemas/openapi.py index fe6b095e..b51f366f 100644 --- a/rest_framework_json_api/schemas/openapi.py +++ b/rest_framework_json_api/schemas/openapi.py @@ -14,109 +14,104 @@ class SchemaGenerator(drf_openapi.SchemaGenerator): """ Extend DRF's SchemaGenerator to implement jsonapi-flavored generateschema command. """ + #: These JSONAPI component definitions are referenced by the generated OAS schema. #: If you need to add more or change these static component definitions, extend this dict. jsonapi_components = { - 'schemas': { - 'jsonapi': { - 'type': 'object', - 'description': "The server's implementation", - 'properties': { - 'version': {'type': 'string'}, - 'meta': {'$ref': '#/components/schemas/meta'} + "schemas": { + "jsonapi": { + "type": "object", + "description": "The server's implementation", + "properties": { + "version": {"type": "string"}, + "meta": {"$ref": "#/components/schemas/meta"}, }, - 'additionalProperties': False + "additionalProperties": False, }, - 'resource': { - 'type': 'object', - 'required': ['type', 'id'], - 'additionalProperties': False, - 'properties': { - 'type': { - '$ref': '#/components/schemas/type' - }, - 'id': { - '$ref': '#/components/schemas/id' - }, - 'attributes': { - 'type': 'object', + "resource": { + "type": "object", + "required": ["type", "id"], + "additionalProperties": False, + "properties": { + "type": {"$ref": "#/components/schemas/type"}, + "id": {"$ref": "#/components/schemas/id"}, + "attributes": { + "type": "object", # ... }, - 'relationships': { - 'type': 'object', + "relationships": { + "type": "object", # ... 
}, - 'links': { - '$ref': '#/components/schemas/links' - }, - 'meta': {'$ref': '#/components/schemas/meta'}, - } + "links": {"$ref": "#/components/schemas/links"}, + "meta": {"$ref": "#/components/schemas/meta"}, + }, }, - 'link': { - 'oneOf': [ + "link": { + "oneOf": [ { - 'description': "a string containing the link's URL", - 'type': 'string', - 'format': 'uri-reference' + "description": "a string containing the link's URL", + "type": "string", + "format": "uri-reference", }, { - 'type': 'object', - 'required': ['href'], - 'properties': { - 'href': { - 'description': "a string containing the link's URL", - 'type': 'string', - 'format': 'uri-reference' + "type": "object", + "required": ["href"], + "properties": { + "href": { + "description": "a string containing the link's URL", + "type": "string", + "format": "uri-reference", }, - 'meta': {'$ref': '#/components/schemas/meta'} - } - } + "meta": {"$ref": "#/components/schemas/meta"}, + }, + }, ] }, - 'links': { - 'type': 'object', - 'additionalProperties': {'$ref': '#/components/schemas/link'} + "links": { + "type": "object", + "additionalProperties": {"$ref": "#/components/schemas/link"}, }, - 'reltoone': { - 'description': "a singular 'to-one' relationship", - 'type': 'object', - 'properties': { - 'links': {'$ref': '#/components/schemas/relationshipLinks'}, - 'data': {'$ref': '#/components/schemas/relationshipToOne'}, - 'meta': {'$ref': '#/components/schemas/meta'} - } + "reltoone": { + "description": "a singular 'to-one' relationship", + "type": "object", + "properties": { + "links": {"$ref": "#/components/schemas/relationshipLinks"}, + "data": {"$ref": "#/components/schemas/relationshipToOne"}, + "meta": {"$ref": "#/components/schemas/meta"}, + }, }, - 'relationshipToOne': { - 'description': "reference to other resource in a to-one relationship", - 'anyOf': [ - {'$ref': '#/components/schemas/nulltype'}, - {'$ref': '#/components/schemas/linkage'} + "relationshipToOne": { + "description": "reference to other resource in a to-one relationship", + "anyOf": [ + {"$ref": "#/components/schemas/nulltype"}, + {"$ref": "#/components/schemas/linkage"}, ], }, - 'reltomany': { - 'description': "a multiple 'to-many' relationship", - 'type': 'object', - 'properties': { - 'links': {'$ref': '#/components/schemas/relationshipLinks'}, - 'data': {'$ref': '#/components/schemas/relationshipToMany'}, - 'meta': {'$ref': '#/components/schemas/meta'} - } + "reltomany": { + "description": "a multiple 'to-many' relationship", + "type": "object", + "properties": { + "links": {"$ref": "#/components/schemas/relationshipLinks"}, + "data": {"$ref": "#/components/schemas/relationshipToMany"}, + "meta": {"$ref": "#/components/schemas/meta"}, + }, }, - 'relationshipLinks': { - 'description': 'optional references to other resource objects', - 'type': 'object', - 'additionalProperties': True, - 'properties': { - 'self': {'$ref': '#/components/schemas/link'}, - 'related': {'$ref': '#/components/schemas/link'} - } + "relationshipLinks": { + "description": "optional references to other resource objects", + "type": "object", + "additionalProperties": True, + "properties": { + "self": {"$ref": "#/components/schemas/link"}, + "related": {"$ref": "#/components/schemas/link"}, + }, }, - 'relationshipToMany': { - 'description': "An array of objects each containing the " - "'type' and 'id' for to-many relationships", - 'type': 'array', - 'items': {'$ref': '#/components/schemas/linkage'}, - 'uniqueItems': True + "relationshipToMany": { + "description": "An array of objects each 
containing the " + "'type' and 'id' for to-many relationships", + "type": "array", + "items": {"$ref": "#/components/schemas/linkage"}, + "uniqueItems": True, }, # A RelationshipView uses a ResourceIdentifierObjectSerializer (hence the name # ResourceIdentifierObject returned by get_component_name()) which serializes type and @@ -124,159 +119,140 @@ class SchemaGenerator(drf_openapi.SchemaGenerator): # toMany or toOne so offer both options since we are not iterating over all the # possible {related_field}'s but rather rendering one path schema which may represent # toMany and toOne relationships. - 'ResourceIdentifierObject': { - 'oneOf': [ - {'$ref': '#/components/schemas/relationshipToOne'}, - {'$ref': '#/components/schemas/relationshipToMany'} + "ResourceIdentifierObject": { + "oneOf": [ + {"$ref": "#/components/schemas/relationshipToOne"}, + {"$ref": "#/components/schemas/relationshipToMany"}, ] }, - 'linkage': { - 'type': 'object', - 'description': "the 'type' and 'id'", - 'required': ['type', 'id'], - 'properties': { - 'type': {'$ref': '#/components/schemas/type'}, - 'id': {'$ref': '#/components/schemas/id'}, - 'meta': {'$ref': '#/components/schemas/meta'} - } + "linkage": { + "type": "object", + "description": "the 'type' and 'id'", + "required": ["type", "id"], + "properties": { + "type": {"$ref": "#/components/schemas/type"}, + "id": {"$ref": "#/components/schemas/id"}, + "meta": {"$ref": "#/components/schemas/meta"}, + }, }, - 'pagination': { - 'type': 'object', - 'properties': { - 'first': {'$ref': '#/components/schemas/pageref'}, - 'last': {'$ref': '#/components/schemas/pageref'}, - 'prev': {'$ref': '#/components/schemas/pageref'}, - 'next': {'$ref': '#/components/schemas/pageref'}, - } + "pagination": { + "type": "object", + "properties": { + "first": {"$ref": "#/components/schemas/pageref"}, + "last": {"$ref": "#/components/schemas/pageref"}, + "prev": {"$ref": "#/components/schemas/pageref"}, + "next": {"$ref": "#/components/schemas/pageref"}, + }, }, - 'pageref': { - 'oneOf': [ - {'type': 'string', 'format': 'uri-reference'}, - {'$ref': '#/components/schemas/nulltype'} + "pageref": { + "oneOf": [ + {"type": "string", "format": "uri-reference"}, + {"$ref": "#/components/schemas/nulltype"}, ] }, - 'failure': { - 'type': 'object', - 'required': ['errors'], - 'properties': { - 'errors': {'$ref': '#/components/schemas/errors'}, - 'meta': {'$ref': '#/components/schemas/meta'}, - 'jsonapi': {'$ref': '#/components/schemas/jsonapi'}, - 'links': {'$ref': '#/components/schemas/links'} - } + "failure": { + "type": "object", + "required": ["errors"], + "properties": { + "errors": {"$ref": "#/components/schemas/errors"}, + "meta": {"$ref": "#/components/schemas/meta"}, + "jsonapi": {"$ref": "#/components/schemas/jsonapi"}, + "links": {"$ref": "#/components/schemas/links"}, + }, }, - 'errors': { - 'type': 'array', - 'items': {'$ref': '#/components/schemas/error'}, - 'uniqueItems': True + "errors": { + "type": "array", + "items": {"$ref": "#/components/schemas/error"}, + "uniqueItems": True, }, - 'error': { - 'type': 'object', - 'additionalProperties': False, - 'properties': { - 'id': {'type': 'string'}, - 'status': {'type': 'string'}, - 'links': {'$ref': '#/components/schemas/links'}, - 'code': {'type': 'string'}, - 'title': {'type': 'string'}, - 'detail': {'type': 'string'}, - 'source': { - 'type': 'object', - 'properties': { - 'pointer': { - 'type': 'string', - 'description': - "A [JSON Pointer](https://tools.ietf.org/html/rfc6901) " - "to the associated entity in the request document " 
- "[e.g. `/data` for a primary data object, or " - "`/data/attributes/title` for a specific attribute." + "error": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "status": {"type": "string"}, + "links": {"$ref": "#/components/schemas/links"}, + "code": {"type": "string"}, + "title": {"type": "string"}, + "detail": {"type": "string"}, + "source": { + "type": "object", + "properties": { + "pointer": { + "type": "string", + "description": "A [JSON Pointer](https://tools.ietf.org/html/rfc6901) " + "to the associated entity in the request document " + "[e.g. `/data` for a primary data object, or " + "`/data/attributes/title` for a specific attribute.", }, - 'parameter': { - 'type': 'string', - 'description': - "A string indicating which query parameter " - "caused the error." + "parameter": { + "type": "string", + "description": "A string indicating which query parameter " + "caused the error.", }, - 'meta': {'$ref': '#/components/schemas/meta'} - } - } - } - }, - 'onlymeta': { - 'additionalProperties': False, - 'properties': { - 'meta': {'$ref': '#/components/schemas/meta'} - } + "meta": {"$ref": "#/components/schemas/meta"}, + }, + }, + }, }, - 'meta': { - 'type': 'object', - 'additionalProperties': True + "onlymeta": { + "additionalProperties": False, + "properties": {"meta": {"$ref": "#/components/schemas/meta"}}, }, - 'datum': { - 'description': 'singular item', - 'properties': { - 'data': {'$ref': '#/components/schemas/resource'} - } + "meta": {"type": "object", "additionalProperties": True}, + "datum": { + "description": "singular item", + "properties": {"data": {"$ref": "#/components/schemas/resource"}}, }, - 'nulltype': { - 'type': 'object', - 'nullable': True, - 'default': None + "nulltype": {"type": "object", "nullable": True, "default": None}, + "type": { + "type": "string", + "description": "The [type]" + "(https://jsonapi.org/format/#document-resource-object-identification) " + "member is used to describe resource objects that share common attributes " + "and relationships.", }, - 'type': { - 'type': 'string', - 'description': - 'The [type]' - '(https://jsonapi.org/format/#document-resource-object-identification) ' - 'member is used to describe resource objects that share common attributes ' - 'and relationships.' - }, - 'id': { - 'type': 'string', - 'description': - "Each resource object’s type and id pair MUST " - "[identify]" - "(https://jsonapi.org/format/#document-resource-object-identification) " - "a single, unique resource." 
+ "id": { + "type": "string", + "description": "Each resource object’s type and id pair MUST " + "[identify]" + "(https://jsonapi.org/format/#document-resource-object-identification) " + "a single, unique resource.", }, }, - 'parameters': { - 'include': { - 'name': 'include', - 'in': 'query', - 'description': '[list of included related resources]' - '(https://jsonapi.org/format/#fetching-includes)', - 'required': False, - 'style': 'form', - 'schema': { - 'type': 'string' - } + "parameters": { + "include": { + "name": "include", + "in": "query", + "description": "[list of included related resources]" + "(https://jsonapi.org/format/#fetching-includes)", + "required": False, + "style": "form", + "schema": {"type": "string"}, }, # TODO: deepObject not well defined/supported: # https://github.com/OAI/OpenAPI-Specification/issues/1706 - 'fields': { - 'name': 'fields', - 'in': 'query', - 'description': '[sparse fieldsets]' - '(https://jsonapi.org/format/#fetching-sparse-fieldsets).\n' - 'Use fields[\\]=field1,field2,...,fieldN', - 'required': False, - 'style': 'deepObject', - 'schema': { - 'type': 'object', + "fields": { + "name": "fields", + "in": "query", + "description": "[sparse fieldsets]" + "(https://jsonapi.org/format/#fetching-sparse-fieldsets).\n" + "Use fields[\\]=field1,field2,...,fieldN", + "required": False, + "style": "deepObject", + "schema": { + "type": "object", }, - 'explode': True + "explode": True, }, - 'sort': { - 'name': 'sort', - 'in': 'query', - 'description': '[list of fields to sort by]' - '(https://jsonapi.org/format/#fetching-sorting)', - 'required': False, - 'style': 'form', - 'schema': { - 'type': 'string' - } + "sort": { + "name": "sort", + "in": "query", + "description": "[list of fields to sort by]" + "(https://jsonapi.org/format/#fetching-sorting)", + "required": False, + "style": "form", + "schema": {"type": "string"}, }, }, } @@ -299,10 +275,14 @@ def get_schema(self, request=None, public=False): #: - 'action' copy of current view.action (list/fetch) as this gets reset for each request. expanded_endpoints = [] for path, method, view in view_endpoints: - if hasattr(view, 'action') and view.action == 'retrieve_related': - expanded_endpoints += self._expand_related(path, method, view, view_endpoints) + if hasattr(view, "action") and view.action == "retrieve_related": + expanded_endpoints += self._expand_related( + path, method, view, view_endpoints + ) else: - expanded_endpoints.append((path, method, view, getattr(view, 'action', None))) + expanded_endpoints.append( + (path, method, view, getattr(view, "action", None)) + ) for path, method, view, action in expanded_endpoints: if not self.has_view_permissions(path, method, view): @@ -312,7 +292,7 @@ def get_schema(self, request=None, public=False): # (to_many). This patches the view.action appropriately so that # view.schema.get_operation() "does the right thing" for fetch vs. list. 
current_action = None - if hasattr(view, 'action'): + if hasattr(view, "action"): current_action = view.action view.action = action operation = view.schema.get_operation(path, method) @@ -323,16 +303,19 @@ def get_schema(self, request=None, public=False): if components_schemas[k] == components[k]: continue warnings.warn( - 'Schema component "{}" has been overriden with a different value.'.format(k)) + 'Schema component "{}" has been overriden with a different value.'.format( + k + ) + ) components_schemas.update(components) - if hasattr(view, 'action'): + if hasattr(view, "action"): view.action = current_action # Normalise path for any provided mount url. - if path.startswith('/'): + if path.startswith("/"): path = path[1:] - path = urljoin(self.url or '/', path) + path = urljoin(self.url or "/", path) paths.setdefault(path, {}) paths[path][method.lower()] = operation @@ -340,9 +323,9 @@ def get_schema(self, request=None, public=False): self.check_duplicate_operation_id(paths) # Compile final schema, overriding stuff from super class. - schema['paths'] = paths - schema['components'] = self.jsonapi_components - schema['components']['schemas'].update(components_schemas) + schema["paths"] = paths + schema["components"] = self.jsonapi_components + schema["components"]["schemas"].update(components_schemas) return schema @@ -362,18 +345,25 @@ def _expand_related(self, path, method, view, view_endpoints): # It's not obvious if it's allowed to have both included_ and related_ serializers, # so just merge both dicts. serializers = {} - if hasattr(serializer, 'included_serializers'): + if hasattr(serializer, "included_serializers"): serializers = {**serializers, **serializer.included_serializers} - if hasattr(serializer, 'related_serializers'): + if hasattr(serializer, "related_serializers"): serializers = {**serializers, **serializer.related_serializers} related_fields = [fs for fs in serializers.items()] for field, related_serializer in related_fields: - related_view = self._find_related_view(view_endpoints, related_serializer, view) + related_view = self._find_related_view( + view_endpoints, related_serializer, view + ) if related_view: action = self._field_is_one_or_many(field, view) result.append( - (path.replace('{related_field}', field), method, related_view, action) + ( + path.replace("{related_field}", field), + method, + related_view, + action, + ) ) return result @@ -390,7 +380,9 @@ def _find_related_view(self, view_endpoints, related_serializer, parent_view): for path, method, view in view_endpoints: view_serializer = view.get_serializer() if not isinstance(related_serializer, type): - related_serializer_class = import_class_from_dotted_path(related_serializer) + related_serializer_class = import_class_from_dotted_path( + related_serializer + ) else: related_serializer_class = related_serializer if isinstance(view_serializer, related_serializer_class): @@ -401,17 +393,18 @@ def _find_related_view(self, view_endpoints, related_serializer, parent_view): def _field_is_one_or_many(self, field, view): serializer = view.get_serializer() if isinstance(serializer.fields[field], ManyRelatedField): - return 'list' + return "list" else: - return 'fetch' + return "fetch" class AutoSchema(drf_openapi.AutoSchema): """ Extend DRF's openapi.AutoSchema for JSONAPI serialization. """ + #: ignore all the media types and only generate a JSONAPI schema. 
- content_types = ['application/vnd.api+json'] + content_types = ["application/vnd.api+json"] def get_operation(self, path, method): """ @@ -421,31 +414,31 @@ def get_operation(self, path, method): - special handling for POST, PATCH, DELETE """ operation = {} - operation['operationId'] = self.get_operation_id(path, method) - operation['description'] = self.get_description(path, method) + operation["operationId"] = self.get_operation_id(path, method) + operation["description"] = self.get_description(path, method) parameters = [] parameters += self.get_path_parameters(path, method) # pagination, filters only apply to GET/HEAD of collections and items - if method in ['GET', 'HEAD']: + if method in ["GET", "HEAD"]: parameters += self._get_include_parameters(path, method) parameters += self._get_fields_parameters(path, method) parameters += self._get_sort_parameters(path, method) parameters += self.get_pagination_parameters(path, method) parameters += self.get_filter_parameters(path, method) - operation['parameters'] = parameters + operation["parameters"] = parameters # get request and response code schemas - if method == 'GET': + if method == "GET": if is_list_view(path, method, self.view): self._add_get_collection_response(operation) else: self._add_get_item_response(operation) - elif method == 'POST': + elif method == "POST": self._add_post_item_response(operation, path) - elif method == 'PATCH': + elif method == "PATCH": self._add_patch_item_response(operation, path) - elif method == 'DELETE': + elif method == "DELETE": # should only allow deleting a resource, not a collection # TODO: implement delete of a relationship in future release. self._add_delete_item_response(operation, path) @@ -457,9 +450,9 @@ def get_operation_id(self, path, method): used for the main path as well as such as related and relationships. 
This concatenates the (mapped) method name and path as the spec allows most any """ - method_name = getattr(self.view, 'action', method.lower()) + method_name = getattr(self.view, "action", method.lower()) if is_list_view(path, method, self.view): - action = 'List' + action = "List" elif method_name not in self.method_mapping: action = method_name else: @@ -470,7 +463,7 @@ def _get_include_parameters(self, path, method): """ includes parameter: https://jsonapi.org/format/#fetching-includes """ - return [{'$ref': '#/components/parameters/include'}] + return [{"$ref": "#/components/parameters/include"}] def _get_fields_parameters(self, path, method): """ @@ -490,20 +483,20 @@ def _get_fields_parameters(self, path, method): # world: # type: string # noqa F821 # explode: true - return [{'$ref': '#/components/parameters/fields'}] + return [{"$ref": "#/components/parameters/fields"}] def _get_sort_parameters(self, path, method): """ sort parameter: https://jsonapi.org/format/#fetching-sorting """ - return [{'$ref': '#/components/parameters/sort'}] + return [{"$ref": "#/components/parameters/sort"}] def _add_get_collection_response(self, operation): """ Add GET 200 response for a collection to operation """ - operation['responses'] = { - '200': self._get_toplevel_200_response(operation, collection=True) + operation["responses"] = { + "200": self._get_toplevel_200_response(operation, collection=True) } self._add_get_4xx_responses(operation) @@ -511,8 +504,8 @@ def _add_get_item_response(self, operation): """ add GET 200 response for an item to operation """ - operation['responses'] = { - '200': self._get_toplevel_200_response(operation, collection=False) + operation["responses"] = { + "200": self._get_toplevel_200_response(operation, collection=False) } self._add_get_4xx_responses(operation) @@ -525,58 +518,57 @@ def _get_toplevel_200_response(self, operation, collection=True): Uses a $ref to the components.schemas. component definition. 
""" if collection: - data = {'type': 'array', 'items': self._get_reference(self.view.get_serializer())} + data = { + "type": "array", + "items": self._get_reference(self.view.get_serializer()), + } else: data = self._get_reference(self.view.get_serializer()) return { - 'description': operation['operationId'], - 'content': { - 'application/vnd.api+json': { - 'schema': { - 'type': 'object', - 'required': ['data'], - 'properties': { - 'data': data, - 'included': { - 'type': 'array', - 'uniqueItems': True, - 'items': { - '$ref': '#/components/schemas/resource' - } + "description": operation["operationId"], + "content": { + "application/vnd.api+json": { + "schema": { + "type": "object", + "required": ["data"], + "properties": { + "data": data, + "included": { + "type": "array", + "uniqueItems": True, + "items": {"$ref": "#/components/schemas/resource"}, }, - 'links': { - 'description': 'Link members related to primary data', - 'allOf': [ - {'$ref': '#/components/schemas/links'}, - {'$ref': '#/components/schemas/pagination'} - ] + "links": { + "description": "Link members related to primary data", + "allOf": [ + {"$ref": "#/components/schemas/links"}, + {"$ref": "#/components/schemas/pagination"}, + ], }, - 'jsonapi': { - '$ref': '#/components/schemas/jsonapi' - } - } + "jsonapi": {"$ref": "#/components/schemas/jsonapi"}, + }, } } - } + }, } def _add_post_item_response(self, operation, path): """ add response for POST of an item to operation """ - operation['requestBody'] = self.get_request_body(path, 'POST') - operation['responses'] = { - '201': self._get_toplevel_200_response(operation, collection=False) + operation["requestBody"] = self.get_request_body(path, "POST") + operation["responses"] = { + "201": self._get_toplevel_200_response(operation, collection=False) } - operation['responses']['201']['description'] = ( - '[Created](https://jsonapi.org/format/#crud-creating-responses-201). ' - 'Assigned `id` and/or any other changes are in this response.' + operation["responses"]["201"]["description"] = ( + "[Created](https://jsonapi.org/format/#crud-creating-responses-201). " + "Assigned `id` and/or any other changes are in this response." ) self._add_async_response(operation) - operation['responses']['204'] = { - 'description': '[Created](https://jsonapi.org/format/#crud-creating-responses-204) ' - 'with the supplied `id`. No other changes from what was POSTed.' + operation["responses"]["204"] = { + "description": "[Created](https://jsonapi.org/format/#crud-creating-responses-204) " + "with the supplied `id`. No other changes from what was POSTed." 
} self._add_post_4xx_responses(operation) @@ -584,9 +576,9 @@ def _add_patch_item_response(self, operation, path): """ Add PATCH response for an item to operation """ - operation['requestBody'] = self.get_request_body(path, 'PATCH') - operation['responses'] = { - '200': self._get_toplevel_200_response(operation, collection=False) + operation["requestBody"] = self.get_request_body(path, "PATCH") + operation["responses"] = { + "200": self._get_toplevel_200_response(operation, collection=False) } self._add_patch_4xx_responses(operation) @@ -596,7 +588,7 @@ def _add_delete_item_response(self, operation, path): """ # Only DELETE of relationships has a requestBody if isinstance(self.view, views.RelationshipView): - operation['requestBody'] = self.get_request_body(path, 'DELETE') + operation["requestBody"] = self.get_request_body(path, "DELETE") self._add_delete_responses(operation) def get_request_body(self, path, method): @@ -604,7 +596,7 @@ def get_request_body(self, path, method): A request body is required by jsonapi for POST, PATCH, and DELETE methods. """ serializer = self.get_serializer(path, method) - if not isinstance(serializer, (serializers.BaseSerializer, )): + if not isinstance(serializer, (serializers.BaseSerializer,)): return {} is_relationship = isinstance(self.view, views.RelationshipView) @@ -617,32 +609,35 @@ def get_request_body(self, path, method): # Another subclassed from base with required type/id but no required attributes (PATCH) if is_relationship: - item_schema = {'$ref': '#/components/schemas/ResourceIdentifierObject'} + item_schema = {"$ref": "#/components/schemas/ResourceIdentifierObject"} else: item_schema = self.map_serializer(serializer) - if method == 'POST': + if method == "POST": # 'type' and 'id' are both required for: # - all relationship operations # - regular PATCH or DELETE # Only 'type' is required for POST: system may assign the 'id'. - item_schema['required'] = ['type'] + item_schema["required"] = ["type"] - if 'properties' in item_schema and 'attributes' in item_schema['properties']: + if "properties" in item_schema and "attributes" in item_schema["properties"]: # No required attributes for PATCH - if method in ['PATCH', 'PUT'] and 'required' in item_schema['properties']['attributes']: - del item_schema['properties']['attributes']['required'] + if ( + method in ["PATCH", "PUT"] + and "required" in item_schema["properties"]["attributes"] + ): + del item_schema["properties"]["attributes"]["required"] # No read_only fields for request. 
- for name, schema in item_schema['properties']['attributes']['properties'].copy().items(): # noqa E501 - if 'readOnly' in schema: - del item_schema['properties']['attributes']['properties'][name] + for name, schema in ( + item_schema["properties"]["attributes"]["properties"].copy().items() + ): # noqa E501 + if "readOnly" in schema: + del item_schema["properties"]["attributes"]["properties"][name] return { - 'content': { + "content": { ct: { - 'schema': { - 'required': ['data'], - 'properties': { - 'data': item_schema - } + "schema": { + "required": ["data"], + "properties": {"data": item_schema}, } } for ct in self.content_types @@ -666,10 +661,14 @@ def map_serializer(self, serializer): if isinstance(field, serializers.HiddenField): continue if isinstance(field, serializers.RelatedField): - relationships[field.field_name] = {'$ref': '#/components/schemas/reltoone'} + relationships[field.field_name] = { + "$ref": "#/components/schemas/reltoone" + } continue if isinstance(field, serializers.ManyRelatedField): - relationships[field.field_name] = {'$ref': '#/components/schemas/reltomany'} + relationships[field.field_name] = { + "$ref": "#/components/schemas/reltomany" + } continue if field.required: @@ -677,47 +676,45 @@ def map_serializer(self, serializer): schema = self.map_field(field) if field.read_only: - schema['readOnly'] = True + schema["readOnly"] = True if field.write_only: - schema['writeOnly'] = True + schema["writeOnly"] = True if field.allow_null: - schema['nullable'] = True + schema["nullable"] = True if field.default and field.default != empty: - schema['default'] = field.default + schema["default"] = field.default if field.help_text: # Ensure django gettext_lazy is rendered correctly - schema['description'] = str(field.help_text) + schema["description"] = str(field.help_text) self.map_field_validators(field, schema) attributes[field.field_name] = schema result = { - 'type': 'object', - 'required': ['type', 'id'], - 'additionalProperties': False, - 'properties': { - 'type': {'$ref': '#/components/schemas/type'}, - 'id': {'$ref': '#/components/schemas/id'}, - 'links': { - 'type': 'object', - 'properties': { - 'self': {'$ref': '#/components/schemas/link'} - } - } - } + "type": "object", + "required": ["type", "id"], + "additionalProperties": False, + "properties": { + "type": {"$ref": "#/components/schemas/type"}, + "id": {"$ref": "#/components/schemas/id"}, + "links": { + "type": "object", + "properties": {"self": {"$ref": "#/components/schemas/link"}}, + }, + }, } if attributes: - result['properties']['attributes'] = { - 'type': 'object', - 'properties': attributes + result["properties"]["attributes"] = { + "type": "object", + "properties": attributes, } if required: - result['properties']['attributes']['required'] = required + result["properties"]["attributes"]["required"] = required if relationships: - result['properties']['relationships'] = { - 'type': 'object', - 'properties': relationships + result["properties"]["relationships"] = { + "type": "object", + "properties": relationships, } return result @@ -725,14 +722,14 @@ def _add_async_response(self, operation): """ Add async response to operation """ - operation['responses']['202'] = { - 'description': 'Accepted for [asynchronous processing]' - '(https://jsonapi.org/recommendations/#asynchronous-processing)', - 'content': { - 'application/vnd.api+json': { - 'schema': {'$ref': '#/components/schemas/datum'} + operation["responses"]["202"] = { + "description": "Accepted for [asynchronous processing]" + 
"(https://jsonapi.org/recommendations/#asynchronous-processing)", + "content": { + "application/vnd.api+json": { + "schema": {"$ref": "#/components/schemas/datum"} } - } + }, } def _failure_response(self, reason): @@ -740,28 +737,30 @@ def _failure_response(self, reason): Return failure response reason as the description """ return { - 'description': reason, - 'content': { - 'application/vnd.api+json': { - 'schema': {'$ref': '#/components/schemas/failure'} + "description": reason, + "content": { + "application/vnd.api+json": { + "schema": {"$ref": "#/components/schemas/failure"} } - } + }, } def _add_generic_failure_responses(self, operation): """ Add generic failure response(s) to operation """ - for code, reason in [('401', 'not authorized'), ]: - operation['responses'][code] = self._failure_response(reason) + for code, reason in [ + ("401", "not authorized"), + ]: + operation["responses"][code] = self._failure_response(reason) def _add_get_4xx_responses(self, operation): """ Add generic 4xx GET responses to operation """ self._add_generic_failure_responses(operation) - for code, reason in [('404', 'not found')]: - operation['responses'][code] = self._failure_response(reason) + for code, reason in [("404", "not found")]: + operation["responses"][code] = self._failure_response(reason) def _add_post_4xx_responses(self, operation): """ @@ -769,12 +768,21 @@ def _add_post_4xx_responses(self, operation): """ self._add_generic_failure_responses(operation) for code, reason in [ - ('403', '[Forbidden](https://jsonapi.org/format/#crud-creating-responses-403)'), - ('404', '[Related resource does not exist]' - '(https://jsonapi.org/format/#crud-creating-responses-404)'), - ('409', '[Conflict](https://jsonapi.org/format/#crud-creating-responses-409)'), + ( + "403", + "[Forbidden](https://jsonapi.org/format/#crud-creating-responses-403)", + ), + ( + "404", + "[Related resource does not exist]" + "(https://jsonapi.org/format/#crud-creating-responses-404)", + ), + ( + "409", + "[Conflict](https://jsonapi.org/format/#crud-creating-responses-409)", + ), ]: - operation['responses'][code] = self._failure_response(reason) + operation["responses"][code] = self._failure_response(reason) def _add_patch_4xx_responses(self, operation): """ @@ -782,37 +790,49 @@ def _add_patch_4xx_responses(self, operation): """ self._add_generic_failure_responses(operation) for code, reason in [ - ('403', '[Forbidden](https://jsonapi.org/format/#crud-updating-responses-403)'), - ('404', '[Related resource does not exist]' - '(https://jsonapi.org/format/#crud-updating-responses-404)'), - ('409', '[Conflict]([Conflict]' - '(https://jsonapi.org/format/#crud-updating-responses-409)'), + ( + "403", + "[Forbidden](https://jsonapi.org/format/#crud-updating-responses-403)", + ), + ( + "404", + "[Related resource does not exist]" + "(https://jsonapi.org/format/#crud-updating-responses-404)", + ), + ( + "409", + "[Conflict]([Conflict]" + "(https://jsonapi.org/format/#crud-updating-responses-409)", + ), ]: - operation['responses'][code] = self._failure_response(reason) + operation["responses"][code] = self._failure_response(reason) def _add_delete_responses(self, operation): """ Add generic DELETE responses to operation """ # the 2xx statuses: - operation['responses'] = { - '200': { - 'description': '[OK](https://jsonapi.org/format/#crud-deleting-responses-200)', - 'content': { - 'application/vnd.api+json': { - 'schema': {'$ref': '#/components/schemas/onlymeta'} + operation["responses"] = { + "200": { + "description": 
"[OK](https://jsonapi.org/format/#crud-deleting-responses-200)", + "content": { + "application/vnd.api+json": { + "schema": {"$ref": "#/components/schemas/onlymeta"} } - } + }, } } self._add_async_response(operation) - operation['responses']['204'] = { - 'description': '[no content](https://jsonapi.org/format/#crud-deleting-responses-204)', + operation["responses"]["204"] = { + "description": "[no content](https://jsonapi.org/format/#crud-deleting-responses-204)", } # the 4xx errors: self._add_generic_failure_responses(operation) for code, reason in [ - ('404', '[Resource does not exist]' - '(https://jsonapi.org/format/#crud-deleting-responses-404)'), + ( + "404", + "[Resource does not exist]" + "(https://jsonapi.org/format/#crud-deleting-responses-404)", + ), ]: - operation['responses'][code] = self._failure_response(reason) + operation["responses"][code] = self._failure_response(reason) diff --git a/rest_framework_json_api/serializers.py b/rest_framework_json_api/serializers.py index 31f2a86b..50b546b6 100644 --- a/rest_framework_json_api/serializers.py +++ b/rest_framework_json_api/serializers.py @@ -12,45 +12,47 @@ get_included_serializers, get_resource_type_from_instance, get_resource_type_from_model, - get_resource_type_from_serializer + get_resource_type_from_serializer, ) class ResourceIdentifierObjectSerializer(BaseSerializer): default_error_messages = { - 'incorrect_model_type': _( - 'Incorrect model type. Expected {model_type}, received {received_type}.' + "incorrect_model_type": _( + "Incorrect model type. Expected {model_type}, received {received_type}." ), - 'does_not_exist': _('Invalid pk "{pk_value}" - object does not exist.'), - 'incorrect_type': _('Incorrect type. Expected pk value, received {data_type}.'), + "does_not_exist": _('Invalid pk "{pk_value}" - object does not exist.'), + "incorrect_type": _("Incorrect type. Expected pk value, received {data_type}."), } model_class = None def __init__(self, *args, **kwargs): - self.model_class = kwargs.pop('model_class', self.model_class) + self.model_class = kwargs.pop("model_class", self.model_class) # this has no fields but assumptions are made elsewhere that self.fields exists. 
self.fields = {} super(ResourceIdentifierObjectSerializer, self).__init__(*args, **kwargs) def to_representation(self, instance): return { - 'type': get_resource_type_from_instance(instance), - 'id': str(instance.pk) + "type": get_resource_type_from_instance(instance), + "id": str(instance.pk), } def to_internal_value(self, data): - if data['type'] != get_resource_type_from_model(self.model_class): + if data["type"] != get_resource_type_from_model(self.model_class): self.fail( - 'incorrect_model_type', model_type=self.model_class, received_type=data['type'] + "incorrect_model_type", + model_type=self.model_class, + received_type=data["type"], ) - pk = data['id'] + pk = data["id"] try: return self.model_class.objects.get(pk=pk) except ObjectDoesNotExist: - self.fail('does_not_exist', pk_value=pk) + self.fail("does_not_exist", pk_value=pk) except (TypeError, ValueError): - self.fail('incorrect_type', data_type=type(data['pk']).__name__) + self.fail("incorrect_type", data_type=type(data["pk"]).__name__) class SparseFieldsetsMixin(object): @@ -62,26 +64,30 @@ class SparseFieldsetsMixin(object): def __init__(self, *args, **kwargs): super(SparseFieldsetsMixin, self).__init__(*args, **kwargs) - context = kwargs.get('context') - request = context.get('request') if context else None + context = kwargs.get("context") + request = context.get("request") if context else None if request: - sparse_fieldset_query_param = 'fields[{}]'.format( + sparse_fieldset_query_param = "fields[{}]".format( get_resource_type_from_serializer(self) ) try: param_name = next( - key for key in request.query_params if sparse_fieldset_query_param == key + key + for key in request.query_params + if sparse_fieldset_query_param == key ) except StopIteration: pass else: - fieldset = request.query_params.get(param_name).split(',') + fieldset = request.query_params.get(param_name).split(",") # iterate over a *copy* of self.fields' underlying OrderedDict, because we may # modify the original during the iteration. 
# self.fields is a `rest_framework.utils.serializer_helpers.BindingDict` for field_name, field in self.fields.fields.copy().items(): - if field_name == api_settings.URL_FIELD_NAME: # leave self link there + if ( + field_name == api_settings.URL_FIELD_NAME + ): # leave self link there continue if field_name not in fieldset: self.fields.pop(field_name) @@ -96,19 +102,19 @@ class IncludedResourcesValidationMixin(object): """ def __init__(self, *args, **kwargs): - context = kwargs.get('context') - request = context.get('request') if context else None - view = context.get('view') if context else None + context = kwargs.get("context") + request = context.get("request") if context else None + view = context.get("view") if context else None def validate_path(serializer_class, field_path, path): serializers = get_included_serializers(serializer_class) if serializers is None: - raise ParseError('This endpoint does not support the include parameter') + raise ParseError("This endpoint does not support the include parameter") this_field_name = inflection.underscore(field_path[0]) this_included_serializer = serializers.get(this_field_name) if this_included_serializer is None: raise ParseError( - 'This endpoint does not support the include parameter for path {}'.format( + "This endpoint does not support the include parameter for path {}".format( path ) ) @@ -120,10 +126,12 @@ def validate_path(serializer_class, field_path, path): if request and view: included_resources = get_included_resources(request) for included_field_name in included_resources: - included_field_path = included_field_name.split('.') + included_field_path = included_field_name.split(".") this_serializer_class = view.get_serializer_class() # lets validate the current path - validate_path(this_serializer_class, included_field_path, included_field_name) + validate_path( + this_serializer_class, included_field_path, included_field_name + ) super(IncludedResourcesValidationMixin, self).__init__(*args, **kwargs) @@ -135,8 +143,10 @@ class SerializerMetaclass(SerializerMetaclass): # If user imports serializer from here we can catch class definition and check # nested serializers for depricated use. 
class Serializer( - IncludedResourcesValidationMixin, SparseFieldsetsMixin, Serializer, - metaclass=SerializerMetaclass + IncludedResourcesValidationMixin, + SparseFieldsetsMixin, + Serializer, + metaclass=SerializerMetaclass, ): """ A `Serializer` is a model-less serializer class with additional @@ -156,8 +166,10 @@ class Serializer( class HyperlinkedModelSerializer( - IncludedResourcesValidationMixin, SparseFieldsetsMixin, HyperlinkedModelSerializer, - metaclass=SerializerMetaclass + IncludedResourcesValidationMixin, + SparseFieldsetsMixin, + HyperlinkedModelSerializer, + metaclass=SerializerMetaclass, ): """ A type of `ModelSerializer` that uses hyperlinked relationships instead @@ -173,8 +185,12 @@ class HyperlinkedModelSerializer( """ -class ModelSerializer(IncludedResourcesValidationMixin, SparseFieldsetsMixin, ModelSerializer, - metaclass=SerializerMetaclass): +class ModelSerializer( + IncludedResourcesValidationMixin, + SparseFieldsetsMixin, + ModelSerializer, + metaclass=SerializerMetaclass, +): """ A `ModelSerializer` is just a regular `Serializer`, except that: @@ -196,6 +212,7 @@ class ModelSerializer(IncludedResourcesValidationMixin, SparseFieldsetsMixin, Mo * A mixin class to enable sparse fieldsets is included * A mixin class to enable validation of included resources is included """ + serializer_related_field = ResourceRelatedField def get_field_names(self, declared_fields, info): @@ -203,7 +220,7 @@ def get_field_names(self, declared_fields, info): We override the parent to omit explicity defined meta fields (such as SerializerMethodFields) from the list of declared fields """ - meta_fields = getattr(self.Meta, 'meta_fields', []) + meta_fields = getattr(self.Meta, "meta_fields", []) declared = OrderedDict() for field_name in set(declared_fields.keys()): @@ -211,7 +228,7 @@ def get_field_names(self, declared_fields, info): if field_name not in meta_fields: declared[field_name] = field fields = super(ModelSerializer, self).get_field_names(declared, info) - return list(fields) + list(getattr(self.Meta, 'meta_fields', list())) + return list(fields) + list(getattr(self.Meta, "meta_fields", list())) class PolymorphicSerializerMetaclass(SerializerMetaclass): @@ -221,7 +238,9 @@ class PolymorphicSerializerMetaclass(SerializerMetaclass): """ def __new__(cls, name, bases, attrs): - new_class = super(PolymorphicSerializerMetaclass, cls).__new__(cls, name, bases, attrs) + new_class = super(PolymorphicSerializerMetaclass, cls).__new__( + cls, name, bases, attrs + ) # Ensure initialization is only performed for subclasses of PolymorphicModelSerializer # (excluding PolymorphicModelSerializer class itself). @@ -229,17 +248,21 @@ def __new__(cls, name, bases, attrs): if not parents: return new_class - polymorphic_serializers = getattr(new_class, 'polymorphic_serializers', None) + polymorphic_serializers = getattr(new_class, "polymorphic_serializers", None) if not polymorphic_serializers: raise NotImplementedError( - "A PolymorphicModelSerializer must define a `polymorphic_serializers` attribute.") + "A PolymorphicModelSerializer must define a `polymorphic_serializers` attribute." 
+ ) serializer_to_model = { - serializer: serializer.Meta.model for serializer in polymorphic_serializers} + serializer: serializer.Meta.model for serializer in polymorphic_serializers + } model_to_serializer = { - serializer.Meta.model: serializer for serializer in polymorphic_serializers} + serializer.Meta.model: serializer for serializer in polymorphic_serializers + } type_to_serializer = { - get_resource_type_from_serializer(serializer): serializer for - serializer in polymorphic_serializers} + get_resource_type_from_serializer(serializer): serializer + for serializer in polymorphic_serializers + } new_class._poly_serializer_model_map = serializer_to_model new_class._poly_model_serializer_map = model_to_serializer new_class._poly_type_serializer_map = type_to_serializer @@ -252,21 +275,30 @@ def __new__(cls, name, bases, attrs): return new_class -class PolymorphicModelSerializer(ModelSerializer, metaclass=PolymorphicSerializerMetaclass): +class PolymorphicModelSerializer( + ModelSerializer, metaclass=PolymorphicSerializerMetaclass +): """ A serializer for polymorphic models. Useful for "lazy" parent models. Leaves should be represented with a regular serializer. """ + def get_fields(self): """ Return an exhaustive list of the polymorphic serializer fields. """ if self.instance not in (None, []): if not isinstance(self.instance, QuerySet): - serializer_class = self.get_polymorphic_serializer_for_instance(self.instance) - return serializer_class(self.instance, context=self.context).get_fields() + serializer_class = self.get_polymorphic_serializer_for_instance( + self.instance + ) + return serializer_class( + self.instance, context=self.context + ).get_fields() else: - raise Exception("Cannot get fields from a polymorphic serializer given a queryset") + raise Exception( + "Cannot get fields from a polymorphic serializer given a queryset" + ) return super(PolymorphicModelSerializer, self).get_fields() @classmethod @@ -281,7 +313,9 @@ def get_polymorphic_serializer_for_instance(cls, instance): except KeyError: raise NotImplementedError( "No polymorphic serializer has been found for model {}".format( - instance._meta.model.__name__)) + instance._meta.model.__name__ + ) + ) @classmethod def get_polymorphic_model_for_serializer(cls, serializer): @@ -294,7 +328,10 @@ def get_polymorphic_model_for_serializer(cls, serializer): return cls._poly_serializer_model_map[serializer] except KeyError: raise NotImplementedError( - "No polymorphic model has been found for serializer {}".format(serializer.__name__)) + "No polymorphic model has been found for serializer {}".format( + serializer.__name__ + ) + ) @classmethod def get_polymorphic_serializer_for_type(cls, obj_type): @@ -307,7 +344,8 @@ def get_polymorphic_serializer_for_type(cls, obj_type): return cls._poly_type_serializer_map[obj_type] except KeyError: raise NotImplementedError( - "No polymorphic serializer has been found for type {}".format(obj_type)) + "No polymorphic serializer has been found for type {}".format(obj_type) + ) @classmethod def get_polymorphic_model_for_type(cls, obj_type): @@ -317,7 +355,8 @@ def get_polymorphic_model_for_type(cls, obj_type): means that a serializer is missing in the class's `polymorphic_serializers` attribute. 
""" return cls.get_polymorphic_model_for_serializer( - cls.get_polymorphic_serializer_for_type(obj_type)) + cls.get_polymorphic_serializer_for_type(obj_type) + ) @classmethod def get_polymorphic_types(cls): @@ -331,21 +370,27 @@ def to_representation(self, instance): Retrieve the appropriate polymorphic serializer and use this to handle representation. """ serializer_class = self.get_polymorphic_serializer_for_instance(instance) - return serializer_class(instance, context=self.context).to_representation(instance) + return serializer_class(instance, context=self.context).to_representation( + instance + ) def to_internal_value(self, data): """ Ensure that the given type is one of the expected polymorphic types, then retrieve the appropriate polymorphic serializer and use this to handle internal value. """ - received_type = data.get('type') + received_type = data.get("type") expected_types = self.get_polymorphic_types() if received_type not in expected_types: raise Conflict( - 'Incorrect relation type. Expected on of [{expected_types}], ' - 'received {received_type}.'.format( - expected_types=', '.join(expected_types), received_type=received_type)) + "Incorrect relation type. Expected on of [{expected_types}], " + "received {received_type}.".format( + expected_types=", ".join(expected_types), + received_type=received_type, + ) + ) serializer_class = self.get_polymorphic_serializer_for_type(received_type) self.__class__ = serializer_class - return serializer_class(self.instance, data, context=self.context, - partial=self.partial).to_internal_value(data) + return serializer_class( + self.instance, data, context=self.context, partial=self.partial + ).to_internal_value(data) diff --git a/rest_framework_json_api/settings.py b/rest_framework_json_api/settings.py index 1385630c..0384894c 100644 --- a/rest_framework_json_api/settings.py +++ b/rest_framework_json_api/settings.py @@ -7,13 +7,13 @@ from django.conf import settings from django.core.signals import setting_changed -JSON_API_SETTINGS_PREFIX = 'JSON_API_' +JSON_API_SETTINGS_PREFIX = "JSON_API_" DEFAULTS = { - 'FORMAT_FIELD_NAMES': False, - 'FORMAT_TYPES': False, - 'PLURALIZE_TYPES': False, - 'UNIFORM_EXCEPTIONS': False, + "FORMAT_FIELD_NAMES": False, + "FORMAT_TYPES": False, + "PLURALIZE_TYPES": False, + "UNIFORM_EXCEPTIONS": False, } @@ -31,7 +31,9 @@ def __getattr__(self, attr): if attr not in self.defaults: raise AttributeError("Invalid JSON API setting: '%s'" % attr) - value = getattr(self.user_settings, JSON_API_SETTINGS_PREFIX + attr, self.defaults[attr]) + value = getattr( + self.user_settings, JSON_API_SETTINGS_PREFIX + attr, self.defaults[attr] + ) # Cache the result setattr(self, attr, value) @@ -42,9 +44,9 @@ def __getattr__(self, attr): def reload_json_api_settings(*args, **kwargs): - django_setting = kwargs['setting'] - setting = django_setting.replace(JSON_API_SETTINGS_PREFIX, '') - value = kwargs['value'] + django_setting = kwargs["setting"] + setting = django_setting.replace(JSON_API_SETTINGS_PREFIX, "") + value = kwargs["value"] if setting in DEFAULTS.keys(): if value is not None: setattr(json_api_settings, setting, value) diff --git a/rest_framework_json_api/utils.py b/rest_framework_json_api/utils.py index b99de91a..41683303 100644 --- a/rest_framework_json_api/utils.py +++ b/rest_framework_json_api/utils.py @@ -8,7 +8,7 @@ from django.db.models import Manager from django.db.models.fields.related_descriptors import ( ManyToManyDescriptor, - ReverseManyToOneDescriptor + ReverseManyToOneDescriptor, ) from django.http import 
Http404 from django.utils import encoding @@ -20,7 +20,7 @@ from .settings import json_api_settings # Generic relation descriptor from django.contrib.contenttypes. -if 'django.contrib.contenttypes' not in settings.INSTALLED_APPS: # pragma: no cover +if "django.contrib.contenttypes" not in settings.INSTALLED_APPS: # pragma: no cover # Target application does not use contenttypes. Importing would cause errors. ReverseGenericManyToOneDescriptor = object() else: @@ -32,7 +32,8 @@ def get_resource_name(context, expand_polymorphic_types=False): Return the name of a resource. """ from rest_framework_json_api.serializers import PolymorphicModelSerializer - view = context.get('view') + + view = context.get("view") # Sanity check to make sure we have a view. if not view: @@ -45,18 +46,20 @@ def get_resource_name(context, expand_polymorphic_types=False): except (AttributeError, ValueError): pass else: - if code.startswith('4') or code.startswith('5'): - return 'errors' + if code.startswith("4") or code.startswith("5"): + return "errors" try: - resource_name = getattr(view, 'resource_name') + resource_name = getattr(view, "resource_name") except AttributeError: try: - if 'kwargs' in context and 'related_field' in context['kwargs']: + if "kwargs" in context and "related_field" in context["kwargs"]: serializer = view.get_related_serializer_class() else: serializer = view.get_serializer_class() - if expand_polymorphic_types and issubclass(serializer, PolymorphicModelSerializer): + if expand_polymorphic_types and issubclass( + serializer, PolymorphicModelSerializer + ): return serializer.get_polymorphic_types() else: return get_resource_type_from_serializer(serializer) @@ -78,15 +81,15 @@ def get_resource_name(context, expand_polymorphic_types=False): def get_serializer_fields(serializer): fields = None - if hasattr(serializer, 'child'): - fields = getattr(serializer.child, 'fields') - meta = getattr(serializer.child, 'Meta', None) - if hasattr(serializer, 'fields'): - fields = getattr(serializer, 'fields') - meta = getattr(serializer, 'Meta', None) + if hasattr(serializer, "child"): + fields = getattr(serializer.child, "fields") + meta = getattr(serializer.child, "Meta", None) + if hasattr(serializer, "fields"): + fields = getattr(serializer, "fields") + meta = getattr(serializer, "Meta", None) if fields is not None: - meta_fields = getattr(meta, 'meta_fields', {}) + meta_fields = getattr(meta, "meta_fields", {}) for field in meta_fields: try: fields.pop(field) @@ -118,15 +121,15 @@ def format_field_names(obj, format_type=None): def format_value(value, format_type=None): if format_type is None: format_type = json_api_settings.FORMAT_FIELD_NAMES - if format_type == 'dasherize': + if format_type == "dasherize": # inflection can't dasherize camelCase value = inflection.underscore(value) value = inflection.dasherize(value) - elif format_type == 'camelize': + elif format_type == "camelize": value = inflection.camelize(value, False) - elif format_type == 'capitalize': + elif format_type == "capitalize": value = inflection.camelize(value) - elif format_type == 'underscore': + elif format_type == "underscore": value = inflection.underscore(value) return value @@ -147,22 +150,24 @@ def format_resource_type(value, format_type=None, pluralize=None): def get_related_resource_type(relation): from rest_framework_json_api.serializers import PolymorphicModelSerializer + try: return get_resource_type_from_serializer(relation) except AttributeError: pass relation_model = None - if hasattr(relation, '_meta'): + if 
hasattr(relation, "_meta"): relation_model = relation._meta.model - elif hasattr(relation, 'model'): + elif hasattr(relation, "model"): # the model type was explicitly passed as a kwarg to ResourceRelatedField relation_model = relation.model - elif hasattr(relation, 'get_queryset') and relation.get_queryset() is not None: + elif hasattr(relation, "get_queryset") and relation.get_queryset() is not None: relation_model = relation.get_queryset().model elif ( - getattr(relation, 'many', False) and - hasattr(relation.child, 'Meta') and - hasattr(relation.child.Meta, 'model')): + getattr(relation, "many", False) + and hasattr(relation.child, "Meta") + and hasattr(relation.child.Meta, "model") + ): # For ManyToMany relationships, get the model from the child # serializer of the list serializer relation_model = relation.child.Meta.model @@ -171,20 +176,25 @@ def get_related_resource_type(relation): parent_model = None if isinstance(parent_serializer, PolymorphicModelSerializer): parent_model = parent_serializer.get_polymorphic_serializer_for_instance( - parent_serializer.instance).Meta.model - elif hasattr(parent_serializer, 'Meta'): - parent_model = getattr(parent_serializer.Meta, 'model', None) - elif hasattr(parent_serializer, 'parent') and hasattr(parent_serializer.parent, 'Meta'): - parent_model = getattr(parent_serializer.parent.Meta, 'model', None) + parent_serializer.instance + ).Meta.model + elif hasattr(parent_serializer, "Meta"): + parent_model = getattr(parent_serializer.Meta, "model", None) + elif hasattr(parent_serializer, "parent") and hasattr( + parent_serializer.parent, "Meta" + ): + parent_model = getattr(parent_serializer.parent.Meta, "model", None) if parent_model is not None: if relation.source: - if relation.source != '*': + if relation.source != "*": parent_model_relation = getattr(parent_model, relation.source) else: parent_model_relation = getattr(parent_model, relation.field_name) else: - parent_model_relation = getattr(parent_model, parent_serializer.field_name) + parent_model_relation = getattr( + parent_model, parent_serializer.field_name + ) parent_model_relation_type = type(parent_model_relation) if parent_model_relation_type is ReverseManyToOneDescriptor: @@ -196,7 +206,7 @@ def get_related_resource_type(relation): relation_model = parent_model_relation.field.model elif parent_model_relation_type is ReverseGenericManyToOneDescriptor: relation_model = parent_model_relation.rel.model - elif hasattr(parent_model_relation, 'field'): + elif hasattr(parent_model_relation, "field"): try: relation_model = parent_model_relation.field.remote_field.model except AttributeError: @@ -205,17 +215,16 @@ def get_related_resource_type(relation): return get_related_resource_type(parent_model_relation) if relation_model is None: - raise APIException(_('Could not resolve resource type for relation %s' % relation)) + raise APIException( + _("Could not resolve resource type for relation %s" % relation) + ) return get_resource_type_from_model(relation_model) def get_resource_type_from_model(model): - json_api_meta = getattr(model, 'JSONAPIMeta', None) - return getattr( - json_api_meta, - 'resource_name', - format_resource_type(model.__name__)) + json_api_meta = getattr(model, "JSONAPIMeta", None) + return getattr(json_api_meta, "resource_name", format_resource_type(model.__name__)) def get_resource_type_from_queryset(qs): @@ -223,7 +232,7 @@ def get_resource_type_from_queryset(qs): def get_resource_type_from_instance(instance): - if hasattr(instance, '_meta'): + if hasattr(instance, 
"_meta"): return get_resource_type_from_model(instance._meta.model) @@ -232,39 +241,41 @@ def get_resource_type_from_manager(manager): def get_resource_type_from_serializer(serializer): - json_api_meta = getattr(serializer, 'JSONAPIMeta', None) - meta = getattr(serializer, 'Meta', None) - if hasattr(json_api_meta, 'resource_name'): + json_api_meta = getattr(serializer, "JSONAPIMeta", None) + meta = getattr(serializer, "Meta", None) + if hasattr(json_api_meta, "resource_name"): return json_api_meta.resource_name - elif hasattr(meta, 'resource_name'): + elif hasattr(meta, "resource_name"): return meta.resource_name - elif hasattr(meta, 'model'): + elif hasattr(meta, "model"): return get_resource_type_from_model(meta.model) raise AttributeError() def get_included_resources(request, serializer=None): """ Build a list of included resources. """ - include_resources_param = request.query_params.get('include') if request else None + include_resources_param = request.query_params.get("include") if request else None if include_resources_param: - return include_resources_param.split(',') + return include_resources_param.split(",") else: return get_default_included_resources_from_serializer(serializer) def get_default_included_resources_from_serializer(serializer): - meta = getattr(serializer, 'JSONAPIMeta', None) - if meta is None and getattr(serializer, 'many', False): - meta = getattr(serializer.child, 'JSONAPIMeta', None) - return list(getattr(meta, 'included_resources', [])) + meta = getattr(serializer, "JSONAPIMeta", None) + if meta is None and getattr(serializer, "many", False): + meta = getattr(serializer.child, "JSONAPIMeta", None) + return list(getattr(meta, "included_resources", [])) def get_included_serializers(serializer): - included_serializers = copy.copy(getattr(serializer, 'included_serializers', dict())) + included_serializers = copy.copy( + getattr(serializer, "included_serializers", dict()) + ) for name, value in iter(included_serializers.items()): if not isinstance(value, type): - if value == 'self': + if value == "self": included_serializers[name] = ( serializer if isinstance(serializer, type) else serializer.__class__ ) @@ -281,7 +292,7 @@ def get_relation_instance(resource_instance, source, serializer): # if the field is not defined on the model then we check the serializer # and if no value is there we skip over the field completely serializer_method = getattr(serializer, source, None) - if serializer_method and hasattr(serializer_method, '__call__'): + if serializer_method and hasattr(serializer_method, "__call__"): relation_instance = serializer_method(resource_instance) else: return False, None @@ -315,12 +326,12 @@ def format_drf_errors(response, context, exc): # handle generic errors. 
ValidationError('test') in a view for example if isinstance(response.data, list): for message in response.data: - errors.extend(format_error_object(message, '/data', response)) + errors.extend(format_error_object(message, "/data", response)) # handle all errors thrown from serializers else: for field, error in response.data.items(): field = format_value(field) - pointer = '/data/attributes/{}'.format(field) + pointer = "/data/attributes/{}".format(field) if isinstance(exc, Http404) and isinstance(error, str): # 404 errors don't have a pointer errors.extend(format_error_object(error, None, response)) @@ -328,14 +339,14 @@ def format_drf_errors(response, context, exc): classes = inspect.getmembers(exceptions, inspect.isclass) # DRF sets the `field` to 'detail' for its own exceptions if isinstance(exc, tuple(x[1] for x in classes)): - pointer = '/data' + pointer = "/data" errors.extend(format_error_object(error, pointer, response)) elif isinstance(error, list): errors.extend(format_error_object(error, pointer, response)) else: errors.extend(format_error_object(error, pointer, response)) - context['view'].resource_name = 'errors' + context["view"].resource_name = "errors" response.data = errors return response @@ -347,41 +358,46 @@ def format_error_object(message, pointer, response): # as there is no required field in error object we check that all fields are string # except links and source which might be a dict - is_custom_error = all([ - isinstance(value, str) - for key, value in message.items() if key not in ['links', 'source'] - ]) + is_custom_error = all( + [ + isinstance(value, str) + for key, value in message.items() + if key not in ["links", "source"] + ] + ) if is_custom_error: - if 'source' not in message: - message['source'] = {} - message['source'] = { - 'pointer': pointer, + if "source" not in message: + message["source"] = {} + message["source"] = { + "pointer": pointer, } errors.append(message) else: for k, v in message.items(): - errors.extend(format_error_object(v, pointer + '/{}'.format(k), response)) + errors.extend( + format_error_object(v, pointer + "/{}".format(k), response) + ) elif isinstance(message, list): for num, error in enumerate(message): if isinstance(error, (list, dict)): - new_pointer = pointer + '/{}'.format(num) + new_pointer = pointer + "/{}".format(num) else: new_pointer = pointer if error: errors.extend(format_error_object(error, new_pointer, response)) else: error_obj = { - 'detail': message, - 'status': encoding.force_str(response.status_code), + "detail": message, + "status": encoding.force_str(response.status_code), } if pointer is not None: - error_obj['source'] = { - 'pointer': pointer, + error_obj["source"] = { + "pointer": pointer, } code = getattr(message, "code", None) if code is not None: - error_obj['code'] = code + error_obj["code"] = code errors.append(error_obj) return errors @@ -389,5 +405,5 @@ def format_error_object(message, pointer, response): def format_errors(data): if len(data) > 1 and isinstance(data, list): - data.sort(key=lambda x: x.get('source', {}).get('pointer', '')) - return {'errors': data} + data.sort(key=lambda x: x.get("source", {}).get("pointer", "")) + return {"errors": data} diff --git a/rest_framework_json_api/views.py b/rest_framework_json_api/views.py index 7c874e7a..7a558cbf 100644 --- a/rest_framework_json_api/views.py +++ b/rest_framework_json_api/views.py @@ -6,7 +6,7 @@ ForwardManyToOneDescriptor, ManyToManyDescriptor, ReverseManyToOneDescriptor, - ReverseOneToOneDescriptor + ReverseOneToOneDescriptor, ) from 
django.db.models.manager import Manager from django.db.models.query import QuerySet @@ -26,7 +26,7 @@ Hyperlink, OrderedDict, get_included_resources, - get_resource_type_from_instance + get_resource_type_from_instance, ) @@ -54,16 +54,16 @@ class MyViewSet(viewsets.ModelViewSet): """ def get_select_related(self, include): - return getattr(self, 'select_for_includes', {}).get(include, None) + return getattr(self, "select_for_includes", {}).get(include, None) def get_prefetch_related(self, include): - return getattr(self, 'prefetch_for_includes', {}).get(include, None) + return getattr(self, "prefetch_for_includes", {}).get(include, None) def get_queryset(self, *args, **kwargs): qs = super(PreloadIncludesMixin, self).get_queryset(*args, **kwargs) included_resources = get_included_resources(self.request) - for included in included_resources + ['__all__']: + for included in included_resources + ["__all__"]: select_related = self.get_select_related(included) if select_related is not None: @@ -83,10 +83,10 @@ def get_queryset(self, *args, **kwargs): included_resources = get_included_resources(self.request) - for included in included_resources + ['__all__']: + for included in included_resources + ["__all__"]: # If include was not defined, trying to resolve it automatically included_model = None - levels = included.split('.') + levels = included.split(".") level_model = qs.model for level in levels: if not hasattr(level_model, level): @@ -94,11 +94,11 @@ def get_queryset(self, *args, **kwargs): field = getattr(level_model, level) field_class = field.__class__ - is_forward_relation = ( - issubclass(field_class, (ForwardManyToOneDescriptor, ManyToManyDescriptor)) + is_forward_relation = issubclass( + field_class, (ForwardManyToOneDescriptor, ManyToManyDescriptor) ) - is_reverse_relation = ( - issubclass(field_class, (ReverseManyToOneDescriptor, ReverseOneToOneDescriptor)) + is_reverse_relation = issubclass( + field_class, (ReverseManyToOneDescriptor, ReverseOneToOneDescriptor) ) if not (is_forward_relation or is_reverse_relation): break @@ -118,7 +118,7 @@ def get_queryset(self, *args, **kwargs): level_model = model_field.model if included_model is not None: - qs = qs.prefetch_related(included.replace('.', '__')) + qs = qs.prefetch_related(included.replace(".", "__")) return qs @@ -132,7 +132,7 @@ def retrieve_related(self, request, *args, **kwargs): serializer_kwargs = {} instance = self.get_related_instance() - if hasattr(instance, 'all'): + if hasattr(instance, "all"): instance = instance.all() if callable(instance): @@ -142,36 +142,41 @@ def retrieve_related(self, request, *args, **kwargs): return Response(data=None) if isinstance(instance, Iterable): - serializer_kwargs['many'] = True + serializer_kwargs["many"] = True serializer = self.get_related_serializer(instance, **serializer_kwargs) return Response(serializer.data) def get_related_serializer(self, instance, **kwargs): serializer_class = self.get_related_serializer_class() - kwargs.setdefault('context', self.get_serializer_context()) + kwargs.setdefault("context", self.get_serializer_context()) return serializer_class(instance, **kwargs) def get_related_serializer_class(self): parent_serializer_class = super(RelatedMixin, self).get_serializer_class() - if 'related_field' in self.kwargs: - field_name = self.kwargs['related_field'] + if "related_field" in self.kwargs: + field_name = self.kwargs["related_field"] # Try get the class from related_serializers - if hasattr(parent_serializer_class, 'related_serializers'): - _class = 
parent_serializer_class.related_serializers.get(field_name, None) + if hasattr(parent_serializer_class, "related_serializers"): + _class = parent_serializer_class.related_serializers.get( + field_name, None + ) if _class is None: raise NotFound - elif hasattr(parent_serializer_class, 'included_serializers'): - _class = parent_serializer_class.included_serializers.get(field_name, None) + elif hasattr(parent_serializer_class, "included_serializers"): + _class = parent_serializer_class.included_serializers.get( + field_name, None + ) if _class is None: raise NotFound else: - assert False, \ - 'Either "included_serializers" or "related_serializers" should be configured' + assert ( + False + ), 'Either "included_serializers" or "related_serializers" should be configured' if not isinstance(_class, type): return import_class_from_dotted_path(_class) @@ -180,7 +185,7 @@ def get_related_serializer_class(self): return parent_serializer_class def get_related_field_name(self): - return self.kwargs['related_field'] + return self.kwargs["related_field"] def get_related_instance(self): parent_obj = self.get_object() @@ -206,17 +211,16 @@ def get_related_instance(self): raise NotFound -class ModelViewSet(AutoPrefetchMixin, - PreloadIncludesMixin, - RelatedMixin, - viewsets.ModelViewSet): - http_method_names = ['get', 'post', 'patch', 'delete', 'head', 'options'] +class ModelViewSet( + AutoPrefetchMixin, PreloadIncludesMixin, RelatedMixin, viewsets.ModelViewSet +): + http_method_names = ["get", "post", "patch", "delete", "head", "options"] -class ReadOnlyModelViewSet(AutoPrefetchMixin, - RelatedMixin, - viewsets.ReadOnlyModelViewSet): - http_method_names = ['get', 'post', 'patch', 'delete', 'head', 'options'] +class ReadOnlyModelViewSet( + AutoPrefetchMixin, RelatedMixin, viewsets.ReadOnlyModelViewSet +): + http_method_names = ["get", "post", "patch", "delete", "head", "options"] class RelationshipView(generics.GenericAPIView): @@ -224,10 +228,10 @@ class RelationshipView(generics.GenericAPIView): self_link_view_name = None related_link_view_name = None field_name_mapping = {} - http_method_names = ['get', 'post', 'patch', 'delete', 'head', 'options'] + http_method_names = ["get", "post", "patch", "delete", "head", "options"] def get_serializer_class(self): - if getattr(self, 'action', False) is None: + if getattr(self, "action", False) is None: return Serializer return self.serializer_class @@ -255,10 +259,10 @@ def get_url(self, name, view_name, kwargs, request): url = self.reverse(view_name, kwargs=kwargs, request=request) except NoReverseMatch: msg = ( - 'Could not resolve URL for hyperlinked relationship using ' + "Could not resolve URL for hyperlinked relationship using " 'view name "%s". You may have failed to include the related ' - 'model in your API, or incorrectly configured the ' - '`lookup_field` attribute on this field.' + "model in your API, or incorrectly configured the " + "`lookup_field` attribute on this field." 
) raise ImproperlyConfigured(msg % view_name) @@ -269,15 +273,17 @@ def get_url(self, name, view_name, kwargs, request): def get_links(self): return_data = OrderedDict() - self_link = self.get_url('self', self.self_link_view_name, self.kwargs, self.request) + self_link = self.get_url( + "self", self.self_link_view_name, self.kwargs, self.request + ) related_kwargs = {self.lookup_field: self.kwargs.get(self.lookup_field)} related_link = self.get_url( - 'related', self.related_link_view_name, related_kwargs, self.request + "related", self.related_link_view_name, related_kwargs, self.request ) if self_link: - return_data.update({'self': self_link}) + return_data.update({"self": self_link}) if related_link: - return_data.update({'related': related_link}) + return_data.update({"related": related_link}) return return_data def get(self, request, *args, **kwargs): @@ -292,7 +298,7 @@ def remove_relationships(self, instance_manager, field): for obj in instance_manager.all(): setattr(obj, field_object.name, None) obj.save() - elif hasattr(instance_manager, 'clear'): + elif hasattr(instance_manager, "clear"): instance_manager.clear() else: instance_manager.all().delete() @@ -313,25 +319,33 @@ def patch(self, request, *args, **kwargs): # for to one if hasattr(related_instance_or_manager, "field"): related_instance_or_manager = self.remove_relationships( - instance_manager=related_instance_or_manager, field="field") + instance_manager=related_instance_or_manager, field="field" + ) # for to many else: related_instance_or_manager = self.remove_relationships( - instance_manager=related_instance_or_manager, field="target_field") + instance_manager=related_instance_or_manager, field="target_field" + ) # have to set bulk to False since data isn't saved yet class_name = related_instance_or_manager.__class__.__name__ - if class_name != 'ManyRelatedManager': + if class_name != "ManyRelatedManager": related_instance_or_manager.add(*serializer.validated_data, bulk=False) else: related_instance_or_manager.add(*serializer.validated_data) else: related_model_class = related_instance_or_manager.__class__ - serializer = self.get_serializer(data=request.data, model_class=related_model_class) + serializer = self.get_serializer( + data=request.data, model_class=related_model_class + ) serializer.is_valid(raise_exception=True) - setattr(parent_obj, self.get_related_field_name(), serializer.validated_data) + setattr( + parent_obj, self.get_related_field_name(), serializer.validated_data + ) parent_obj.save() - related_instance_or_manager = self.get_related_instance() # Refresh instance + related_instance_or_manager = ( + self.get_related_instance() + ) # Refresh instance result_serializer = self._instantiate_serializer(related_instance_or_manager) return Response(result_serializer.data) @@ -344,11 +358,13 @@ def post(self, request, *args, **kwargs): data=request.data, model_class=related_model_class, many=True ) serializer.is_valid(raise_exception=True) - if frozenset(serializer.validated_data) <= frozenset(related_instance_or_manager.all()): + if frozenset(serializer.validated_data) <= frozenset( + related_instance_or_manager.all() + ): return Response(status=204) related_instance_or_manager.add(*serializer.validated_data) else: - raise MethodNotAllowed('POST') + raise MethodNotAllowed("POST") result_serializer = self._instantiate_serializer(related_instance_or_manager) return Response(result_serializer.data) @@ -368,11 +384,11 @@ def delete(self, request, *args, **kwargs): 
related_instance_or_manager.remove(*serializer.validated_data) except AttributeError: raise Conflict( - 'This object cannot be removed from this relationship without being ' - 'added to another' + "This object cannot be removed from this relationship without being " + "added to another" ) else: - raise MethodNotAllowed('DELETE') + raise MethodNotAllowed("DELETE") result_serializer = self._instantiate_serializer(related_instance_or_manager) return Response(result_serializer.data) @@ -383,7 +399,7 @@ def get_related_instance(self): raise NotFound def get_related_field_name(self): - field_name = self.kwargs['related_field'] + field_name = self.kwargs["related_field"] if field_name in self.field_name_mapping: return self.field_name_mapping[field_name] return field_name @@ -398,7 +414,7 @@ def _instantiate_serializer(self, instance): return self.get_serializer(instance=instance, many=True) def get_resource_name(self): - if not hasattr(self, '_resource_name'): + if not hasattr(self, "_resource_name"): instance = getattr(self.get_object(), self.get_related_field_name()) self._resource_name = get_resource_type_from_instance(instance) return self._resource_name diff --git a/setup.py b/setup.py index 3bf1c728..a076a7a5 100755 --- a/setup.py +++ b/setup.py @@ -7,15 +7,15 @@ from setuptools import setup -needs_wheel = {'bdist_wheel'}.intersection(sys.argv) -wheel = ['wheel'] if needs_wheel else [] +needs_wheel = {"bdist_wheel"}.intersection(sys.argv) +wheel = ["wheel"] if needs_wheel else [] def read(*paths): """ Build a file path from paths and return the contents. """ - with open(os.path.join(*paths), 'r') as f: + with open(os.path.join(*paths), "r") as f: return f.read() @@ -23,7 +23,7 @@ def get_version(package): """ Return package version as listed in `__version__` in `init.py`. """ - init_py = open(os.path.join(package, '__init__.py')).read() + init_py = open(os.path.join(package, "__init__.py")).read() return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1) @@ -31,9 +31,11 @@ def get_packages(package): """ Return root package and all sub-packages. """ - return [dirpath - for dirpath, dirnames, filenames in os.walk(package) - if os.path.exists(os.path.join(dirpath, '__init__.py'))] + return [ + dirpath + for dirpath, dirnames, filenames in os.walk(package) + if os.path.exists(os.path.join(dirpath, "__init__.py")) + ] def get_package_data(package): @@ -41,63 +43,67 @@ def get_package_data(package): Return all files under the root package, that are not in a package themselves. 
""" - walk = [(dirpath.replace(package + os.sep, '', 1), filenames) - for dirpath, dirnames, filenames in os.walk(package) - if not os.path.exists(os.path.join(dirpath, '__init__.py'))] + walk = [ + (dirpath.replace(package + os.sep, "", 1), filenames) + for dirpath, dirnames, filenames in os.walk(package) + if not os.path.exists(os.path.join(dirpath, "__init__.py")) + ] filepaths = [] for base, filenames in walk: - filepaths.extend([os.path.join(base, filename) - for filename in filenames]) + filepaths.extend([os.path.join(base, filename) for filename in filenames]) return {package: filepaths} -if sys.argv[-1] == 'publish': +if sys.argv[-1] == "publish": os.system("python setup.py sdist upload") os.system("python setup.py bdist_wheel upload") print("You probably want to also tag the version now:") - print(" git tag -a {0} -m 'version {0}'".format( - get_version('rest_framework_json_api'))) + print( + " git tag -a {0} -m 'version {0}'".format( + get_version("rest_framework_json_api") + ) + ) print(" git push --tags") sys.exit() setup( - name='djangorestframework-jsonapi', - version=get_version('rest_framework_json_api'), - url='https://github.com/django-json-api/django-rest-framework-json-api', - license='BSD', - description='A Django REST framework API adapter for the JSON API spec.', - long_description=read('README.rst'), - author='Jerel Unruh', - author_email='', - packages=get_packages('rest_framework_json_api'), - package_data=get_package_data('rest_framework_json_api'), + name="djangorestframework-jsonapi", + version=get_version("rest_framework_json_api"), + url="https://github.com/django-json-api/django-rest-framework-json-api", + license="BSD", + description="A Django REST framework API adapter for the JSON API spec.", + long_description=read("README.rst"), + author="Jerel Unruh", + author_email="", + packages=get_packages("rest_framework_json_api"), + package_data=get_package_data("rest_framework_json_api"), classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Framework :: Django', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Software Development :: Libraries :: Application Frameworks', - 'Topic :: Software Development :: Libraries :: Python Modules', + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Framework :: Django", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Software Development :: Libraries :: Application Frameworks", + "Topic :: Software Development :: Libraries :: Python Modules", ], install_requires=[ - 'inflection>=0.3.0', - 'djangorestframework>=3.12,<3.13', - 'django>=2.2,<3.2', + "inflection>=0.3.0", + "djangorestframework>=3.12,<3.13", + "django>=2.2,<3.2", ], extras_require={ - 'django-polymorphic': ['django-polymorphic>=2.0'], - 'django-filter': 
['django-filter>=2.0'], - 'openapi': ['pyyaml>=5.3', 'uritemplate>=3.0.1'] + "django-polymorphic": ["django-polymorphic>=2.0"], + "django-filter": ["django-filter>=2.0"], + "openapi": ["pyyaml>=5.3", "uritemplate>=3.0.1"], }, setup_requires=wheel, python_requires=">=3.6", diff --git a/tests/models.py b/tests/models.py index e91911ce..3c3e6146 100644 --- a/tests/models.py +++ b/tests/models.py @@ -7,7 +7,7 @@ class DJAModel(models.Model): """ class Meta: - app_label = 'tests' + app_label = "tests" abstract = True @@ -23,7 +23,7 @@ class ManyToManyTarget(DJAModel): class ManyToManySource(DJAModel): name = models.CharField(max_length=100) - targets = models.ManyToManyField(ManyToManyTarget, related_name='sources') + targets = models.ManyToManyField(ManyToManyTarget, related_name="sources") # ForeignKey @@ -33,5 +33,6 @@ class ForeignKeyTarget(DJAModel): class ForeignKeySource(DJAModel): name = models.CharField(max_length=100) - target = models.ForeignKey(ForeignKeyTarget, related_name='sources', - on_delete=models.CASCADE) + target = models.ForeignKey( + ForeignKeyTarget, related_name="sources", on_delete=models.CASCADE + ) diff --git a/tests/test_utils.py b/tests/test_utils.py index 8f272e1a..657f8160 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,7 +12,7 @@ format_value, get_included_serializers, get_related_resource_type, - get_resource_name + get_resource_name, ) from tests.models import ( BasicModel, @@ -20,7 +20,7 @@ ForeignKeySource, ForeignKeyTarget, ManyToManySource, - ManyToManyTarget + ManyToManyTarget, ) @@ -28,64 +28,79 @@ def test_get_resource_name_no_view(): assert get_resource_name({}) is None -@pytest.mark.parametrize("format_type,pluralize_type,output", [ - (False, False, 'APIView'), - (False, True, 'APIViews'), - ('dasherize', False, 'api-view'), - ('dasherize', True, 'api-views'), -]) +@pytest.mark.parametrize( + "format_type,pluralize_type,output", + [ + (False, False, "APIView"), + (False, True, "APIViews"), + ("dasherize", False, "api-view"), + ("dasherize", True, "api-views"), + ], +) def test_get_resource_name_from_view(settings, format_type, pluralize_type, output): settings.JSON_API_FORMAT_TYPES = format_type settings.JSON_API_PLURALIZE_TYPES = pluralize_type view = APIView() - context = {'view': view} + context = {"view": view} assert output == get_resource_name(context) -@pytest.mark.parametrize("format_type,pluralize_type", [ - (False, False), - (False, True), - ('dasherize', False), - ('dasherize', True), -]) -def test_get_resource_name_from_view_custom_resource_name(settings, format_type, pluralize_type): +@pytest.mark.parametrize( + "format_type,pluralize_type", + [ + (False, False), + (False, True), + ("dasherize", False), + ("dasherize", True), + ], +) +def test_get_resource_name_from_view_custom_resource_name( + settings, format_type, pluralize_type +): settings.JSON_API_FORMAT_TYPES = format_type settings.JSON_API_PLURALIZE_TYPES = pluralize_type view = APIView() - view.resource_name = 'custom' - context = {'view': view} - assert 'custom' == get_resource_name(context) - - -@pytest.mark.parametrize("format_type,pluralize_type,output", [ - (False, False, 'BasicModel'), - (False, True, 'BasicModels'), - ('dasherize', False, 'basic-model'), - ('dasherize', True, 'basic-models'), -]) + view.resource_name = "custom" + context = {"view": view} + assert "custom" == get_resource_name(context) + + +@pytest.mark.parametrize( + "format_type,pluralize_type,output", + [ + (False, False, "BasicModel"), + (False, True, "BasicModels"), + ("dasherize", 
False, "basic-model"), + ("dasherize", True, "basic-models"), + ], +) def test_get_resource_name_from_model(settings, format_type, pluralize_type, output): settings.JSON_API_FORMAT_TYPES = format_type settings.JSON_API_PLURALIZE_TYPES = pluralize_type view = APIView() view.model = BasicModel - context = {'view': view} + context = {"view": view} assert output == get_resource_name(context) -@pytest.mark.parametrize("format_type,pluralize_type,output", [ - (False, False, 'BasicModel'), - (False, True, 'BasicModels'), - ('dasherize', False, 'basic-model'), - ('dasherize', True, 'basic-models'), -]) -def test_get_resource_name_from_model_serializer_class(settings, format_type, - pluralize_type, output): +@pytest.mark.parametrize( + "format_type,pluralize_type,output", + [ + (False, False, "BasicModel"), + (False, True, "BasicModels"), + ("dasherize", False, "basic-model"), + ("dasherize", True, "basic-models"), + ], +) +def test_get_resource_name_from_model_serializer_class( + settings, format_type, pluralize_type, output +): class BasicModelSerializer(serializers.ModelSerializer): class Meta: - fields = ('text',) + fields = ("text",) model = BasicModel settings.JSON_API_FORMAT_TYPES = format_type @@ -93,22 +108,25 @@ class Meta: view = GenericAPIView() view.serializer_class = BasicModelSerializer - context = {'view': view} + context = {"view": view} assert output == get_resource_name(context) -@pytest.mark.parametrize("format_type,pluralize_type", [ - (False, False), - (False, True), - ('dasherize', False), - ('dasherize', True), -]) -def test_get_resource_name_from_model_serializer_class_custom_resource_name(settings, - format_type, - pluralize_type): +@pytest.mark.parametrize( + "format_type,pluralize_type", + [ + (False, False), + (False, True), + ("dasherize", False), + ("dasherize", True), + ], +) +def test_get_resource_name_from_model_serializer_class_custom_resource_name( + settings, format_type, pluralize_type +): class BasicModelSerializer(serializers.ModelSerializer): class Meta: - fields = ('text',) + fields = ("text",) model = BasicModel settings.JSON_API_FORMAT_TYPES = format_type @@ -116,22 +134,27 @@ class Meta: view = GenericAPIView() view.serializer_class = BasicModelSerializer - view.serializer_class.Meta.resource_name = 'custom' + view.serializer_class.Meta.resource_name = "custom" - context = {'view': view} - assert 'custom' == get_resource_name(context) + context = {"view": view} + assert "custom" == get_resource_name(context) -@pytest.mark.parametrize("format_type,pluralize_type", [ - (False, False), - (False, True), - ('dasherize', False), - ('dasherize', True), -]) -def test_get_resource_name_from_plain_serializer_class(settings, format_type, pluralize_type): +@pytest.mark.parametrize( + "format_type,pluralize_type", + [ + (False, False), + (False, True), + ("dasherize", False), + ("dasherize", True), + ], +) +def test_get_resource_name_from_plain_serializer_class( + settings, format_type, pluralize_type +): class PlainSerializer(serializers.Serializer): class Meta: - resource_name = 'custom' + resource_name = "custom" settings.JSON_API_FORMAT_TYPES = format_type settings.JSON_API_PLURALIZE_TYPES = pluralize_type @@ -139,61 +162,76 @@ class Meta: view = GenericAPIView() view.serializer_class = PlainSerializer - context = {'view': view} - assert 'custom' == get_resource_name(context) + context = {"view": view} + assert "custom" == get_resource_name(context) -@pytest.mark.parametrize("status_code", [ - status.HTTP_400_BAD_REQUEST, - status.HTTP_403_FORBIDDEN, - 
status.HTTP_500_INTERNAL_SERVER_ERROR -]) +@pytest.mark.parametrize( + "status_code", + [ + status.HTTP_400_BAD_REQUEST, + status.HTTP_403_FORBIDDEN, + status.HTTP_500_INTERNAL_SERVER_ERROR, + ], +) def test_get_resource_name_with_errors(status_code): view = APIView() - context = {'view': view} + context = {"view": view} view.response = Response(status=status_code) - assert 'errors' == get_resource_name(context) + assert "errors" == get_resource_name(context) -@pytest.mark.parametrize("format_type,output", [ - ('camelize', {'fullName': {'last-name': 'a', 'first-name': 'b'}}), - ('capitalize', {'FullName': {'last-name': 'a', 'first-name': 'b'}}), - ('dasherize', {'full-name': {'last-name': 'a', 'first-name': 'b'}}), - ('underscore', {'full_name': {'last-name': 'a', 'first-name': 'b'}}), -]) +@pytest.mark.parametrize( + "format_type,output", + [ + ("camelize", {"fullName": {"last-name": "a", "first-name": "b"}}), + ("capitalize", {"FullName": {"last-name": "a", "first-name": "b"}}), + ("dasherize", {"full-name": {"last-name": "a", "first-name": "b"}}), + ("underscore", {"full_name": {"last-name": "a", "first-name": "b"}}), + ], +) def test_format_field_names(settings, format_type, output): settings.JSON_API_FORMAT_FIELD_NAMES = format_type - value = {'full_name': {'last-name': 'a', 'first-name': 'b'}} + value = {"full_name": {"last-name": "a", "first-name": "b"}} assert format_field_names(value, format_type) == output -@pytest.mark.parametrize("format_type,output", [ - (None, 'first_name'), - ('camelize', 'firstName'), - ('capitalize', 'FirstName'), - ('dasherize', 'first-name'), - ('underscore', 'first_name') -]) +@pytest.mark.parametrize( + "format_type,output", + [ + (None, "first_name"), + ("camelize", "firstName"), + ("capitalize", "FirstName"), + ("dasherize", "first-name"), + ("underscore", "first_name"), + ], +) def test_format_value(settings, format_type, output): - assert format_value('first_name', format_type) == output + assert format_value("first_name", format_type) == output -@pytest.mark.parametrize("resource_type,pluralize,output", [ - (None, None, 'ResourceType'), - ('camelize', False, 'resourceType'), - ('camelize', True, 'resourceTypes'), -]) +@pytest.mark.parametrize( + "resource_type,pluralize,output", + [ + (None, None, "ResourceType"), + ("camelize", False, "resourceType"), + ("camelize", True, "resourceTypes"), + ], +) def test_format_resource_type(settings, resource_type, pluralize, output): - assert format_resource_type('ResourceType', resource_type, pluralize) == output + assert format_resource_type("ResourceType", resource_type, pluralize) == output -@pytest.mark.parametrize('model_class,field,output', [ - (ManyToManySource, 'targets', 'ManyToManyTarget'), - (ManyToManyTarget, 'sources', 'ManyToManySource'), - (ForeignKeySource, 'target', 'ForeignKeyTarget'), - (ForeignKeyTarget, 'sources', 'ForeignKeySource'), -]) +@pytest.mark.parametrize( + "model_class,field,output", + [ + (ManyToManySource, "targets", "ManyToManyTarget"), + (ManyToManyTarget, "sources", "ManyToManySource"), + (ForeignKeySource, "target", "ForeignKeyTarget"), + (ForeignKeyTarget, "sources", "ForeignKeySource"), + ], +) def test_get_related_resource_type(model_class, field, output): class RelatedResourceTypeSerializer(serializers.ModelSerializer): class Meta: @@ -212,29 +250,29 @@ class Meta: def test_get_included_serializers(): class IncludedSerializersModel(DJAModel): - self = models.ForeignKey('self', on_delete=models.CASCADE) + self = models.ForeignKey("self", on_delete=models.CASCADE) 
target = models.ForeignKey(ManyToManyTarget, on_delete=models.CASCADE) other_target = models.ForeignKey(ManyToManyTarget, on_delete=models.CASCADE) class Meta: - app_label = 'tests' + app_label = "tests" class IncludedSerializersSerializer(serializers.ModelSerializer): included_serializers = { - 'self': 'self', - 'target': ManyToManyTargetSerializer, - 'other_target': 'tests.test_utils.ManyToManyTargetSerializer' + "self": "self", + "target": ManyToManyTargetSerializer, + "other_target": "tests.test_utils.ManyToManyTargetSerializer", } class Meta: model = IncludedSerializersModel - fields = ('self', 'other_target', 'target') + fields = ("self", "other_target", "target") included_serializers = get_included_serializers(IncludedSerializersSerializer) expected_included_serializers = { - 'self': IncludedSerializersSerializer, - 'target': ManyToManyTargetSerializer, - 'other_target': ManyToManyTargetSerializer + "self": IncludedSerializersSerializer, + "target": ManyToManyTargetSerializer, + "other_target": ManyToManyTargetSerializer, } assert included_serializers == expected_included_serializers