diff --git a/docs/usage/file_upload.rst b/docs/usage/file_upload.rst
index 18718e75..cfc85df9 100644
--- a/docs/usage/file_upload.rst
+++ b/docs/usage/file_upload.rst
@@ -2,6 +2,7 @@ File uploads
 ============
 
 GQL supports file uploads with the :ref:`aiohttp transport <aiohttp_transport>`
+and the :ref:`requests transport <requests_transport>`
 using the `GraphQL multipart request spec`_.
 
 .. _GraphQL multipart request spec: https://github.com/jaydenseric/graphql-multipart-request-spec
@@ -18,6 +19,7 @@ In order to upload a single file, you need to:
 .. code-block:: python
 
     transport = AIOHTTPTransport(url='YOUR_URL')
+    # Or transport = RequestsHTTPTransport(url='YOUR_URL')
 
     client = Client(transport=transport)
 
@@ -45,6 +47,7 @@ It is also possible to upload multiple files using a list.
 .. code-block:: python
 
     transport = AIOHTTPTransport(url='YOUR_URL')
+    # Or transport = RequestsHTTPTransport(url='YOUR_URL')
 
     client = Client(transport=transport)
 
@@ -84,6 +87,9 @@ We provide methods to do that for two different uses cases:
 * Sending local files
 * Streaming downloaded files from an external URL to the GraphQL API
 
+.. note::
+    Streaming is only supported with the :ref:`aiohttp transport <aiohttp_transport>`
+
 Streaming local files
 ^^^^^^^^^^^^^^^^^^^^^
 
diff --git a/gql/transport/requests.py b/gql/transport/requests.py
index 7f9ff26a..68b4144b 100644
--- a/gql/transport/requests.py
+++ b/gql/transport/requests.py
@@ -1,15 +1,18 @@
+import io
 import json
 import logging
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, Optional, Tuple, Type, Union
 
 import requests
 from graphql import DocumentNode, ExecutionResult, print_ast
 from requests.adapters import HTTPAdapter, Retry
 from requests.auth import AuthBase
 from requests.cookies import RequestsCookieJar
+from requests_toolbelt.multipart.encoder import MultipartEncoder
 
 from gql.transport import Transport
+from ..utils import extract_files
 
 from .exceptions import (
     TransportAlreadyConnected,
     TransportClosed,
@@ -27,6 +30,8 @@ class RequestsHTTPTransport(Transport):
     The transport uses the requests library to send HTTP POST requests.
     """
 
+    file_classes: Tuple[Type[Any], ...] = (io.IOBase,)
+
     def __init__(
         self,
         url: str,
@@ -104,6 +109,7 @@ def execute(  # type: ignore
         operation_name: Optional[str] = None,
         timeout: Optional[int] = None,
         extra_args: Dict[str, Any] = None,
+        upload_files: bool = False,
     ) -> ExecutionResult:
         """Execute GraphQL query.
 
@@ -116,6 +122,7 @@ def execute(  # type: ignore
             Only required in multi-operation documents (Default: None).
         :param timeout: Specifies a default timeout for requests (Default: None).
         :param extra_args: additional arguments to send to the requests post method
+        :param upload_files: Set to True if you want to put files in the variable values
         :return: The result of execution.
             `data` is the result of executing the query, `errors` is null
             if no errors occurred, and is a non-empty array if an error occurred.
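
For context on the `upload_files` flag documented above, here is a minimal usage sketch with the requests transport, mirroring the aiohttp example in docs/usage/file_upload.rst. The endpoint URL and the `uploadFile` mutation/schema are placeholders, not something defined by this patch.

    from gql import Client, gql
    from gql.transport.requests import RequestsHTTPTransport

    # Placeholder endpoint; the server must implement the
    # GraphQL multipart request spec for uploads to work.
    transport = RequestsHTTPTransport(url="https://example.com/graphql")
    client = Client(transport=transport)

    query = gql(
        """
        mutation($file: Upload!) {
          uploadFile(input: {file: $file}) {
            success
          }
        }
        """
    )

    with open("YOUR_FILE_PATH", "rb") as f:
        # upload_files=True routes the request through the multipart
        # code path added in the hunk below instead of a plain JSON POST.
        result = client.execute(
            query, variable_values={"file": f}, upload_files=True
        )
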
@@ -126,21 +133,74 @@ def execute(  # type: ignore
 
         query_str = print_ast(document)
         payload: Dict[str, Any] = {"query": query_str}
-        if variable_values:
-            payload["variables"] = variable_values
+
         if operation_name:
             payload["operationName"] = operation_name
 
-        data_key = "json" if self.use_json else "data"
         post_args = {
             "headers": self.headers,
             "auth": self.auth,
             "cookies": self.cookies,
             "timeout": timeout or self.default_timeout,
             "verify": self.verify,
-            data_key: payload,
         }
 
+        if upload_files:
+            # If the upload_files flag is set, then we need variable_values
+            assert variable_values is not None
+
+            # If we upload files, we will extract the files present in the
+            # variable_values dict and replace them by null values
+            nulled_variable_values, files = extract_files(
+                variables=variable_values, file_classes=self.file_classes,
+            )
+
+            # Save the nulled variable values in the payload
+            payload["variables"] = nulled_variable_values
+
+            # Add the payload to the operations field
+            operations_str = json.dumps(payload)
+            log.debug("operations %s", operations_str)
+
+            # Generate the file map
+            # path is nested in a list because the spec allows multiple pointers
+            # to the same file. But we don't support that.
+            # Will generate something like {"0": ["variables.file"]}
+            file_map = {str(i): [path] for i, path in enumerate(files)}
+
+            # Enumerate the file streams
+            # Will generate something like {'0': <_io.BufferedReader ...>}
+            file_streams = {str(i): files[path] for i, path in enumerate(files)}
+
+            # Add the file map field
+            file_map_str = json.dumps(file_map)
+            log.debug("file_map %s", file_map_str)
+
+            fields = {"operations": operations_str, "map": file_map_str}
+
+            # Add the extracted files as remaining fields
+            for k, v in file_streams.items():
+                fields[k] = (getattr(v, "name", k), v)
+
+            # Prepare requests http to send multipart-encoded data
+            data = MultipartEncoder(fields=fields)
+
+            post_args["data"] = data
+
+            if post_args["headers"] is None:
+                post_args["headers"] = {}
+            else:
+                post_args["headers"] = {**post_args["headers"]}
+
+            post_args["headers"]["Content-Type"] = data.content_type
+
+        else:
+            if variable_values:
+                payload["variables"] = variable_values
+
+            data_key = "json" if self.use_json else "data"
+            post_args[data_key] = payload
+
         # Log the payload
         if log.isEnabledFor(logging.INFO):
             log.info(">>> %s", json.dumps(payload))
diff --git a/setup.py b/setup.py
index 248099ab..ead75821 100644
--- a/setup.py
+++ b/setup.py
@@ -38,6 +38,7 @@
 
 install_requests_requires = [
     "requests>=2.23,<3",
+    "requests_toolbelt>=0.9.1,<1",
 ]
 
 install_websockets_requires = [
diff --git a/tests/test_requests.py b/tests/test_requests.py
index e18875a2..d0cc7eb7 100644
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -8,6 +8,7 @@
     TransportQueryError,
     TransportServerError,
 )
+from tests.conftest import TemporaryFile
 
 # Marking all tests in this file with the requests marker
 pytestmark = pytest.mark.requests
@@ -332,3 +333,414 @@ def test_code():
         assert execution_result.extensions["key1"] == "val1"
 
     await run_sync_test(event_loop, server, test_code)
+
+
+file_upload_server_answer = '{"data":{"success":true}}'
+
+file_upload_mutation_1 = """
+    mutation($file: Upload!) {
+      uploadFile(input:{other_var:$other_var, file:$file}) {
+        success
+      }
+    }
+"""
+
+file_upload_mutation_1_operations = (
+    '{"query": "mutation ($file: Upload!) {\\n  uploadFile(input: {other_var: '
+    '$other_var, file: $file}) {\\n    success\\n  }\\n}\\n", "variables": '
+    '{"file": null, "other_var": 42}}'
+)
+
+file_upload_mutation_1_map = '{"0": ["variables.file"]}'
+
+file_1_content = """
+This is a test file
+This file will be sent in the GraphQL mutation
+"""
+
+
+@pytest.mark.aiohttp
+@pytest.mark.asyncio
+async def test_requests_file_upload(event_loop, aiohttp_server, run_sync_test):
+    from aiohttp import web
+    from gql.transport.requests import RequestsHTTPTransport
+
+    async def single_upload_handler(request):
+        from aiohttp import web
+
+        reader = await request.multipart()
+
+        field_0 = await reader.next()
+        assert field_0.name == "operations"
+        field_0_text = await field_0.text()
+        assert field_0_text == file_upload_mutation_1_operations
+
+        field_1 = await reader.next()
+        assert field_1.name == "map"
+        field_1_text = await field_1.text()
+        assert field_1_text == file_upload_mutation_1_map
+
+        field_2 = await reader.next()
+        assert field_2.name == "0"
+        field_2_text = await field_2.text()
+        assert field_2_text == file_1_content
+
+        field_3 = await reader.next()
+        assert field_3 is None
+
+        return web.Response(
+            text=file_upload_server_answer, content_type="application/json"
+        )
+
+    app = web.Application()
+    app.router.add_route("POST", "/", single_upload_handler)
+    server = await aiohttp_server(app)
+
+    url = server.make_url("/")
+
+    def test_code():
+        sample_transport = RequestsHTTPTransport(url=url)
+
+        with TemporaryFile(file_1_content) as test_file:
+            with Client(transport=sample_transport) as session:
+                query = gql(file_upload_mutation_1)
+
+                file_path = test_file.filename
+
+                with open(file_path, "rb") as f:
+
+                    params = {"file": f, "other_var": 42}
+                    execution_result = session._execute(
+                        query, variable_values=params, upload_files=True
+                    )
+
+                    assert execution_result.data["success"]
+
+    await run_sync_test(event_loop, server, test_code)
+
+
+@pytest.mark.aiohttp
+@pytest.mark.asyncio
+async def test_requests_file_upload_additional_headers(
+    event_loop, aiohttp_server, run_sync_test
+):
+    from aiohttp import web
+    from gql.transport.requests import RequestsHTTPTransport
+
+    async def single_upload_handler(request):
+        from aiohttp import web
+
+        assert request.headers["X-Auth"] == "foobar"
+
+        reader = await request.multipart()
+
+        field_0 = await reader.next()
+        assert field_0.name == "operations"
+        field_0_text = await field_0.text()
+        assert field_0_text == file_upload_mutation_1_operations
+
+        field_1 = await reader.next()
+        assert field_1.name == "map"
+        field_1_text = await field_1.text()
+        assert field_1_text == file_upload_mutation_1_map
+
+        field_2 = await reader.next()
+        assert field_2.name == "0"
+        field_2_text = await field_2.text()
+        assert field_2_text == file_1_content
+
+        field_3 = await reader.next()
+        assert field_3 is None
+
+        return web.Response(
+            text=file_upload_server_answer, content_type="application/json"
+        )
+
+    app = web.Application()
+    app.router.add_route("POST", "/", single_upload_handler)
+    server = await aiohttp_server(app)
+
+    url = server.make_url("/")
+
+    def test_code():
+        sample_transport = RequestsHTTPTransport(url=url, headers={"X-Auth": "foobar"})
+
+        with TemporaryFile(file_1_content) as test_file:
+            with Client(transport=sample_transport) as session:
+                query = gql(file_upload_mutation_1)
+
+                file_path = test_file.filename
+
+                with open(file_path, "rb") as f:
+
+                    params = {"file": f, "other_var": 42}
+                    execution_result = session._execute(
+                        query, variable_values=params, upload_files=True
+                    )
+
+                    assert execution_result.data["success"]
+
+    await run_sync_test(event_loop, server, test_code)
+
+
+@pytest.mark.aiohttp
+@pytest.mark.asyncio
+async def test_requests_binary_file_upload(event_loop, aiohttp_server, run_sync_test):
+    from aiohttp import web
+    from gql.transport.requests import RequestsHTTPTransport
+
+    # This is a sample binary file content containing all possible byte values
+    binary_file_content = bytes(range(0, 256))
+
+    async def binary_upload_handler(request):
+
+        from aiohttp import web
+
+        reader = await request.multipart()
+
+        field_0 = await reader.next()
+        assert field_0.name == "operations"
+        field_0_text = await field_0.text()
+        assert field_0_text == file_upload_mutation_1_operations
+
+        field_1 = await reader.next()
+        assert field_1.name == "map"
+        field_1_text = await field_1.text()
+        assert field_1_text == file_upload_mutation_1_map
+
+        field_2 = await reader.next()
+        assert field_2.name == "0"
+        field_2_binary = await field_2.read()
+        assert field_2_binary == binary_file_content
+
+        field_3 = await reader.next()
+        assert field_3 is None
+
+        return web.Response(
+            text=file_upload_server_answer, content_type="application/json"
+        )
+
+    app = web.Application()
+    app.router.add_route("POST", "/", binary_upload_handler)
+    server = await aiohttp_server(app)
+
+    url = server.make_url("/")
+
+    sample_transport = RequestsHTTPTransport(url=url)
+
+    def test_code():
+        with TemporaryFile(binary_file_content) as test_file:
+            with Client(transport=sample_transport,) as session:
+
+                query = gql(file_upload_mutation_1)
+
+                file_path = test_file.filename
+
+                with open(file_path, "rb") as f:
+
+                    params = {"file": f, "other_var": 42}
+
+                    execution_result = session._execute(
+                        query, variable_values=params, upload_files=True
+                    )
+
+                    assert execution_result.data["success"]
+
+    await run_sync_test(event_loop, server, test_code)
+
+
+file_upload_mutation_2_operations = (
+    '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n  '
+    'uploadFile(input: {file1: $file, file2: $file}) {\\n    success\\n  }\\n}\\n", '
+    '"variables": {"file1": null, "file2": null}}'
+)
+
+
+@pytest.mark.aiohttp
+@pytest.mark.asyncio
+async def test_requests_file_upload_two_files(
+    event_loop, aiohttp_server, run_sync_test
+):
+    from aiohttp import web
+    from gql.transport.requests import RequestsHTTPTransport
+
+    file_upload_mutation_2 = """
+    mutation($file1: Upload!, $file2: Upload!) {
+      uploadFile(input:{file1:$file, file2:$file}) {
+        success
+      }
+    }
+    """
+
+    file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}'
+
+    file_2_content = """
+    This is a second test file
+    This file will also be sent in the GraphQL mutation
+    """
+
+    async def handler(request):
+
+        reader = await request.multipart()
+
+        field_0 = await reader.next()
+        assert field_0.name == "operations"
+        field_0_text = await field_0.text()
+        assert field_0_text == file_upload_mutation_2_operations
+
+        field_1 = await reader.next()
+        assert field_1.name == "map"
+        field_1_text = await field_1.text()
+        assert field_1_text == file_upload_mutation_2_map
+
+        field_2 = await reader.next()
+        assert field_2.name == "0"
+        field_2_text = await field_2.text()
+        assert field_2_text == file_1_content
+
+        field_3 = await reader.next()
+        assert field_3.name == "1"
+        field_3_text = await field_3.text()
+        assert field_3_text == file_2_content
+
+        field_4 = await reader.next()
+        assert field_4 is None
+
+        return web.Response(
+            text=file_upload_server_answer, content_type="application/json"
+        )
+
+    app = web.Application()
+    app.router.add_route("POST", "/", handler)
+    server = await aiohttp_server(app)
+
+    url = server.make_url("/")
+
+    def test_code():
+        sample_transport = RequestsHTTPTransport(url=url)
+
+        with TemporaryFile(file_1_content) as test_file_1:
+            with TemporaryFile(file_2_content) as test_file_2:
+
+                with Client(transport=sample_transport,) as session:
+
+                    query = gql(file_upload_mutation_2)
+
+                    file_path_1 = test_file_1.filename
+                    file_path_2 = test_file_2.filename
+
+                    f1 = open(file_path_1, "rb")
+                    f2 = open(file_path_2, "rb")
+
+                    params = {
+                        "file1": f1,
+                        "file2": f2,
+                    }
+
+                    execution_result = session._execute(
+                        query, variable_values=params, upload_files=True
+                    )
+
+                    assert execution_result.data["success"]
+
+                    f1.close()
+                    f2.close()
+
+    await run_sync_test(event_loop, server, test_code)
+
+
+file_upload_mutation_3_operations = (
+    '{"query": "mutation ($files: [Upload!]!) {\\n  uploadFiles(input: {files: $files})'
+    ' {\\n    success\\n  }\\n}\\n", "variables": {"files": [null, null]}}'
+)
+
+
+@pytest.mark.aiohttp
+@pytest.mark.asyncio
+async def test_requests_file_upload_list_of_two_files(
+    event_loop, aiohttp_server, run_sync_test
+):
+    from aiohttp import web
+    from gql.transport.requests import RequestsHTTPTransport
+
+    file_upload_mutation_3 = """
+    mutation($files: [Upload!]!) {
+      uploadFiles(input:{files:$files}) {
+        success
+      }
+    }
+    """
+
+    file_upload_mutation_3_map = (
+        '{"0": ["variables.files.0"], "1": ["variables.files.1"]}'
+    )
+
+    file_2_content = """
+    This is a second test file
+    This file will also be sent in the GraphQL mutation
+    """
+
+    async def handler(request):
+
+        reader = await request.multipart()
+
+        field_0 = await reader.next()
+        assert field_0.name == "operations"
+        field_0_text = await field_0.text()
+        assert field_0_text == file_upload_mutation_3_operations
+
+        field_1 = await reader.next()
+        assert field_1.name == "map"
+        field_1_text = await field_1.text()
+        assert field_1_text == file_upload_mutation_3_map
+
+        field_2 = await reader.next()
+        assert field_2.name == "0"
+        field_2_text = await field_2.text()
+        assert field_2_text == file_1_content
+
+        field_3 = await reader.next()
+        assert field_3.name == "1"
+        field_3_text = await field_3.text()
+        assert field_3_text == file_2_content
+
+        field_4 = await reader.next()
+        assert field_4 is None
+
+        return web.Response(
+            text=file_upload_server_answer, content_type="application/json"
+        )
+
+    app = web.Application()
+    app.router.add_route("POST", "/", handler)
+    server = await aiohttp_server(app)
+
+    url = server.make_url("/")
+
+    def test_code():
+        sample_transport = RequestsHTTPTransport(url=url)
+
+        with TemporaryFile(file_1_content) as test_file_1:
+            with TemporaryFile(file_2_content) as test_file_2:
+                with Client(transport=sample_transport,) as session:
+
+                    query = gql(file_upload_mutation_3)
+
+                    file_path_1 = test_file_1.filename
+                    file_path_2 = test_file_2.filename
+
+                    f1 = open(file_path_1, "rb")
+                    f2 = open(file_path_2, "rb")
+
+                    params = {"files": [f1, f2]}
+
+                    execution_result = session._execute(
+                        query, variable_values=params, upload_files=True
+                    )
+
+                    assert execution_result.data["success"]
+
+                    f1.close()
+                    f2.close()
+
+    await run_sync_test(event_loop, server, test_code)
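
To make the wire format concrete, below is a small standalone sketch of the multipart body that the new upload path in RequestsHTTPTransport.execute assembles, following the GraphQL multipart request spec. The query string, file name and variable path are illustrative only; in the transport they come from the user's document and from extract_files.

    import json

    from requests_toolbelt.multipart.encoder import MultipartEncoder

    # One Upload variable named "file"; its value is nulled in the payload,
    # as the GraphQL multipart request spec requires.
    operations = json.dumps(
        {
            "query": "mutation ($file: Upload!) { uploadFile(file: $file) { success } }",
            "variables": {"file": None},
        }
    )

    # Multipart field "0" points at the nulled variable path.
    file_map = json.dumps({"0": ["variables.file"]})

    with open("example.txt", "rb") as f:
        encoder = MultipartEncoder(
            fields={
                "operations": operations,
                "map": file_map,
                "0": ("example.txt", f),
            }
        )
        body = encoder.to_string()  # the multipart payload that gets POSTed
        content_type = encoder.content_type  # includes the multipart boundary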