From 68585b38cb5c4d2fea3dfe013dcab0efb46e553b Mon Sep 17 00:00:00 2001 From: Leah Date: Sat, 11 Jul 2020 21:57:53 -0400 Subject: [PATCH 01/23] add file utils --- gql/utils.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/gql/utils.py b/gql/utils.py index 8f47d97d..33318eeb 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -1,5 +1,8 @@ """Utilities to manipulate several python objects.""" +import io +from typing import Dict, List, Any, Union + # From this response in Stackoverflow # http://stackoverflow.com/a/19053800/1072990 @@ -8,3 +11,27 @@ def to_camel_case(snake_str): # We capitalize the first letter of each component except the first one # with the 'title' method and join them together. return components[0] + "".join(x.title() if x else "_" for x in components[1:]) + + +def is_file_like(value: Any) -> bool: + """Check if a value represents a file like object""" + return isinstance(value, io.IOBase) + + +def is_file_like_list(value: Any) -> bool: + """Check if value is a list and if all items in the list are file-like""" + return isinstance(value, list) and all(is_file_like(item) for item in value) + + +def contains_file_like_values(value: Any) -> bool: + return is_file_like(value) or is_file_like_list(value) + + +def get_file_variables( + variables: Dict[str, Any] +) -> Dict[str, Union[io.IOBase, List[io.IOBase]]]: + return { + variable: value + for variable, value in variables.items() + if contains_file_like_values(value) + } From aabd2de9029355d56e84b3cf2c5870bc84ab5d7e Mon Sep 17 00:00:00 2001 From: Leah Date: Sat, 11 Jul 2020 21:59:49 -0400 Subject: [PATCH 02/23] initial add for file support --- gql/transport/requests.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/gql/transport/requests.py b/gql/transport/requests.py index 8eb4b2f8..bb3feb63 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -1,4 +1,5 @@ from typing import Any, Dict, Optional, Union 
+import json import requests from graphql import DocumentNode, ExecutionResult, print_ast @@ -8,6 +9,7 @@ from gql.transport import Transport +from .utils import get_file_variables, is_file_like, is_file_like_list from .exceptions import ( TransportAlreadyConnected, TransportClosed, @@ -120,7 +122,28 @@ def execute( # type: ignore query_str = print_ast(document) payload: Dict[str, Any] = {"query": query_str} if variable_values: - payload["variables"] = variable_values + file_variables = get_file_variables(variable_values) + if file_variables: + map_ = { + file_variable: [f"variables.{file_variable}"] + for file_variable, value in file_variables.items() + } + all_variables = { + **variable_values, + **{ + file_variable: None + for file_variable in file_variables.values() + } + } + file_payload = { + "operations": + json.dumps( + {"query": query_str, "variables": all_variables} + ), + "map": json.dumps(map_) + } + else: + payload["variables"] = variable_values if operation_name: payload["operationName"] = operation_name From f6ffdd382074bad86347765c4fc70abf1f1823ab Mon Sep 17 00:00:00 2001 From: Michael Liu Date: Fri, 17 Jul 2020 04:22:46 +0800 Subject: [PATCH 03/23] Implement file handling in aiohttp.py --- gql/transport/aiohttp.py | 29 +++++++++++++++--- gql/transport/requests.py | 46 ++++++++++++++-------------- gql/utils.py | 63 ++++++++++++++++++++++++++++++--------- 3 files changed, 97 insertions(+), 41 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index fa66e4db..e6da8ec3 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -1,5 +1,6 @@ from ssl import SSLContext from typing import Any, AsyncGenerator, Dict, Optional, Union +import json import aiohttp from aiohttp.client_exceptions import ClientResponseError @@ -15,6 +16,7 @@ TransportProtocolError, TransportServerError, ) +from ..utils import extract_files class AIOHTTPTransport(AsyncTransport): @@ -103,15 +105,34 @@ async def execute( """ query_str = 
print_ast(document) + + nulled_variable_values, files = extract_files(variable_values) + payload = { "query": query_str, - "variables": variable_values or {}, + "variables": nulled_variable_values or {}, "operationName": operation_name or "", } - post_args = { - "json": payload, - } + if files: + data = aiohttp.FormData() + operations_json = json.dumps(cls.prepare_json_data(query, variables, operation)) + + file_map = {str(i): [path] for i, path in enumerate(files)} # header + # path is nested in a list because the spec allows multiple pointers to the same file. + # But we don't use that. + file_streams = {str(i): files[path] for i, path in enumerate(files)} # payload + + data.add_field('operations', operations_json, content_type='application/json') + data.add_field('map', json.dumps(file_map), content_type='application/json') + data.add_fields(*file_streams.items()) + + post_args = { "data": payload } + + else: + post_args = { "json": payload } + + # Pass post_args to aiohttp post method post_args.update(extra_args) diff --git a/gql/transport/requests.py b/gql/transport/requests.py index bb3feb63..d38e2f5b 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -9,7 +9,7 @@ from gql.transport import Transport -from .utils import get_file_variables, is_file_like, is_file_like_list +# from ..utils import get_file_variables, is_file_like, is_file_like_list from .exceptions import ( TransportAlreadyConnected, TransportClosed, @@ -122,28 +122,28 @@ def execute( # type: ignore query_str = print_ast(document) payload: Dict[str, Any] = {"query": query_str} if variable_values: - file_variables = get_file_variables(variable_values) - if file_variables: - map_ = { - file_variable: [f"variables.{file_variable}"] - for file_variable, value in file_variables.items() - } - all_variables = { - **variable_values, - **{ - file_variable: None - for file_variable in file_variables.values() - } - } - file_payload = { - "operations": - json.dumps( - {"query": 
query_str, "variables": all_variables} - ), - "map": json.dumps(map_) - } - else: - payload["variables"] = variable_values + # file_variables = get_file_variables(variable_values) + # if file_variables: + # map_ = { + # file_variable: [f"variables.{file_variable}"] + # for file_variable, value in file_variables.items() + # } + # all_variables = { + # **variable_values, + # **{ + # file_variable: None + # for file_variable in file_variables.values() + # } + # } + # file_payload = { + # "operations": + # json.dumps( + # {"query": query_str, "variables": all_variables} + # ), + # "map": json.dumps(map_) + # } + # else: + payload["variables"] = variable_values if operation_name: payload["operationName"] = operation_name diff --git a/gql/utils.py b/gql/utils.py index 33318eeb..47203f0d 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -1,7 +1,7 @@ """Utilities to manipulate several python objects.""" import io -from typing import Dict, List, Any, Union +from typing import Dict, List, Any, Union, Tuple # From this response in Stackoverflow @@ -18,20 +18,55 @@ def is_file_like(value: Any) -> bool: return isinstance(value, io.IOBase) -def is_file_like_list(value: Any) -> bool: - """Check if value is a list and if all items in the list are file-like""" - return isinstance(value, list) and all(is_file_like(item) for item in value) +# def is_file_like_list(value: Any) -> bool: +# """Check if value is a list and if all items in the list are file-like""" +# return isinstance(value, list) and all(is_file_like(item) for item in value) -def contains_file_like_values(value: Any) -> bool: - return is_file_like(value) or is_file_like_list(value) +# def contains_file_like_values(value: Any) -> bool: +# return is_file_like(value) or is_file_like_list(value) -def get_file_variables( - variables: Dict[str, Any] -) -> Dict[str, Union[io.IOBase, List[io.IOBase]]]: - return { - variable: value - for variable, value in variables.items() - if contains_file_like_values(value) - } +# def 
get_file_variables(variables: Dict[str, Any]) -> Dict[str, Union[io.IOBase, List[io.IOBase]]]: +# return { +# variable: value +# for variable, value in variables.items() +# if contains_file_like_values(value) +# } + + + +def extract_files(variables: dict) -> Tuple[dict, dict, list]: + files = {} + + def recurse_extract(path, obj): + """ + recursively traverse obj, doing a deepcopy, but + replacing any file-like objects with nulls and + shunting the originals off to the side. + """ + nonlocal files + if type(obj) is list: + nulled_obj = [] + for key, value in enumerate(obj): + value = recurse_extract(f'{path}.{key}', value) + nulled_obj.append(value) + # TODO: merge this with dict case below. somehow. + return nulled_obj + elif type(obj) is dict: + nulled_obj = {} + for key, value in obj.items(): + value = recurse_extract(f'{path}.{key}', value) + nulled_obj[key] = value + return nulled_obj + elif is_file_like(obj): + # extract obj from its parent and put it into files instead. + files[path] = obj + return None + else: + # base case: pass through unchanged + return obj + + nulled_variables = recurse_extract('variables', variables) + + return nulled_variables, files \ No newline at end of file From e637983e3a408501a1aa16127c12602b13730668 Mon Sep 17 00:00:00 2001 From: Michael Liu Date: Sat, 1 Aug 2020 05:44:41 +0800 Subject: [PATCH 04/23] Fix JSON serialization, remove comments, conform to double quotes --- gql/transport/aiohttp.py | 10 +++++----- gql/transport/requests.py | 21 --------------------- gql/utils.py | 24 +++--------------------- 3 files changed, 8 insertions(+), 47 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index e6da8ec3..70403f6f 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -110,21 +110,21 @@ async def execute( payload = { "query": query_str, - "variables": nulled_variable_values or {}, + "variables": json.dumps(nulled_variable_values) or "{}", "operationName": operation_name or "", } if 
files: data = aiohttp.FormData() - operations_json = json.dumps(cls.prepare_json_data(query, variables, operation)) + operations_json = json.dumps(payload) file_map = {str(i): [path] for i, path in enumerate(files)} # header # path is nested in a list because the spec allows multiple pointers to the same file. # But we don't use that. - file_streams = {str(i): files[path] for i, path in enumerate(files)} # payload + file_streams = {i: files[path] for i, path in enumerate(files)} # payload - data.add_field('operations', operations_json, content_type='application/json') - data.add_field('map', json.dumps(file_map), content_type='application/json') + data.add_field("operations", operations_json, content_type="application/json") + data.add_field("map", json.dumps(file_map), content_type="application/json") data.add_fields(*file_streams.items()) post_args = { "data": payload } diff --git a/gql/transport/requests.py b/gql/transport/requests.py index d38e2f5b..59c7ac48 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -122,27 +122,6 @@ def execute( # type: ignore query_str = print_ast(document) payload: Dict[str, Any] = {"query": query_str} if variable_values: - # file_variables = get_file_variables(variable_values) - # if file_variables: - # map_ = { - # file_variable: [f"variables.{file_variable}"] - # for file_variable, value in file_variables.items() - # } - # all_variables = { - # **variable_values, - # **{ - # file_variable: None - # for file_variable in file_variables.values() - # } - # } - # file_payload = { - # "operations": - # json.dumps( - # {"query": query_str, "variables": all_variables} - # ), - # "map": json.dumps(map_) - # } - # else: payload["variables"] = variable_values if operation_name: payload["operationName"] = operation_name diff --git a/gql/utils.py b/gql/utils.py index 47203f0d..6e6decf5 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -18,24 +18,6 @@ def is_file_like(value: Any) -> bool: return isinstance(value, 
io.IOBase) -# def is_file_like_list(value: Any) -> bool: -# """Check if value is a list and if all items in the list are file-like""" -# return isinstance(value, list) and all(is_file_like(item) for item in value) - - -# def contains_file_like_values(value: Any) -> bool: -# return is_file_like(value) or is_file_like_list(value) - - -# def get_file_variables(variables: Dict[str, Any]) -> Dict[str, Union[io.IOBase, List[io.IOBase]]]: -# return { -# variable: value -# for variable, value in variables.items() -# if contains_file_like_values(value) -# } - - - def extract_files(variables: dict) -> Tuple[dict, dict, list]: files = {} @@ -49,14 +31,14 @@ def recurse_extract(path, obj): if type(obj) is list: nulled_obj = [] for key, value in enumerate(obj): - value = recurse_extract(f'{path}.{key}', value) + value = recurse_extract(f"{path}.{key}", value) nulled_obj.append(value) # TODO: merge this with dict case below. somehow. return nulled_obj elif type(obj) is dict: nulled_obj = {} for key, value in obj.items(): - value = recurse_extract(f'{path}.{key}', value) + value = recurse_extract(f"{path}.{key}", value) nulled_obj[key] = value return nulled_obj elif is_file_like(obj): @@ -67,6 +49,6 @@ def recurse_extract(path, obj): # base case: pass through unchanged return obj - nulled_variables = recurse_extract('variables', variables) + nulled_variables = recurse_extract("variables", variables) return nulled_variables, files \ No newline at end of file From 9e86b03d9b4d152e40b7181ae3e316213d5f6d34 Mon Sep 17 00:00:00 2001 From: Michael Liu Date: Sat, 1 Aug 2020 06:33:37 +0800 Subject: [PATCH 05/23] Fix more JSON serialization --- gql/transport/aiohttp.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index 70403f6f..de8eb19d 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -110,24 +110,23 @@ async def execute( payload = { "query": query_str, - "variables": 
json.dumps(nulled_variable_values) or "{}", + "variables": nulled_variable_values or {}, "operationName": operation_name or "", } if files: data = aiohttp.FormData() - operations_json = json.dumps(payload) file_map = {str(i): [path] for i, path in enumerate(files)} # header # path is nested in a list because the spec allows multiple pointers to the same file. # But we don't use that. - file_streams = {i: files[path] for i, path in enumerate(files)} # payload + file_streams = {str(i): files[path] for i, path in enumerate(files)} # payload - data.add_field("operations", operations_json, content_type="application/json") + data.add_field("operations", json.dumps(payload), content_type="application/json") data.add_field("map", json.dumps(file_map), content_type="application/json") data.add_fields(*file_streams.items()) - post_args = { "data": payload } + post_args = { "data": data } else: post_args = { "json": payload } From c2e38dc1ad18a95805a0e3d3fa2e646d3c195798 Mon Sep 17 00:00:00 2001 From: Michael Liu Date: Sat, 1 Aug 2020 06:58:02 +0800 Subject: [PATCH 06/23] Cleanup --- gql/transport/aiohttp.py | 2 +- gql/transport/requests.py | 2 -- gql/utils.py | 11 +++++------ 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index de8eb19d..374ad358 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -1,6 +1,6 @@ +import json from ssl import SSLContext from typing import Any, AsyncGenerator, Dict, Optional, Union -import json import aiohttp from aiohttp.client_exceptions import ClientResponseError diff --git a/gql/transport/requests.py b/gql/transport/requests.py index 59c7ac48..8eb4b2f8 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -1,5 +1,4 @@ from typing import Any, Dict, Optional, Union -import json import requests from graphql import DocumentNode, ExecutionResult, print_ast @@ -9,7 +8,6 @@ from gql.transport import Transport -# from ..utils import 
get_file_variables, is_file_like, is_file_like_list from .exceptions import ( TransportAlreadyConnected, TransportClosed, diff --git a/gql/utils.py b/gql/utils.py index 6e6decf5..591b92a2 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -1,7 +1,7 @@ """Utilities to manipulate several python objects.""" import io -from typing import Dict, List, Any, Union, Tuple +from typing import Dict, Any, Tuple # From this response in Stackoverflow @@ -18,7 +18,7 @@ def is_file_like(value: Any) -> bool: return isinstance(value, io.IOBase) -def extract_files(variables: dict) -> Tuple[dict, dict, list]: +def extract_files(variables: Dict) -> Tuple[Dict, Dict]: files = {} def recurse_extract(path, obj): @@ -28,14 +28,13 @@ def recurse_extract(path, obj): shunting the originals off to the side. """ nonlocal files - if type(obj) is list: + if isinstance(obj, list): nulled_obj = [] for key, value in enumerate(obj): value = recurse_extract(f"{path}.{key}", value) nulled_obj.append(value) - # TODO: merge this with dict case below. somehow. 
return nulled_obj - elif type(obj) is dict: + elif isinstance(obj, dict): nulled_obj = {} for key, value in obj.items(): value = recurse_extract(f"{path}.{key}", value) @@ -51,4 +50,4 @@ def recurse_extract(path, obj): nulled_variables = recurse_extract("variables", variables) - return nulled_variables, files \ No newline at end of file + return nulled_variables, files From 8839141d59b0666935ee75658b13ba0520a42fea Mon Sep 17 00:00:00 2001 From: Michael Liu Date: Sat, 1 Aug 2020 07:34:13 +0800 Subject: [PATCH 07/23] Blackened --- gql/transport/aiohttp.py | 2 +- gql/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index f8fee87b..ca394cd2 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -9,6 +9,7 @@ from aiohttp.typedefs import LooseCookies, LooseHeaders from graphql import DocumentNode, ExecutionResult, print_ast +from ..utils import extract_files from .async_transport import AsyncTransport from .exceptions import ( TransportAlreadyConnected, @@ -16,7 +17,6 @@ TransportProtocolError, TransportServerError, ) -from ..utils import extract_files class AIOHTTPTransport(AsyncTransport): diff --git a/gql/utils.py b/gql/utils.py index 591b92a2..ce0318b0 100644 --- a/gql/utils.py +++ b/gql/utils.py @@ -1,7 +1,7 @@ """Utilities to manipulate several python objects.""" import io -from typing import Dict, Any, Tuple +from typing import Any, Dict, Tuple # From this response in Stackoverflow From 194d6457c03bf68a5309b9361467250ef008a969 Mon Sep 17 00:00:00 2001 From: Manuel Bojato <30560560+KingDarBoja@users.noreply.github.com> Date: Sat, 19 Sep 2020 12:07:58 -0500 Subject: [PATCH 08/23] fix: safe check if parameters are none on aiohttp --- gql/transport/aiohttp.py | 37 +++++++++++++++++++++++-------------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index ca394cd2..7d71f3a9 100644 --- 
a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -35,7 +35,7 @@ def __init__( auth: Optional[BasicAuth] = None, ssl: Union[SSLContext, bool, Fingerprint] = False, timeout: Optional[int] = None, - client_session_args: Dict[str, Any] = {}, + client_session_args: Optional[Dict[str, Any]] = None, ) -> None: """Initialize the transport with the given aiohttp parameters. @@ -53,7 +53,6 @@ def __init__( self.ssl: Union[SSLContext, bool, Fingerprint] = ssl self.timeout: Optional[int] = timeout self.client_session_args = client_session_args - self.session: Optional[aiohttp.ClientSession] = None async def connect(self) -> None: @@ -78,7 +77,8 @@ async def connect(self) -> None: ) # Adding custom parameters passed from init - client_session_args.update(self.client_session_args) + if self.client_session_args: + client_session_args.update(self.client_session_args) # type: ignore self.session = aiohttp.ClientSession(**client_session_args) @@ -95,7 +95,7 @@ async def execute( document: DocumentNode, variable_values: Optional[Dict[str, str]] = None, operation_name: Optional[str] = None, - extra_args: Dict[str, Any] = {}, + extra_args: Dict[str, Any] = None, ) -> ExecutionResult: """Execute the provided document AST against the configured remote server. This uses the aiohttp library to perform a HTTP POST request asynchronously @@ -106,7 +106,10 @@ async def execute( query_str = print_ast(document) - nulled_variable_values, files = extract_files(variable_values) + nulled_variable_values = None + files = None + if variable_values: + nulled_variable_values, files = extract_files(variable_values) payload: Dict[str, Any] = { "query": query_str, @@ -120,22 +123,28 @@ async def execute( if files: data = aiohttp.FormData() - file_map = {str(i): [path] for i, path in enumerate(files)} # header - # path is nested in a list because the spec allows multiple pointers to the same file. - # But we don't use that. 
- file_streams = {str(i): files[path] for i, path in enumerate(files)} # payload - - data.add_field("operations", json.dumps(payload), content_type="application/json") + # header + file_map = {str(i): [path] for i, path in enumerate(files)} + # path is nested in a list because the spec allows multiple pointers + # to the same file. But we don't use that. + file_streams = { + str(i): files[path] for i, path in enumerate(files) + } # payload + + data.add_field( + "operations", json.dumps(payload), content_type="application/json" + ) data.add_field("map", json.dumps(file_map), content_type="application/json") data.add_fields(*file_streams.items()) - post_args = { "data": data } + post_args = {"data": data} else: - post_args = { "json": payload } + post_args = {"json": payload} # type: ignore # Pass post_args to aiohttp post method - post_args.update(extra_args) + if extra_args: + post_args.update(extra_args) # type: ignore if self.session is None: raise TransportClosed("Transport is not connected") From 2a020adfb766301ecb6cd3b22ad0bafa29d3299d Mon Sep 17 00:00:00 2001 From: Manuel Bojato <30560560+KingDarBoja@users.noreply.github.com> Date: Sun, 20 Sep 2020 14:53:09 -0500 Subject: [PATCH 09/23] chore: generate docs with sphinx (#117) * chore: generate docs with sphinx * chore: avoid documenting private members * chore: add docs to manifiest * chore: manually document every class on transport * Write docs in sphinx rst format * fix manifest * Improve classes reference documentation Co-authored-by: Hanusz Leszek --- MANIFEST.in | 3 + Makefile | 7 +- docs/Makefile | 20 ++++++ docs/advanced/async_advanced_usage.rst | 57 +++++++++++++++ docs/advanced/dsl_module.rst | 34 +++++++++ docs/advanced/index.rst | 9 +++ docs/advanced/local_schema.rst | 25 +++++++ docs/async/async_intro.rst | 18 +++++ docs/async/async_usage.rst | 17 +++++ docs/async/index.rst | 10 +++ docs/code_examples/aiohttp_async.py | 28 ++++++++ docs/code_examples/aiohttp_sync.py | 23 ++++++ 
docs/code_examples/requests_sync.py | 25 +++++++ docs/code_examples/websockets_async.py | 41 +++++++++++ docs/conf.py | 77 ++++++++++++++++++++ docs/index.rst | 28 ++++++++ docs/intro.rst | 39 ++++++++++ docs/make.bat | 35 +++++++++ docs/modules/client.rst | 6 ++ docs/modules/gql.rst | 22 ++++++ docs/modules/transport.rst | 16 +++++ docs/requirements.txt | 2 + docs/transports/aiohttp.rst | 13 ++++ docs/transports/async_transports.rst | 14 ++++ docs/transports/index.rst | 13 ++++ docs/transports/phoenix.rst | 10 +++ docs/transports/requests.rst | 9 +++ docs/transports/sync_transports.rst | 12 ++++ docs/transports/websockets.rst | 71 ++++++++++++++++++ docs/usage/basic_usage.rst | 21 ++++++ docs/usage/headers.rst | 8 +++ docs/usage/index.rst | 11 +++ docs/usage/subscriptions.rst | 28 ++++++++ docs/usage/validation.rst | 43 +++++++++++ docs/usage/variables.rst | 32 +++++++++ gql/__init__.py | 9 +++ gql/client.py | 80 ++++++++++++++++++--- gql/gql.py | 11 +++ gql/transport/aiohttp.py | 43 ++++++++--- gql/transport/phoenix_channel_websockets.py | 21 ++++-- gql/transport/requests.py | 5 +- gql/transport/websockets.py | 10 +-- setup.py | 2 + tox.ini | 6 ++ 44 files changed, 982 insertions(+), 32 deletions(-) create mode 100644 docs/Makefile create mode 100644 docs/advanced/async_advanced_usage.rst create mode 100644 docs/advanced/dsl_module.rst create mode 100644 docs/advanced/index.rst create mode 100644 docs/advanced/local_schema.rst create mode 100644 docs/async/async_intro.rst create mode 100644 docs/async/async_usage.rst create mode 100644 docs/async/index.rst create mode 100644 docs/code_examples/aiohttp_async.py create mode 100644 docs/code_examples/aiohttp_sync.py create mode 100644 docs/code_examples/requests_sync.py create mode 100644 docs/code_examples/websockets_async.py create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/intro.rst create mode 100644 docs/make.bat create mode 100644 docs/modules/client.rst create mode 
100644 docs/modules/gql.rst create mode 100644 docs/modules/transport.rst create mode 100644 docs/requirements.txt create mode 100644 docs/transports/aiohttp.rst create mode 100644 docs/transports/async_transports.rst create mode 100644 docs/transports/index.rst create mode 100644 docs/transports/phoenix.rst create mode 100644 docs/transports/requests.rst create mode 100644 docs/transports/sync_transports.rst create mode 100644 docs/transports/websockets.rst create mode 100644 docs/usage/basic_usage.rst create mode 100644 docs/usage/headers.rst create mode 100644 docs/usage/index.rst create mode 100644 docs/usage/subscriptions.rst create mode 100644 docs/usage/validation.rst create mode 100644 docs/usage/variables.rst diff --git a/MANIFEST.in b/MANIFEST.in index 72ce48f7..fbaa10b4 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,7 +13,10 @@ include tox.ini include scripts/gql-cli recursive-include tests *.py *.graphql *.cnf *.yaml *.pem +recursive-include docs *.txt *.rst conf.py Makefile make.bat *.jpg *.png *.gif +recursive-include docs/code_examples *.py +prune docs/_build prune gql-checker global-exclude *.py[co] __pycache__ diff --git a/Makefile b/Makefile index a425508a..27913507 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: clean tests +.PHONY: clean tests docs dev-setup: python pip install -e ".[test]" @@ -16,6 +16,10 @@ check: mypy gql tests check-manifest +docs: + rm -rf ./docs/_build + cd docs; make html + clean: find . -name "*.pyc" -delete find . -name "__pycache__" | xargs -I {} rm -rf {} @@ -26,4 +30,5 @@ clean: rm -rf ./gql.egg-info rm -rf ./dist rm -rf ./build + rm -rf ./docs/_build rm -f ./.coverage diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d4bb2cbb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. 
+SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/advanced/async_advanced_usage.rst b/docs/advanced/async_advanced_usage.rst new file mode 100644 index 00000000..4164cb37 --- /dev/null +++ b/docs/advanced/async_advanced_usage.rst @@ -0,0 +1,57 @@ +.. _async_advanced_usage: + +Async advanced usage +==================== + +It is possible to send multiple GraphQL queries (query, mutation or subscription) in parallel, +on the same websocket connection, using asyncio tasks. + +In order to retry in case of connection failure, we can use the great `backoff`_ module. + +.. code-block:: python + + # First define all your queries using a session argument: + + async def execute_query1(session): + result = await session.execute(query1) + print(result) + + async def execute_query2(session): + result = await session.execute(query2) + print(result) + + async def execute_subscription1(session): + async for result in session.subscribe(subscription1): + print(result) + + async def execute_subscription2(session): + async for result in session.subscribe(subscription2): + print(result) + + # Then create a couroutine which will connect to your API and run all your queries as tasks. + # We use a `backoff` decorator to reconnect using exponential backoff in case of connection failure. 
+ + @backoff.on_exception(backoff.expo, Exception, max_time=300) + async def graphql_connection(): + + transport = WebsocketsTransport(url="wss://YOUR_URL") + + client = Client(transport=transport, fetch_schema_from_transport=True) + + async with client as session: + task1 = asyncio.create_task(execute_query1(session)) + task2 = asyncio.create_task(execute_query2(session)) + task3 = asyncio.create_task(execute_subscription1(session)) + task4 = asyncio.create_task(execute_subscription2(session)) + + await asyncio.gather(task1, task2, task3, task4) + + asyncio.run(graphql_connection()) + +Subscriptions tasks can be stopped at any time by running + +.. code-block:: python + + task.cancel() + +.. _backoff: https://github.com/litl/backoff diff --git a/docs/advanced/dsl_module.rst b/docs/advanced/dsl_module.rst new file mode 100644 index 00000000..aa6638df --- /dev/null +++ b/docs/advanced/dsl_module.rst @@ -0,0 +1,34 @@ +Compose queries dynamically +=========================== + +Instead of providing the GraphQL queries as a Python String, it is also possible to create GraphQL queries dynamically. +Using the DSL module, we can create a query using a Domain Specific Language which is created from the schema. + +.. code-block:: python + + from gql.dsl import DSLSchema + + client = Client(schema=StarWarsSchema) + ds = DSLSchema(client) + + query_dsl = ds.Query.hero.select( + ds.Character.id, + ds.Character.name, + ds.Character.friends.select(ds.Character.name,), + ) + +will create a query equivalent to: + +.. code-block:: python + + hero { + id + name + friends { + name + } + } + +.. warning:: + + Please note that the DSL module is still considered experimental in GQL 3 and is subject to changes diff --git a/docs/advanced/index.rst b/docs/advanced/index.rst new file mode 100644 index 00000000..3767112b --- /dev/null +++ b/docs/advanced/index.rst @@ -0,0 +1,9 @@ +Advanced +======== + +.. 
toctree:: + :maxdepth: 2 + + async_advanced_usage + local_schema + dsl_module diff --git a/docs/advanced/local_schema.rst b/docs/advanced/local_schema.rst new file mode 100644 index 00000000..1ceadb12 --- /dev/null +++ b/docs/advanced/local_schema.rst @@ -0,0 +1,25 @@ +Execution on a local schema +=========================== + +It is also possible to execute queries against a local schema (so without a transport), even +if it is not really useful except maybe for testing. + +.. code-block:: python + + from gql import gql, Client + + from .someSchema import SampleSchema + + client = Client(schema=SampleSchema) + + query = gql(''' + { + hello + } + ''') + + result = client.execute(query) + +See `tests/starwars/test_query.py`_ for an example + +.. _tests/starwars/test_query.py: https://github.com/graphql-python/gql/blob/master/tests/starwars/test_query.py diff --git a/docs/async/async_intro.rst b/docs/async/async_intro.rst new file mode 100644 index 00000000..6d4fea37 --- /dev/null +++ b/docs/async/async_intro.rst @@ -0,0 +1,18 @@ +On previous versions of GQL, the code was `sync` only , it means that when you ran +`execute` on the Client, you could do nothing else in the current Thread and had to wait for +an answer or a timeout from the backend to continue. The only http library was `requests`, allowing only sync usage. + +From the version 3 of GQL, we support `sync` and `async` :ref:`transports ` using `asyncio`_. + +With the :ref:`async transports `, there is now the possibility to execute GraphQL requests +asynchronously, :ref:`allowing to execute multiple requests in parallel if needed `. + +If you don't care or need async functionality, it is still possible, with :ref:`async transports `, +to run the `execute` or `subscribe` methods directly from the Client +(as described in the :ref:`Basic Usage ` example) and GQL will execute the request +in a synchronous manner by running an asyncio event loop itself. 
+ +This won't work though if you already have an asyncio event loop running. In that case you should use +:ref:`Async Usage ` + +.. _asyncio: https://docs.python.org/3/library/asyncio.html diff --git a/docs/async/async_usage.rst b/docs/async/async_usage.rst new file mode 100644 index 00000000..f0183751 --- /dev/null +++ b/docs/async/async_usage.rst @@ -0,0 +1,17 @@ +.. _async_usage: + +Async Usage +=========== + +If you use an :ref:`async transport `, you can use GQL asynchronously using `asyncio`_. + +* put your code in an asyncio coroutine (method starting with :code:`async def`) +* use :code:`async with client as session:` to connect to the backend and provide a session instance +* use the :code:`await` keyword to execute requests: :code:`await session.execute(...)` +* then run your coroutine in an asyncio event loop by running :code:`asyncio.run` + +Example: + +.. literalinclude:: ../code_examples/aiohttp_async.py + +.. _asyncio: https://docs.python.org/3/library/asyncio.html diff --git a/docs/async/index.rst b/docs/async/index.rst new file mode 100644 index 00000000..3f3d2a8a --- /dev/null +++ b/docs/async/index.rst @@ -0,0 +1,10 @@ +Async vs Sync +============= + +.. include:: async_intro.rst + +.. 
toctree:: + :hidden: + :maxdepth: 1 + + async_usage diff --git a/docs/code_examples/aiohttp_async.py b/docs/code_examples/aiohttp_async.py new file mode 100644 index 00000000..dec11c69 --- /dev/null +++ b/docs/code_examples/aiohttp_async.py @@ -0,0 +1,28 @@ +from gql import gql, AIOHTTPTransport, Client +import asyncio + +async def main(): + + transport = AIOHTTPTransport(url='https://countries.trevorblades.com/graphql') + + # Using `async with` on the client will start a connection on the transport + # and provide a `session` variable to execute queries on this connection + async with Client( + transport=transport, + fetch_schema_from_transport=True, + ) as session: + + # Execute single query + query = gql(''' + query getContinents { + continents { + code + name + } + } + ''') + + result = await session.execute(query) + print(result) + +asyncio.run(main()) diff --git a/docs/code_examples/aiohttp_sync.py b/docs/code_examples/aiohttp_sync.py new file mode 100644 index 00000000..296d4533 --- /dev/null +++ b/docs/code_examples/aiohttp_sync.py @@ -0,0 +1,23 @@ +from gql import gql, Client, AIOHTTPTransport + +# Select your transport with a defined url endpoint +transport = AIOHTTPTransport(url="https://countries.trevorblades.com/") + +# Create a GraphQL client using the defined transport +client = Client(transport=transport, fetch_schema_from_transport=True) + +# Provide a GraphQL query +query = gql( + """ + query getContinents { + continents { + code + name + } + } +""" +) + +# Execute the query on the transport +result = client.execute(query) +print(result) diff --git a/docs/code_examples/requests_sync.py b/docs/code_examples/requests_sync.py new file mode 100644 index 00000000..eb821add --- /dev/null +++ b/docs/code_examples/requests_sync.py @@ -0,0 +1,25 @@ +from gql import gql, Client +from gql.transport.requests import RequestsHTTPTransport + +sample_transport=RequestsHTTPTransport( + url='https://countries.trevorblades.com/', + verify=True, + retries=3, +) + 
+client = Client(
+    transport=sample_transport,
+    fetch_schema_from_transport=True,
+)
+
+query = gql('''
+    query getContinents {
+      continents {
+        code
+        name
+      }
+    }
+''')
+
+result = client.execute(query)
+print(result)
diff --git a/docs/code_examples/websockets_async.py b/docs/code_examples/websockets_async.py
new file mode 100644
index 00000000..b91b442f
--- /dev/null
+++ b/docs/code_examples/websockets_async.py
@@ -0,0 +1,41 @@
+import logging
+logging.basicConfig(level=logging.INFO)
+
+from gql import gql, Client, WebsocketsTransport
+import asyncio
+
+async def main():
+
+    transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
+
+    # Using `async with` on the client will start a connection on the transport
+    # and provide a `session` variable to execute queries on this connection
+    async with Client(
+        transport=transport,
+        fetch_schema_from_transport=True,
+    ) as session:
+
+        # Execute single query
+        query = gql('''
+            query getContinents {
+              continents {
+                code
+                name
+              }
+            }
+        ''')
+        result = await session.execute(query)
+        print(result)
+
+        # Request subscription
+        subscription = gql('''
+            subscription {
+              somethingChanged {
+                id
+              }
+            }
+        ''')
+        async for result in session.subscribe(subscription):
+            print(result)
+
+asyncio.run(main())
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 00000000..f1c281c1
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,77 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys +sys.path.insert(0, os.path.abspath('./..')) + + +# -- Project information ----------------------------------------------------- + +project = 'gql 3' +copyright = '2020, graphql-python.org' +author = 'graphql-python.org' + +# The full version, including alpha/beta/rc tags +release = '3.0.0a1' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx_rtd_theme' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'gql-3-doc' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". 
+# html_static_path = ['_static'] + +# -- AutoDoc configuration ------------------------------------------------- +# autoclass_content = "both" +autodoc_default_options = { + 'members': True, + 'inherited-members': True, + 'special-members': '__init__', + 'undoc-members': True, + 'show-inheritance': True +} +autosummary_generate = True diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..ead330e8 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,28 @@ +Welcome to GQL 3 documentation! +================================= + +.. warning:: + + Please note that the following documentation describes the current version which is currently only available + as a pre-release and needs to be installed with "`--pre`" + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + + intro + usage/index + async/index + transports/index + advanced/index + modules/gql + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/intro.rst b/docs/intro.rst new file mode 100644 index 00000000..9ef1f2db --- /dev/null +++ b/docs/intro.rst @@ -0,0 +1,39 @@ +Introduction +============ + +`GQL 3`_ is a `GraphQL`_ Client for Python 3.6+ which plays nicely with other graphql implementations compatible with the spec. + +Under the hood, it uses `GraphQL-core`_ which is a Python port of `GraphQL.js`_, +the JavaScript reference implementation for GraphQL. + +Installation +------------ + +You can install GQL 3 using pip_:: + + pip install --pre gql + +.. warning:: + + Please note that the following documentation describes the current version which is currently only available + as a pre-release and needs to be installed with "`--pre`" + +After installation, you can start using GQL by importing from the top-level +:mod:`gql` package. 
+ +Reporting Issues and Contributing +--------------------------------- + +Please visit the `GitHub repository for gql`_ if you're interested in the current development or +want to report issues or send pull requests. + +We welcome all kinds of contributions if the coding guidelines are respected. +Please check the `Contributing`_ file to learn how to make a good pull request. + +.. _GraphQL: https://graphql.org/ +.. _GraphQL-core: https://github.com/graphql-python/graphql-core +.. _GraphQL.js: https://github.com/graphql/graphql-js +.. _GQL 3: https://github.com/graphql-python/gql +.. _pip: https://pip.pypa.io/ +.. _GitHub repository for gql: https://github.com/graphql-python/gql +.. _Contributing: https://github.com/graphql-python/gql/blob/master/CONTRIBUTING.md diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..9534b018 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/modules/client.rst b/docs/modules/client.rst new file mode 100644 index 00000000..954b4e61 --- /dev/null +++ b/docs/modules/client.rst @@ -0,0 +1,6 @@ +Client +====== + +.. currentmodule:: gql.client + +.. 
automodule:: gql.client diff --git a/docs/modules/gql.rst b/docs/modules/gql.rst new file mode 100644 index 00000000..94121ea3 --- /dev/null +++ b/docs/modules/gql.rst @@ -0,0 +1,22 @@ +Reference +========= + +.. currentmodule:: gql + +.. _top-level-functions: + +Top-Level Functions +------------------- + +.. automodule:: gql + +.. _sub-packages: + +Sub-Packages +------------ + +.. toctree:: + :maxdepth: 1 + + client + transport diff --git a/docs/modules/transport.rst b/docs/modules/transport.rst new file mode 100644 index 00000000..dd4627e0 --- /dev/null +++ b/docs/modules/transport.rst @@ -0,0 +1,16 @@ +Transport +========= + +.. currentmodule:: gql.transport + +.. autoclass:: gql.transport.transport.Transport + +.. autoclass:: gql.transport.local_schema.LocalSchemaTransport + +.. autoclass:: gql.transport.requests.RequestsHTTPTransport + +.. autoclass:: gql.transport.async_transport.AsyncTransport + +.. autoclass:: gql.transport.aiohttp.AIOHTTPTransport + +.. autoclass:: gql.transport.websockets.WebsocketsTransport diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..64431755 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +sphinx>=3.0.0,<4 +sphinx_rtd_theme>=0.4,<1 \ No newline at end of file diff --git a/docs/transports/aiohttp.rst b/docs/transports/aiohttp.rst new file mode 100644 index 00000000..cdca6f45 --- /dev/null +++ b/docs/transports/aiohttp.rst @@ -0,0 +1,13 @@ +AIOHTTPTransport +================ + +This transport uses the `aiohttp`_ library and allows you to send GraphQL queries using the HTTP protocol. + +.. note:: + + GraphQL subscriptions are not supported on the HTTP transport. + For subscriptions you should use the :ref:`websockets transport `. + +.. literalinclude:: ../code_examples/aiohttp_async.py + +.. 
_aiohttp: https://docs.aiohttp.org diff --git a/docs/transports/async_transports.rst b/docs/transports/async_transports.rst new file mode 100644 index 00000000..9fb1b017 --- /dev/null +++ b/docs/transports/async_transports.rst @@ -0,0 +1,14 @@ +.. _async_transports: + +Async Transports +================ + +Async transports are transports which are using an underlying async library. They allow us to +:ref:`run GraphQL queries asynchronously ` + +.. toctree:: + :maxdepth: 1 + + aiohttp + websockets + phoenix diff --git a/docs/transports/index.rst b/docs/transports/index.rst new file mode 100644 index 00000000..f291c1d0 --- /dev/null +++ b/docs/transports/index.rst @@ -0,0 +1,13 @@ +.. _transports: + +Transports +========== + +GQL Transports are used to define how the connection is made with the backend. +We have different transports for different underlying protocols (http, websockets, ...) + +.. toctree:: + :maxdepth: 2 + + async_transports + sync_transports diff --git a/docs/transports/phoenix.rst b/docs/transports/phoenix.rst new file mode 100644 index 00000000..20a86c3d --- /dev/null +++ b/docs/transports/phoenix.rst @@ -0,0 +1,10 @@ +PhoenixChannelWebsocketsTransport +================================= + +The PhoenixChannelWebsocketsTransport is an **EXPERIMENTAL** async transport which allows you +to execute queries and subscriptions against an `Absinthe`_ backend using the `Phoenix`_ +framework `channels`_. + +.. _Absinthe: http://absinthe-graphql.org +.. _Phoenix: https://www.phoenixframework.org +.. _channels: https://hexdocs.pm/phoenix/Phoenix.Channel.html#content diff --git a/docs/transports/requests.rst b/docs/transports/requests.rst new file mode 100644 index 00000000..d1a5417c --- /dev/null +++ b/docs/transports/requests.rst @@ -0,0 +1,9 @@ +RequestsHTTPTransport +===================== + +The RequestsHTTPTransport is a sync transport using the `requests`_ library +and allows you to send GraphQL queries using the HTTP protocol. + +.. 
literalinclude:: ../code_examples/requests_sync.py + +.. _requests: https://requests.readthedocs.io diff --git a/docs/transports/sync_transports.rst b/docs/transports/sync_transports.rst new file mode 100644 index 00000000..3ed566d3 --- /dev/null +++ b/docs/transports/sync_transports.rst @@ -0,0 +1,12 @@ +.. _sync_transports: + +Sync Transports +================ + +Sync transports are transports which are using an underlying sync library. +They cannot be used asynchronously. + +.. toctree:: + :maxdepth: 1 + + requests diff --git a/docs/transports/websockets.rst b/docs/transports/websockets.rst new file mode 100644 index 00000000..a082d887 --- /dev/null +++ b/docs/transports/websockets.rst @@ -0,0 +1,71 @@ +.. _websockets_transport: + +WebsocketsTransport +=================== + +The websockets transport implements the `Apollo websockets transport protocol`_. + +This transport allows to do multiple queries, mutations and subscriptions on the same websocket connection. + +.. literalinclude:: ../code_examples/websockets_async.py + +Websockets SSL +-------------- + +If you need to connect to an ssl encrypted endpoint: + +* use _wss_ instead of _ws_ in the url of the transport + +.. code-block:: python + + sample_transport = WebsocketsTransport( + url='wss://SERVER_URL:SERVER_PORT/graphql', + headers={'Authorization': 'token'} + ) + +If you have a self-signed ssl certificate, you need to provide an ssl_context with the server public certificate: + +.. code-block:: python + + import pathlib + import ssl + + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + localhost_pem = pathlib.Path(__file__).with_name("YOUR_SERVER_PUBLIC_CERTIFICATE.pem") + ssl_context.load_verify_locations(localhost_pem) + + sample_transport = WebsocketsTransport( + url='wss://SERVER_URL:SERVER_PORT/graphql', + ssl=ssl_context + ) + +If you have also need to have a client ssl certificate, add: + +.. 
code-block:: python + + ssl_context.load_cert_chain(certfile='YOUR_CLIENT_CERTIFICATE.pem', keyfile='YOUR_CLIENT_CERTIFICATE_KEY.key') + +Websockets authentication +------------------------- + +There are two ways to send authentication tokens with websockets depending on the server configuration. + +1. Using HTTP Headers + +.. code-block:: python + + sample_transport = WebsocketsTransport( + url='wss://SERVER_URL:SERVER_PORT/graphql', + headers={'Authorization': 'token'} + ) + +2. With a payload in the connection_init websocket message + +.. code-block:: python + + sample_transport = WebsocketsTransport( + url='wss://SERVER_URL:SERVER_PORT/graphql', + init_payload={'Authorization': 'token'} + ) + +.. _Apollo websockets transport protocol: https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md diff --git a/docs/usage/basic_usage.rst b/docs/usage/basic_usage.rst new file mode 100644 index 00000000..d53c18d5 --- /dev/null +++ b/docs/usage/basic_usage.rst @@ -0,0 +1,21 @@ +.. _basic_usage: + +Basic usage +----------- + +In order to execute a GraphQL request against a GraphQL API: + +* create your gql :ref:`transport ` in order to choose the destination url + and the protocol used to communicate with it +* create a gql :class:`Client ` with the selected transport +* parse a query using :func:`gql ` +* execute the query on the client to get the result + +.. literalinclude:: ../code_examples/aiohttp_sync.py + +.. warning:: + + Please note that this basic example won't work if you have an asyncio event loop running. In some + python environments (as with Jupyter which uses IPython) an asyncio event loop is created for you. + In that case you should use instead the :ref:`Async Usage example`. 
+
diff --git a/docs/usage/headers.rst b/docs/usage/headers.rst
new file mode 100644
index 00000000..23af64a7
--- /dev/null
+++ b/docs/usage/headers.rst
@@ -0,0 +1,8 @@
+HTTP Headers
+============
+
+If you want to add additional http headers for your connection, you can specify these in your transport:
+
+.. code-block:: python
+
+    transport = AIOHTTPTransport(url='YOUR_URL', headers={'Authorization': 'token'})
diff --git a/docs/usage/index.rst b/docs/usage/index.rst
new file mode 100644
index 00000000..2d5d5fd3
--- /dev/null
+++ b/docs/usage/index.rst
@@ -0,0 +1,11 @@
+Usage
+=====
+
+.. toctree::
+   :maxdepth: 2
+
+   basic_usage
+   validation
+   subscriptions
+   variables
+   headers
diff --git a/docs/usage/subscriptions.rst b/docs/usage/subscriptions.rst
new file mode 100644
index 00000000..15645b06
--- /dev/null
+++ b/docs/usage/subscriptions.rst
@@ -0,0 +1,28 @@
+Subscriptions
+=============
+
+Using the :ref:`websockets transport `, it is possible to execute GraphQL subscriptions:
+
+.. code-block:: python
+
+    from gql import gql, Client, WebsocketsTransport
+
+    transport = WebsocketsTransport(url='wss://your_server/graphql')
+
+    client = Client(
+        transport=transport,
+        fetch_schema_from_transport=True,
+    )
+
+    query = gql('''
+        subscription yourSubscription {
+            ...
+        }
+    ''')
+
+    for result in client.subscribe(query):
+        print (result)
+
+.. note::
+
+    The websockets transport can also execute queries or mutations, it is not restricted to subscriptions
diff --git a/docs/usage/validation.rst b/docs/usage/validation.rst
new file mode 100644
index 00000000..0e840cd2
--- /dev/null
+++ b/docs/usage/validation.rst
@@ -0,0 +1,43 @@
+.. _schema_validation:
+
+Schema validation
+=================
+
+If a GraphQL schema is provided, gql will validate the queries locally before sending them to the backend.
+If no schema is provided, gql will send the query to the backend without local validation. 
+ +You can either provide a schema yourself, or you can request gql to get the schema +from the backend using `introspection`_. + +Using a provided schema +----------------------- + +The schema can be provided as a String (which is usually stored in a .graphql file): + +.. code-block:: python + + with open('path/to/schema.graphql') as f: + schema_str = f.read() + + client = Client(schema=schema_str) + +OR can be created using python classes: + +.. code-block:: python + + from .someSchema import SampleSchema + # SampleSchema is an instance of GraphQLSchema + + client = Client(schema=SampleSchema) + +See `tests/starwars/schema.py`_ for an example of such a schema. + +Using introspection +------------------- + +In order to get the schema directly from the GraphQL Server API using the transport, you need +to set the `fetch_schema_from_transport` argument of Client to True, and the client will +fetch the schema before the execution of the first query. + +.. _introspection: https://graphql.org/learn/introspection +.. _tests/starwars/schema.py: https://github.com/graphql-python/gql/blob/master/tests/starwars/schema.py diff --git a/docs/usage/variables.rst b/docs/usage/variables.rst new file mode 100644 index 00000000..81924c6e --- /dev/null +++ b/docs/usage/variables.rst @@ -0,0 +1,32 @@ +Using variables +=============== + +It is possible to provide variable values with your query by providing a Dict to +the variable_values argument of the `execute` or the `subscribe` methods. + +The variable values will be sent alongside the query in the transport message +(there is no local substitution). + +.. code-block:: python + + query = gql( + """ + query getContinentName ($code: ID!) 
{ + continent (code: $code) { + name + } + } + """ + ) + + params = {"code": "EU"} + + # Get name of continent with code "EU" + result = client.execute(query, variable_values=params) + print(result) + + params = {"code": "AF"} + + # Get name of continent with code "AF" + result = client.execute(query, variable_values=params) + print(result) diff --git a/gql/__init__.py b/gql/__init__.py index bad425d4..fefd91ad 100644 --- a/gql/__init__.py +++ b/gql/__init__.py @@ -1,3 +1,12 @@ +"""The primary :mod:`gql` package includes everything you need to +execute GraphQL requests: + + - the :func:`gql ` method to parse a GraphQL query + - the :class:`Client ` class as the entrypoint to execute requests + and create sessions + - all the transports classes implementing different communication protocols +""" + from .client import Client from .gql import gql from .transport.aiohttp import AIOHTTPTransport diff --git a/gql/client.py b/gql/client.py index 13f67327..af5ae28b 100644 --- a/gql/client.py +++ b/gql/client.py @@ -20,6 +20,25 @@ class Client: + """The Client class is the main entrypoint to execute GraphQL requests + on a GQL transport. + + It can take sync or async transports as argument and can either execute + and subscribe to requests itself with the + :func:`execute ` and + :func:`subscribe ` methods + OR can be used to get a sync or async session depending on the + transport type. + + To connect to an :ref:`async transport ` and get an + :class:`async session `, + use :code:`async with client as session:` + + To connect to a :ref:`sync transport ` and get a + :class:`sync session `, + use :code:`with client as session:` + """ + def __init__( self, schema: Optional[Union[str, GraphQLSchema]] = None, @@ -29,6 +48,16 @@ def __init__( fetch_schema_from_transport: bool = False, execute_timeout: Optional[int] = 10, ): + """Initialize the client with the given parameters. 
+ + :param schema: an optional GraphQL Schema for local validation + See :ref:`schema_validation` + :param transport: The provided :ref:`transport `. + :param fetch_schema_from_transport: Boolean to indicate that if we want to fetch + the schema from the transport using an introspection query + :param execute_timeout: The maximum time in seconds for the execution of a + request before a TimeoutError is raised + """ assert not ( type_def and introspection ), "Cannot provide introspection and type definition at the same time." @@ -81,7 +110,8 @@ def __init__( with self as session: session.fetch_schema() - def validate(self, document): + def validate(self, document: DocumentNode): + """:meta private:""" assert ( self.schema ), "Cannot validate the document locally, you need to pass a schema." @@ -91,21 +121,36 @@ def validate(self, document): raise validation_errors[0] def execute_sync(self, document: DocumentNode, *args, **kwargs) -> Dict: + """:meta private:""" with self as session: return session.execute(document, *args, **kwargs) async def execute_async(self, document: DocumentNode, *args, **kwargs) -> Dict: + """:meta private:""" async with self as session: return await session.execute(document, *args, **kwargs) def execute(self, document: DocumentNode, *args, **kwargs) -> Dict: - """Execute the provided document AST against the configured remote server. + """Execute the provided document AST against the remote server using + the transport provided during init. - This function WILL BLOCK until the result is received from the server. + This function **WILL BLOCK** until the result is received from the server. Either the transport is sync and we execute the query synchronously directly OR the transport is async and we execute the query in the asyncio loop (blocking here until answer). 
+ + This method will: + + - connect using the transport to get a session + - execute the GraphQL request on the transport session + - close the session and close the connection to the server + + If you have multiple requests to send, it is better to get your own session + and execute the requests in your session. + + The extra arguments passed in the method will be passed to the transport + execute method. """ if isinstance(self.transport, AsyncTransport): @@ -135,6 +180,7 @@ def execute(self, document: DocumentNode, *args, **kwargs) -> Dict: async def subscribe_async( self, document: DocumentNode, *args, **kwargs ) -> AsyncGenerator[Dict, None]: + """:meta private:""" async with self as session: generator: AsyncGenerator[Dict, None] = session.subscribe( @@ -228,13 +274,14 @@ def __exit__(self, *args): class SyncClientSession: - """An instance of this class is created when using 'with' on the client. + """An instance of this class is created when using :code:`with` on the client. It contains the sync method execute to send queries - with the sync transports. + on a sync transport using the same session. """ def __init__(self, client: Client): + """:param client: the :class:`client ` used""" self.client = client def _execute(self, document: DocumentNode, *args, **kwargs) -> ExecutionResult: @@ -263,6 +310,10 @@ def execute(self, document: DocumentNode, *args, **kwargs) -> Dict: return result.data def fetch_schema(self) -> None: + """Fetch the GraphQL schema explicitely using introspection. + + Don't use this function and instead set the fetch_schema_from_transport + attribute to True""" execution_result = self.transport.execute(parse(get_introspection_query())) self.client.introspection = execution_result.data self.client.schema = build_client_schema(self.client.introspection) @@ -273,13 +324,15 @@ def transport(self): class AsyncClientSession: - """An instance of this class is created when using 'async with' on the client. 
+ """An instance of this class is created when using :code:`async with` on a + :class:`client `. It contains the async methods (execute, subscribe) to send queries - with the async transports. + on an async transport using the same session. """ def __init__(self, client: Client): + """:param client: the :class:`client ` used""" self.client = client async def fetch_and_validate(self, document: DocumentNode): @@ -323,6 +376,10 @@ async def _subscribe( async def subscribe( self, document: DocumentNode, *args, **kwargs ) -> AsyncGenerator[Dict, None]: + """Coroutine to subscribe asynchronously to the provided document AST + asynchronously using the async transport. + + The extra arguments are passed to the transport subscribe method.""" # Validate and subscribe on the transport async for result in self._subscribe(document, *args, **kwargs): @@ -339,7 +396,6 @@ async def subscribe( async def _execute( self, document: DocumentNode, *args, **kwargs ) -> ExecutionResult: - # Fetch schema from transport if needed and validate document if possible await self.fetch_and_validate(document) @@ -350,6 +406,10 @@ async def _execute( ) async def execute(self, document: DocumentNode, *args, **kwargs) -> Dict: + """Coroutine to execute the provided document AST asynchronously using + the async transport. + + The extra arguments are passed to the transport execute method.""" # Validate and execute on the transport result = await self._execute(document, *args, **kwargs) @@ -367,6 +427,10 @@ async def execute(self, document: DocumentNode, *args, **kwargs) -> Dict: return result.data async def fetch_schema(self) -> None: + """Fetch the GraphQL schema explicitely using introspection. 
+ + Don't use this function and instead set the fetch_schema_from_transport + attribute to True""" execution_result = await self.transport.execute( parse(get_introspection_query()) ) diff --git a/gql/gql.py b/gql/gql.py index 221710ed..903c9609 100644 --- a/gql/gql.py +++ b/gql/gql.py @@ -2,5 +2,16 @@ def gql(request_string: str) -> DocumentNode: + """Given a String containing a GraphQL request, parse it into a Document. + + :param request_string: the GraphQL request as a String + :type request_string: str + :return: a Document which can be later executed or subscribed by a + :class:`Client `, by an + :class:`async session ` or by a + :class:`sync session ` + + :raises GraphQLError: if a syntax error is encountered. + """ source = Source(request_string, "GraphQL request") return parse(source) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index 7d71f3a9..a6451198 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -20,11 +20,10 @@ class AIOHTTPTransport(AsyncTransport): - """Transport to execute GraphQL queries on remote servers with an http connection. + """:ref:`Async Transport ` to execute GraphQL queries + on remote servers with an HTTP connection. - This transport use the aiohttp library with asyncio - - See README.md for Usage + This transport use the aiohttp library with asyncio. """ def __init__( @@ -44,7 +43,11 @@ def __init__( :param cookies: Dict of HTTP cookies. :param auth: BasicAuth object to enable Basic HTTP auth if needed :param ssl: ssl_context of the connection. Use ssl=False to disable encryption - :param client_session_args: Dict of extra args passed to aiohttp.ClientSession + :param client_session_args: Dict of extra args passed to + `aiohttp.ClientSession`_ + + .. 
_aiohttp.ClientSession: + https://docs.aiohttp.org/en/stable/client_reference.html#aiohttp.ClientSession """ self.url: str = url self.headers: Optional[LooseHeaders] = headers @@ -56,11 +59,13 @@ def __init__( self.session: Optional[aiohttp.ClientSession] = None async def connect(self) -> None: - """Coroutine which will: + """Coroutine which will create an aiohttp ClientSession() as self.session. - - create an aiohttp ClientSession() as self.session + Don't call this coroutine directly on the transport, instead use + :code:`async with` on the client and this coroutine will be executed + to create the session. - Should be cleaned with a call to the close coroutine + Should be cleaned with a call to the close coroutine. """ if self.session is None: @@ -86,6 +91,12 @@ async def connect(self) -> None: raise TransportAlreadyConnected("Transport is already connected") async def close(self) -> None: + """Coroutine which will close the aiohttp session. + + Don't call this coroutine directly on the transport, instead use + :code:`async with` on the client and this coroutine will be executed + when you exit the async context manager. + """ if self.session is not None: await self.session.close() self.session = None @@ -97,11 +108,19 @@ async def execute( operation_name: Optional[str] = None, extra_args: Dict[str, Any] = None, ) -> ExecutionResult: - """Execute the provided document AST against the configured remote server. + """Execute the provided document AST against the configured remote server + using the current session. This uses the aiohttp library to perform a HTTP POST request asynchronously to the remote server. - The result is sent as an ExecutionResult object. + Don't call this coroutine directly on the transport, instead use + :code:`execute` on a client or a session. 
+ + :param document: the parsed GraphQL request + :param variables_values: An optional Dict of variable values + :param operation_name: An optional Operation name for the request + :param extra_args: additional arguments to send to the aiohttp post method + :returns: an ExecutionResult object. """ query_str = print_ast(document) @@ -176,4 +195,8 @@ def subscribe( variable_values: Optional[Dict[str, str]] = None, operation_name: Optional[str] = None, ) -> AsyncGenerator[ExecutionResult, None]: + """Subscribe is not supported on HTTP. + + :meta private: + """ raise NotImplementedError(" The HTTP transport does not support subscriptions") diff --git a/gql/transport/phoenix_channel_websockets.py b/gql/transport/phoenix_channel_websockets.py index 6e96b72e..aaa6686a 100644 --- a/gql/transport/phoenix_channel_websockets.py +++ b/gql/transport/phoenix_channel_websockets.py @@ -14,19 +14,28 @@ class PhoenixChannelWebsocketsTransport(WebsocketsTransport): + """The PhoenixChannelWebsocketsTransport is an **EXPERIMENTAL** async transport + which allows you to execute queries and subscriptions against an `Absinthe`_ + backend using the `Phoenix`_ framework `channels`_. + + .. _Absinthe: http://absinthe-graphql.org + .. _Phoenix: https://www.phoenixframework.org + .. _channels: https://hexdocs.pm/phoenix/Phoenix.Channel.html#content + """ + def __init__( self, channel_name: str, heartbeat_interval: float = 30, *args, **kwargs ) -> None: + """Initialize the transport with the given parameters. + + :param channel_name: Channel on the server this transport will join + :param heartbeat_interval: Interval in second between each heartbeat messages + sent by the client + """ self.channel_name = channel_name self.heartbeat_interval = heartbeat_interval self.subscription_ids_to_query_ids: Dict[str, int] = {} super(PhoenixChannelWebsocketsTransport, self).__init__(*args, **kwargs) - """Initialize the transport with the given request parameters. 
- - :param channel_name Channel on the server this transport will join - :param heartbeat_interval Interval in second between each heartbeat messages - sent by the client - """ async def _send_init_message_and_wait_ack(self) -> None: """Join the specified channel and wait for the connection ACK. diff --git a/gql/transport/requests.py b/gql/transport/requests.py index 8eb4b2f8..823d0bc6 100644 --- a/gql/transport/requests.py +++ b/gql/transport/requests.py @@ -17,7 +17,8 @@ class RequestsHTTPTransport(Transport): - """Transport to execute GraphQL queries on remote servers. + """:ref:`Sync Transport ` used to execute GraphQL queries + on remote servers. The transport uses the requests library to send HTTP POST requests. """ @@ -53,7 +54,7 @@ def __init__( :param retries: Pre-setup of the requests' Session for performing retries :param method: HTTP method used for requests. (Default: POST). :param kwargs: Optional arguments that ``request`` takes. - These can be seen at the :requests_: source code or the official :docs_: + These can be seen at the `requests`_ source code or the official `docs`_ .. _requests: https://github.com/psf/requests/blob/master/requests/api.py .. _docs: https://requests.readthedocs.io/en/master/ diff --git a/gql/transport/websockets.py b/gql/transport/websockets.py index b4552b8c..63af4703 100644 --- a/gql/transport/websockets.py +++ b/gql/transport/websockets.py @@ -78,12 +78,11 @@ async def set_exception(self, exception: Exception) -> None: class WebsocketsTransport(AsyncTransport): - """Transport to execute GraphQL queries on remote servers with websocket connection. + """:ref:`Async Transport ` used to execute GraphQL queries on + remote servers with websocket connection. This transport uses asyncio and the websockets library in order to send requests on a websocket connection. - - See README.md for usage. 
""" def __init__( @@ -97,7 +96,7 @@ def __init__( ack_timeout: int = 10, connect_args: Dict[str, Any] = {}, ) -> None: - """Initialize the transport with the given request parameters. + """Initialize the transport with the given parameters. :param url: The GraphQL server URL. Example: 'wss://server.com:PORT/graphql'. :param headers: Dict of HTTP Headers. @@ -454,7 +453,8 @@ async def execute( variable_values: Optional[Dict[str, str]] = None, operation_name: Optional[str] = None, ) -> ExecutionResult: - """Execute a GrqphQLQuery. + """Execute the provided document AST against the configured remote server + using the current session. Send a query but close the async generator as soon as we have the first answer. diff --git a/setup.py b/setup.py index 2a240290..27b9ac8e 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,8 @@ "flake8==3.8.1", "isort==4.3.21", "mypy==0.770", + "sphinx>=3.0.0,<4", + "sphinx_rtd_theme>=0.4,<1" ] + tests_require setup( diff --git a/tox.ini b/tox.ini index 36d97dd4..7f4eb158 100644 --- a/tox.ini +++ b/tox.ini @@ -54,6 +54,12 @@ deps = -e.[dev] commands = mypy gql tests +[testenv:docs] +basepython = python3.8 +deps = -e.[dev] +commands = + sphinx-build -b html -nEW docs docs/_build/html + [testenv:manifest] basepython = python3.8 deps = -e.[dev] From 25c243a63f14df3b6fca76c82a027dfa33556878 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 14:53:08 +0200 Subject: [PATCH 10/23] GitHub Actions: do the tests for each push --- .github/workflows/lint.yml | 4 ++-- .github/workflows/tests.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index b36ef4cb..dffc5c4b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: Lint -on: [pull_request] +on: [push, pull_request] jobs: build: @@ -19,4 +19,4 @@ jobs: - name: Run lint and static type checks run: tox env: - TOXENV: flake8,black,import-order,mypy,manifest \ No 
newline at end of file + TOXENV: flake8,black,import-order,mypy,manifest diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 03f92d6b..5fcdea83 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,6 +1,6 @@ name: Tests -on: [pull_request] +on: [push, pull_request] jobs: build: @@ -23,4 +23,4 @@ jobs: - name: Test with tox run: tox env: - TOXENV: ${{ matrix.toxenv }} \ No newline at end of file + TOXENV: ${{ matrix.toxenv }} From 40f2aafc8cc5d001b2da2142674b5b5dc492fbc8 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 15:09:59 +0200 Subject: [PATCH 11/23] Tests: add pypy3 tests again --- .github/workflows/tests.yml | 2 +- tox.ini | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5fcdea83..5fbad50e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -8,7 +8,7 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: ["3.6", "3.7", "3.8", "3.9-dev"] + python-version: ["3.6", "3.7", "3.8", "3.9-dev", "pypy3"] steps: - uses: actions/checkout@v2 diff --git a/tox.ini b/tox.ini index 7f4eb158..130a5c62 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,7 @@ [tox] envlist = black,flake8,import-order,mypy,manifest, - py{36,37,38,39-dev} + py{36,37,38,39-dev,py3} [pytest] markers = asyncio @@ -12,6 +12,7 @@ python = 3.7: py37 3.8: py38 3.9: py39-dev + pypy3: pypy3 [testenv] passenv = * @@ -27,7 +28,7 @@ deps = -e.[test] commands = pip install -U setuptools ; run "tox -- tests -s" to show output for debugging - py{36,37,39-dev}: pytest {posargs:tests} + py{36,37,39-dev,py3}: pytest {posargs:tests} py{38}: pytest {posargs:tests --cov-report=term-missing --cov=gql} [testenv:black] From 57bb4aabe4fafdb29aadd824f276b210e3fc297c Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 16:51:35 +0200 Subject: [PATCH 12/23] GitHub Actions: try to send coverage to coveralls.io --- 
.github/workflows/tests.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5fbad50e..6f58e308 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,3 +24,20 @@ jobs: run: tox env: TOXENV: ${{ matrix.toxenv }} + - if: matrix.python-version == "3.8" + name: Coveralls Parallel + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + flag-name: run-${{ matrix.python-version }} + parallel: true + + coverage: + needs: build + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.github_token }} + parallel-finished: true From c2f1840423ce75cd98fe23d2e745f89568b5aac8 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 16:57:02 +0200 Subject: [PATCH 13/23] GitHub actions: try to send coverage to coveralls.io (2) --- .github/workflows/tests.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6f58e308..f1c2b6d2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,8 +24,7 @@ jobs: run: tox env: TOXENV: ${{ matrix.toxenv }} - - if: matrix.python-version == "3.8" - name: Coveralls Parallel + - name: Coveralls Parallel uses: coverallsapp/github-action@master with: github-token: ${{ secrets.github_token }} From 1ba67a7e6e3758184f983837a6c9cad97f65e20c Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 17:52:09 +0200 Subject: [PATCH 14/23] GitHub Actions: migrating from coveralls to codecov --- .github/workflows/tests.yml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f1c2b6d2..149b450a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -24,19 +24,19 @@ jobs: run: tox 
env: TOXENV: ${{ matrix.toxenv }} - - name: Coveralls Parallel - uses: coverallsapp/github-action@master - with: - github-token: ${{ secrets.github_token }} - flag-name: run-${{ matrix.python-version }} - parallel: true coverage: - needs: build runs-on: ubuntu-latest - steps: - - name: Coveralls Finished - uses: coverallsapp/github-action@master + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 with: - github-token: ${{ secrets.github_token }} - parallel-finished: true + python-version: 3.8 + - name: Install test dependencies + run: | + python -m pip install --upgrade pip + pip install .[test] + - name: Test with coverage + run: pytest --cov=gql --cov-report=xml tests + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 From e72bd6b11f034b25fd20059e4c5b8f4691b0be70 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 17:56:02 +0200 Subject: [PATCH 15/23] GitHub Actions: fix typo --- .github/workflows/tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 149b450a..5cfaed75 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -27,6 +27,8 @@ jobs: coverage: runs-on: ubuntu-latest + + steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 From 9e6dc7eca80991bccc0bfa4a63ddae5e25671e84 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 19:16:46 +0200 Subject: [PATCH 16/23] README.md fix badges, add link to doc and leave only basic example (#141) Replace travis badge by a GitHub action badge Replace coveralls badge by a codecov badge Add link to the documentation and leave only basic example in the README.md file Remove last coveralls and travis references --- CONTRIBUTING.md | 4 +- README.md | 395 +++--------------------------------------------- setup.py | 1 - 3 files changed, 24 insertions(+), 376 deletions(-) diff --git a/CONTRIBUTING.md 
b/CONTRIBUTING.md index 9033a304..e4df615a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -133,8 +133,8 @@ and all passing tests. :rocket: 5. Verify that the [Coding guidelines](#coding-guidelines) are respected 6. Verify that the [automated tests](#running-tests) are passing 7. Make a commit and push it to your fork -8. From github, create the pull request. Automated tests from travis -and coveralls will then automatically run the tests and check the code coverage +8. From github, create the pull request. Automated tests from GitHub actions +and codecov will then automatically run the tests and check the code coverage 9. If other modifications are needed, you are free to create more commits and push them on your branch. They'll get added to the PR automatically. diff --git a/README.md b/README.md index 3b3989b6..92981ff1 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # GQL -This is a GraphQL client for Python. +This is a GraphQL client for Python 3.6+. Plays nicely with `graphene`, `graphql-core`, `graphql-js` and any other GraphQL implementation compatible with the spec. GQL architecture is inspired by `React-Relay` and `Apollo-Client`. @@ -8,26 +8,38 @@ GQL architecture is inspired by `React-Relay` and `Apollo-Client`. 
> **WARNING**: Please note that the following documentation describes the current version which is currently only available as a pre-release > The documentation for the 2.x version compatible with python<3.6 is available in the [2.x branch](https://github.com/graphql-python/gql/tree/v2.x) -[![travis][travis-image]][travis-url] +[![GitHub-Actions][gh-image]][gh-url] [![pyversion][pyversion-image]][pyversion-url] [![pypi][pypi-image]][pypi-url] [![Anaconda-Server Badge][conda-image]][conda-url] -[![coveralls][coveralls-image]][coveralls-url] +[![codecov][codecov-image]][codecov-url] -[travis-image]: https://img.shields.io/travis/graphql-python/gql.svg?style=flat -[travis-url]: https://travis-ci.org/graphql-python/gql +[gh-image]: https://github.com/graphql-python/gql/workflows/Tests/badge.svg +[gh-url]: https://github.com/graphql-python/gql/actions?query=workflow%3ATests [pyversion-image]: https://img.shields.io/pypi/pyversions/gql [pyversion-url]: https://pypi.org/project/gql/ [pypi-image]: https://img.shields.io/pypi/v/gql.svg?style=flat [pypi-url]: https://pypi.org/project/gql/ -[coveralls-image]: https://coveralls.io/repos/graphql-python/gql/badge.svg?branch=master&service=github -[coveralls-url]: https://coveralls.io/github/graphql-python/gql?branch=master [conda-image]: https://img.shields.io/conda/vn/conda-forge/gql.svg [conda-url]: https://anaconda.org/conda-forge/gql +[codecov-image]: https://codecov.io/gh/graphql-python/gql/branch/master/graph/badge.svg +[codecov-url]: https://codecov.io/gh/graphql-python/gql -## Installation +## Documentation + +The complete documentation for GQL can be found at +[gql.readthedocs.io](https://gql.readthedocs.io). + +## Features - $ pip install gql +The main features of GQL are: + +* Execute GraphQL queries using [different protocols](https://gql.readthedocs.io/en/latest/transports/index.html) (http, websockets, ...) 
+* Possibility to [validate the queries locally](https://gql.readthedocs.io/en/latest/usage/validation.html) using a GraphQL schema provided locally or fetched from the backend using an instrospection query +* Supports GraphQL queries, mutations and subscriptions +* Supports [sync or async usage](https://gql.readthedocs.io/en/latest/async/index.html), [allowing concurrent requests](https://gql.readthedocs.io/en/latest/advanced/async_advanced_usage.html#async-advanced-usage) + +## Installation > **WARNING**: Please note that the following documentation describes the current version which is currently only available as a pre-release and needs to be installed with @@ -65,370 +77,7 @@ print(result) > **WARNING**: Please note that this basic example won't work if you have an asyncio event loop running. In some > python environments (as with Jupyter which uses IPython) an asyncio event loop is created for you. In that case you -> should use instead the example in the [AIOHTTPTransport](#HTTP-async-transport) section. - -### Local schema validation - -It is possible to validate a query locally either using a provided schema or by using -[introspection](https://graphql.org/learn/introspection/) to get the schema from the GraphQL API server. - -#### Using a provided schema - -The schema can be provided as a String (which is usually stored in a .graphql file): - -```python -with open('path/to/schema.graphql') as f: - schema_str = f.read() - -client = Client(schema=schema_str) -``` - -OR can be created using python classes: - -```python -from .someSchema import SampleSchema -# SampleSchema is an instance of GraphQLSchema - -client = Client(schema=SampleSchema) -``` - -See [tests/starwars/schema.py](tests/starwars/schema.py) for an example of such a schema. 
- -#### Using introspection - -In order to get the schema directly from the GraphQL Server API using the transport, you just need -to set the `fetch_schema_from_transport` argument of Client to True - -### HTTP Headers - -If you want to add additional http headers for your connection, you can specify these in your transport: - -```python -transport = AIOHTTPTransport(url='YOUR_URL', headers={'Authorization': 'token'}) -``` - -### GraphQL variables - -You can also provide variable values with your query: - -```python -query = gql( - """ - query getContinentName ($code: ID!) { - continent (code: $code) { - name - } - } -""" -) - -params = {"code": "EU"} - -# Get name of continent with code "EU" -result = client.execute(query, variable_values=params) -print(result) - -params = {"code": "AF"} - -# Get name of continent with code "AF" -result = client.execute(query, variable_values=params) -print(result) -``` - -### GraphQL subscriptions - -Using the websockets transport, it is possible to execute GraphQL subscriptions: - -```python -from gql import gql, Client, WebsocketsTransport - -transport = WebsocketsTransport(url='wss://your_server/graphql') - -client = Client( - transport=transport, - fetch_schema_from_transport=True, -) - -query = gql(''' - subscription yourSubscription { - ... - } -''') - -for result in client.subscribe(query): - print (result) -``` - -> **Note**: the websockets transport can also execute queries or mutations, it is not restricted to subscriptions - -### Execute on a local schema - -It is also possible to execute queries against a local schema (so without a transport). - -```python -from gql import gql, Client - -from .someSchema import SampleSchema - -client = Client(schema=SampleSchema) - -query = gql(''' - { - hello - } -''') - -result = client.execute(query) -``` - -### Compose GraphQL queries dynamically with the DSL module - -Instead of providing the GraphQL queries as a String, it is also possible to create GraphQL queries dynamically. 
-Using the DSL module, we can create a query using a Domain Specific Language which is created from the schema. - -```python -from gql.dsl import DSLSchema - -client = Client(schema=StarWarsSchema) -ds = DSLSchema(client) - -query_dsl = ds.Query.hero.select( - ds.Character.id, - ds.Character.name, - ds.Character.friends.select(ds.Character.name,), -) -``` - -will create a query equivalent to: - -``` -hero { - id - name - friends { - name - } -} -``` - -See [tests/starwars/test_dsl.py](tests/starwars/test_dsl.py) for examples. - -## Async usage with asyncio - -When using the `execute` or `subscribe` function directly on the client, the execution is synchronous. -It means that we are blocked until we receive an answer from the server and -we cannot do anything else while waiting for this answer. - -It is also possible to use this library asynchronously using [asyncio](https://docs.python.org/3/library/asyncio.html). - -Async Features: -* Execute GraphQL subscriptions (See [using the websockets transport](#Websockets-async-transport)) -* Execute GraphQL queries, mutations and subscriptions in parallel - -To use the async features, you need to use an async transport: -* [AIOHTTPTransport](#HTTP-async-transport) for the HTTP(s) protocols -* [WebsocketsTransport](#Websockets-async-transport) for the ws(s) protocols - -### HTTP async transport - -This transport uses the [aiohttp library](https://docs.aiohttp.org) - -GraphQL subscriptions are not supported on the HTTP transport. -For subscriptions you should use the websockets transport. 
- -```python -from gql import gql, AIOHTTPTransport, Client -import asyncio - -async def main(): - - transport = AIOHTTPTransport(url='https://countries.trevorblades.com/graphql') - - # Using `async with` on the client will start a connection on the transport - # and provide a `session` variable to execute queries on this connection - async with Client( - transport=transport, - fetch_schema_from_transport=True, - ) as session: - - # Execute single query - query = gql(''' - query getContinents { - continents { - code - name - } - } - ''') - - result = await session.execute(query) - print(result) - -asyncio.run(main()) -``` - -### Websockets async transport - -The websockets transport uses the apollo protocol described here: - -[Apollo websockets transport protocol](https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md) - -This transport allows to do multiple queries, mutations and subscriptions on the same websocket connection. - -```python -import logging -logging.basicConfig(level=logging.INFO) - -from gql import gql, Client, WebsocketsTransport -import asyncio - -async def main(): - - transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql') - - # Using `async with` on the client will start a connection on the transport - # and provide a `session` variable to execute queries on this connection - async with Client( - transport=sample_transport, - fetch_schema_from_transport=True, - ) as session: - - # Execute single query - query = gql(''' - query getContinents { - continents { - code - name - } - } - ''') - result = await session.execute(query) - print(result) - - # Request subscription - subscription = gql(''' - subscription { - somethingChanged { - id - } - } - ''') - async for result in session.subscribe(subscription): - print(result) - -asyncio.run(main()) -``` - -#### Websockets SSL - -If you need to connect to an ssl encrypted endpoint: - -* use _wss_ instead of _ws_ in the url of the transport - 
-```python -sample_transport = WebsocketsTransport( - url='wss://SERVER_URL:SERVER_PORT/graphql', - headers={'Authorization': 'token'} -) -``` - -If you have a self-signed ssl certificate, you need to provide an ssl_context with the server public certificate: - -```python -import pathlib -import ssl - -ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) -localhost_pem = pathlib.Path(__file__).with_name("YOUR_SERVER_PUBLIC_CERTIFICATE.pem") -ssl_context.load_verify_locations(localhost_pem) - -sample_transport = WebsocketsTransport( - url='wss://SERVER_URL:SERVER_PORT/graphql', - ssl=ssl_context -) -``` - -If you have also need to have a client ssl certificate, add: - -```python -ssl_context.load_cert_chain(certfile='YOUR_CLIENT_CERTIFICATE.pem', keyfile='YOUR_CLIENT_CERTIFICATE_KEY.key') -``` - -#### Websockets authentication - -There are two ways to send authentication tokens with websockets depending on the server configuration. - -1. Using HTTP Headers - -```python -sample_transport = WebsocketsTransport( - url='wss://SERVER_URL:SERVER_PORT/graphql', - headers={'Authorization': 'token'} -) -``` - -2. With a payload in the connection_init websocket message - -```python -sample_transport = WebsocketsTransport( - url='wss://SERVER_URL:SERVER_PORT/graphql', - init_payload={'Authorization': 'token'} -) -``` - -### Async advanced usage - -It is possible to send multiple GraphQL queries (query, mutation or subscription) in parallel, -on the same websocket connection, using asyncio tasks. - -In order to retry in case of connection failure, we can use the great -[backoff](https://github.com/litl/backoff) module. 
- -```python -# First define all your queries using a session argument: - -async def execute_query1(session): - result = await session.execute(query1) - print(result) - -async def execute_query2(session): - result = await session.execute(query2) - print(result) - -async def execute_subscription1(session): - async for result in session.subscribe(subscription1): - print(result) - -async def execute_subscription2(session): - async for result in session.subscribe(subscription2): - print(result) - -# Then create a couroutine which will connect to your API and run all your queries as tasks. -# We use a `backoff` decorator to reconnect using exponential backoff in case of connection failure. - -@backoff.on_exception(backoff.expo, Exception, max_time=300) -async def graphql_connection(): - - transport = WebsocketsTransport(url="wss://YOUR_URL") - - client = Client(transport=transport, fetch_schema_from_transport=True) - - async with client as session: - task1 = asyncio.create_task(execute_query1(session)) - task2 = asyncio.create_task(execute_query2(session)) - task3 = asyncio.create_task(execute_subscription1(session)) - task4 = asyncio.create_task(execute_subscription2(session)) - - await asyncio.gather(task1, task2, task3, task4) - -asyncio.run(graphql_connection()) -``` - -Subscriptions tasks can be stopped at any time by running - -```python -task.cancel() -``` +> should use instead the [async usage example](https://gql.readthedocs.io/en/latest/async/async_usage.html#async-usage). 
## Contributing See [CONTRIBUTING.md](CONTRIBUTING.md) diff --git a/setup.py b/setup.py index 27b9ac8e..4ed2adcd 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,6 @@ ] tests_require = [ - "coveralls==2.0.0", "parse==1.15.0", "pytest==5.4.2", "pytest-asyncio==0.11.0", From 53c7a3202b02c6efe6f0b69fe7c49445b5a9544f Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 21:58:01 +0200 Subject: [PATCH 17/23] Single-sourcing the version in a __version__.py file (#142) --- docs/conf.py | 3 ++- gql/__init__.py | 2 ++ gql/__version__.py | 1 + setup.py | 12 ++++++++++-- 4 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 gql/__version__.py diff --git a/docs/conf.py b/docs/conf.py index f1c281c1..987bc3cd 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,8 @@ author = 'graphql-python.org' # The full version, including alpha/beta/rc tags -release = '3.0.0a1' +from gql import __version__ +release = __version__ # -- General configuration --------------------------------------------------- diff --git a/gql/__init__.py b/gql/__init__.py index fefd91ad..9c2e59a1 100644 --- a/gql/__init__.py +++ b/gql/__init__.py @@ -7,6 +7,7 @@ - all the transports classes implementing different communication protocols """ +from .__version__ import __version__ from .client import Client from .gql import gql from .transport.aiohttp import AIOHTTPTransport @@ -15,6 +16,7 @@ from .transport.websockets import WebsocketsTransport __all__ = [ + "__version__", "gql", "AIOHTTPTransport", "Client", diff --git a/gql/__version__.py b/gql/__version__.py new file mode 100644 index 00000000..7cf50d07 --- /dev/null +++ b/gql/__version__.py @@ -0,0 +1 @@ +__version__ = "3.0.0a1" diff --git a/setup.py b/setup.py index 4ed2adcd..9233eab3 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,5 @@ +import os + from setuptools import setup, find_packages install_requires = [ @@ -28,12 +30,18 @@ "isort==4.3.21", "mypy==0.770", "sphinx>=3.0.0,<4", - "sphinx_rtd_theme>=0.4,<1" + 
"sphinx_rtd_theme>=0.4,<1", ] + tests_require +# Get version from __version__.py file +current_folder = os.path.abspath(os.path.dirname(__file__)) +about = {} +with open(os.path.join(current_folder, "gql", "__version__.py"), "r") as f: + exec(f.read(), about) + setup( name="gql", - version="3.0.0a1", + version=about["__version__"], description="GraphQL client for Python", long_description=open("README.md").read(), long_description_content_type="text/markdown", From 4d11c895c9b70309b6119814b02f73ddd96e6b9a Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sun, 27 Sep 2020 22:02:06 +0200 Subject: [PATCH 18/23] Bump version number --- gql/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gql/__version__.py b/gql/__version__.py index 7cf50d07..d1dafa84 100644 --- a/gql/__version__.py +++ b/gql/__version__.py @@ -1 +1 @@ -__version__ = "3.0.0a1" +__version__ = "3.0.0a2" From eec9220528cc9ec8703383e07510ae5cb34eebe6 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sat, 3 Oct 2020 15:23:43 +0200 Subject: [PATCH 19/23] Only upload files if the upload_files flag is True --- gql/transport/aiohttp.py | 62 ++++++++++++++++++++++++++++------------ 1 file changed, 43 insertions(+), 19 deletions(-) diff --git a/gql/transport/aiohttp.py b/gql/transport/aiohttp.py index a6451198..f17d3f5b 100644 --- a/gql/transport/aiohttp.py +++ b/gql/transport/aiohttp.py @@ -1,4 +1,5 @@ import json +import logging from ssl import SSLContext from typing import Any, AsyncGenerator, Dict, Optional, Union @@ -18,6 +19,8 @@ TransportServerError, ) +log = logging.getLogger(__name__) + class AIOHTTPTransport(AsyncTransport): """:ref:`Async Transport ` to execute GraphQL queries @@ -107,6 +110,7 @@ async def execute( variable_values: Optional[Dict[str, str]] = None, operation_name: Optional[str] = None, extra_args: Dict[str, Any] = None, + upload_files: bool = False, ) -> ExecutionResult: """Execute the provided document AST against the configured remote server using the 
current session. @@ -120,50 +124,70 @@ async def execute( :param variables_values: An optional Dict of variable values :param operation_name: An optional Operation name for the request :param extra_args: additional arguments to send to the aiohttp post method + :param upload_files: Set to True if you want to put files in the variable values :returns: an ExecutionResult object. """ query_str = print_ast(document) - nulled_variable_values = None - files = None - if variable_values: - nulled_variable_values, files = extract_files(variable_values) - payload: Dict[str, Any] = { "query": query_str, } - if nulled_variable_values: - payload["variables"] = nulled_variable_values if operation_name: payload["operationName"] = operation_name - if files: + if upload_files: + + # If the upload_files flag is set, then we need variable_values + assert variable_values is not None + + # If we upload files, we will extract the files present in the + # variable_values dict and replace them by null values + nulled_variable_values, files = extract_files(variable_values) + + # Save the nulled variable values in the payload + payload["variables"] = nulled_variable_values + + # Prepare aiohttp to send multipart-encoded data data = aiohttp.FormData() - # header - file_map = {str(i): [path] for i, path in enumerate(files)} + # Generate the file map # path is nested in a list because the spec allows multiple pointers - # to the same file. But we don't use that. - file_streams = { - str(i): files[path] for i, path in enumerate(files) - } # payload + # to the same file. But we don't support that. 
+ # Will generate something like {"0": ["variables.file"]} + file_map = {str(i): [path] for i, path in enumerate(files)} + # Enumerate the file streams + # Will generate something like {'0': <_io.BufferedReader ...>} + file_streams = {str(i): files[path] for i, path in enumerate(files)} + + # Add the payload to the operations field + operations_str = json.dumps(payload) + log.debug("operations %s", operations_str) data.add_field( - "operations", json.dumps(payload), content_type="application/json" + "operations", operations_str, content_type="application/json" ) - data.add_field("map", json.dumps(file_map), content_type="application/json") + + # Add the file map field + file_map_str = json.dumps(file_map) + log.debug("file_map %s", file_map_str) + data.add_field("map", file_map_str, content_type="application/json") + + # Add the extracted files as remaining fields data.add_fields(*file_streams.items()) - post_args = {"data": data} + post_args: Dict[str, Any] = {"data": data} else: - post_args = {"json": payload} # type: ignore + if variable_values: + payload["variables"] = variable_values + + post_args = {"json": payload} # Pass post_args to aiohttp post method if extra_args: - post_args.update(extra_args) # type: ignore + post_args.update(extra_args) if self.session is None: raise TransportClosed("Transport is not connected") From f647803d0ee59c0d273d2a132cb6539fd6b0e641 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sat, 3 Oct 2020 15:25:00 +0200 Subject: [PATCH 20/23] Adding tests for the file upload functionality --- tests/conftest.py | 22 ++++ tests/test_aiohttp.py | 298 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 320 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index c2edc236..9e266490 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,6 +4,7 @@ import os import pathlib import ssl +import tempfile import types from concurrent.futures import ThreadPoolExecutor @@ -187,6 +188,27 @@ async def 
send_connection_ack(ws): await ws.send('{"event":"phx_reply", "payload": {"status": "ok"}, "ref": 1}') +class TemporaryFile: + """Class used to generate temporary files for the tests""" + + def __init__(self, content): + + self.file = tempfile.NamedTemporaryFile(mode="w", delete=False) + + with self.file as f: + f.write(content) + + @property + def filename(self): + return self.file.name + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + os.unlink(self.filename) + + def get_server_handler(request): """Get the server handler. diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py index 0e97655f..f80e962b 100644 --- a/tests/test_aiohttp.py +++ b/tests/test_aiohttp.py @@ -11,6 +11,8 @@ TransportServerError, ) +from .conftest import TemporaryFile + query1_str = """ query getContinents { continents { @@ -321,3 +323,299 @@ def test_code(): pass await run_sync_test(event_loop, server, test_code) + + +file_upload_server_answer = '{"data":{"success":true}}' + +file_upload_mutation_1 = """ + mutation($file: Upload!) { + uploadFile(input:{other_var:$other_var, file:$file}) { + success + } + } +""" + +file_upload_mutation_1_operations = ( + '{"query": "mutation ($file: Upload!) 
{\\n uploadFile(input: {other_var: ' + '$other_var, file: $file}) {\\n success\\n }\\n}\\n", "variables": ' + '{"file": null, "other_var": 42}}' +) + +file_upload_mutation_1_map = '{"0": ["variables.file"]}' + +file_1_content = """ +This is a test file +This file will be sent in the GraphQL mutation +""" + + +async def single_upload_handler(request): + + reader = await request.multipart() + + field_0 = await reader.next() + assert field_0.name == "operations" + field_0_text = await field_0.text() + assert field_0_text == file_upload_mutation_1_operations + + field_1 = await reader.next() + assert field_1.name == "map" + field_1_text = await field_1.text() + assert field_1_text == file_upload_mutation_1_map + + field_2 = await reader.next() + assert field_2.name == "0" + field_2_text = await field_2.text() + assert field_2_text == file_1_content + + field_3 = await reader.next() + assert field_3 is None + + return web.Response(text=file_upload_server_answer, content_type="application/json") + + +@pytest.mark.asyncio +async def test_aiohttp_file_upload(event_loop, aiohttp_server): + app = web.Application() + app.router.add_route("POST", "/", single_upload_handler) + server = await aiohttp_server(app) + + url = server.make_url("/") + + sample_transport = AIOHTTPTransport(url=url, timeout=10) + + with TemporaryFile(file_1_content) as test_file: + + async with Client(transport=sample_transport,) as session: + + query = gql(file_upload_mutation_1) + + file_path = test_file.filename + + with open(file_path, "rb") as f: + + params = {"file": f, "other_var": 42} + + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + + assert success + + +@pytest.mark.asyncio +async def test_aiohttp_file_upload_without_session( + event_loop, aiohttp_server, run_sync_test +): + + app = web.Application() + app.router.add_route("POST", "/", single_upload_handler) + server = await 
aiohttp_server(app) + + url = server.make_url("/") + + def test_code(): + sample_transport = AIOHTTPTransport(url=url, timeout=10) + + with TemporaryFile(file_1_content) as test_file: + + client = Client(transport=sample_transport,) + + query = gql(file_upload_mutation_1) + + file_path = test_file.filename + + with open(file_path, "rb") as f: + + params = {"file": f, "other_var": 42} + + result = client.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + + assert success + + await run_sync_test(event_loop, server, test_code) + + +file_upload_mutation_2 = """ + mutation($file1: Upload!, $file2: Upload!) { + uploadFile(input:{file1:$file, file2:$file}) { + success + } + } +""" + +file_upload_mutation_2_operations = ( + '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' + 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}\\n", ' + '"variables": {"file1": null, "file2": null}}' +) + +file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' + +file_2_content = """ +This is a second test file +This file will also be sent in the GraphQL mutation +""" + + +@pytest.mark.asyncio +async def test_aiohttp_file_upload_two_files(event_loop, aiohttp_server): + async def handler(request): + + reader = await request.multipart() + + field_0 = await reader.next() + assert field_0.name == "operations" + field_0_text = await field_0.text() + assert field_0_text == file_upload_mutation_2_operations + + field_1 = await reader.next() + assert field_1.name == "map" + field_1_text = await field_1.text() + assert field_1_text == file_upload_mutation_2_map + + field_2 = await reader.next() + assert field_2.name == "0" + field_2_text = await field_2.text() + assert field_2_text == file_1_content + + field_3 = await reader.next() + assert field_3.name == "1" + field_3_text = await field_3.text() + assert field_3_text == file_2_content + + field_4 = await reader.next() + assert field_4 is None 
+ + return web.Response( + text=file_upload_server_answer, content_type="application/json" + ) + + app = web.Application() + app.router.add_route("POST", "/", handler) + server = await aiohttp_server(app) + + url = server.make_url("/") + + sample_transport = AIOHTTPTransport(url=url, timeout=10) + + with TemporaryFile(file_1_content) as test_file_1: + with TemporaryFile(file_2_content) as test_file_2: + + async with Client(transport=sample_transport,) as session: + + query = gql(file_upload_mutation_2) + + file_path_1 = test_file_1.filename + file_path_2 = test_file_2.filename + + f1 = open(file_path_1, "rb") + f2 = open(file_path_2, "rb") + + params = { + "file1": f1, + "file2": f2, + } + + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + f1.close() + f2.close() + + success = result["success"] + + assert success + + +file_upload_mutation_3 = """ + mutation($files: [Upload!]!) { + uploadFiles(input:{files:$files}) { + success + } + } +""" + +file_upload_mutation_3_operations = ( + '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles(input: {files: $files})' + ' {\\n success\\n }\\n}\\n", "variables": {"files": [null, null]}}' +) + +file_upload_mutation_3_map = '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' + + +@pytest.mark.asyncio +async def test_aiohttp_file_upload_list_of_two_files(event_loop, aiohttp_server): + async def handler(request): + + reader = await request.multipart() + + field_0 = await reader.next() + assert field_0.name == "operations" + field_0_text = await field_0.text() + assert field_0_text == file_upload_mutation_3_operations + + field_1 = await reader.next() + assert field_1.name == "map" + field_1_text = await field_1.text() + assert field_1_text == file_upload_mutation_3_map + + field_2 = await reader.next() + assert field_2.name == "0" + field_2_text = await field_2.text() + assert field_2_text == file_1_content + + field_3 = await reader.next() + assert field_3.name == "1" + field_3_text = await field_3.text() + assert field_3_text == file_2_content + + field_4 = await reader.next() + assert field_4 is None + + return web.Response( + text=file_upload_server_answer, content_type="application/json" + ) + + app = web.Application() + app.router.add_route("POST", "/", handler) + server = await aiohttp_server(app) + + url = server.make_url("/") + + sample_transport = AIOHTTPTransport(url=url, timeout=10) + + with TemporaryFile(file_1_content) as test_file_1: + with TemporaryFile(file_2_content) as test_file_2: + + async with Client(transport=sample_transport,) as session: + + query = gql(file_upload_mutation_3) + + file_path_1 = test_file_1.filename + file_path_2 = test_file_2.filename + + f1 = open(file_path_1, "rb") + f2 = open(file_path_2, "rb") + + params = {"files": [f1, f2]} + + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + f1.close() + f2.close() + + success = result["success"] + + assert success From 62c6a58176659c719e853d6cddd48f136f8d62d5 Mon Sep 17 00:00:00 
2001 From: Hanusz Leszek Date: Sat, 3 Oct 2020 16:04:48 +0200 Subject: [PATCH 21/23] Add docs --- README.md | 1 + docs/transports/aiohttp.rst | 2 ++ docs/usage/file_upload.rst | 69 +++++++++++++++++++++++++++++++++++++ docs/usage/index.rst | 1 + 4 files changed, 73 insertions(+) create mode 100644 docs/usage/file_upload.rst diff --git a/README.md b/README.md index 92981ff1..9bdd7f76 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,7 @@ The main features of GQL are: * Possibility to [validate the queries locally](https://gql.readthedocs.io/en/latest/usage/validation.html) using a GraphQL schema provided locally or fetched from the backend using an instrospection query * Supports GraphQL queries, mutations and subscriptions * Supports [sync or async usage](https://gql.readthedocs.io/en/latest/async/index.html), [allowing concurrent requests](https://gql.readthedocs.io/en/latest/advanced/async_advanced_usage.html#async-advanced-usage) +* Supports [File uploads](https://gql.readthedocs.io/en/latest/usage/file_upload.html) ## Installation diff --git a/docs/transports/aiohttp.rst b/docs/transports/aiohttp.rst index cdca6f45..a54809cc 100644 --- a/docs/transports/aiohttp.rst +++ b/docs/transports/aiohttp.rst @@ -1,3 +1,5 @@ +.. _aiohttp_transport: + AIOHTTPTransport ================ diff --git a/docs/usage/file_upload.rst b/docs/usage/file_upload.rst new file mode 100644 index 00000000..d900df95 --- /dev/null +++ b/docs/usage/file_upload.rst @@ -0,0 +1,69 @@ +File uploads +============ + +GQL supports file uploads with the :ref:`aiohttp transport ` +using the `GraphQL multipart request spec`_. + +.. _GraphQL multipart request spec: https://github.com/jaydenseric/graphql-multipart-request-spec + +Single File +----------- + +In order to upload a single file, you need to: + +* set the file as a variable value in the mutation +* provide the opened file to the `variable_values` argument of `execute` +* set the `upload_files` argument to True + +.. 
code-block:: python + + transport = AIOHTTPTransport(url='YOUR_URL') + + client = Client(transport=transport) + + query = gql(''' + mutation($file: Upload!) { + singleUpload(file: $file) { + id + } + } + ''') + + with open("YOUR_FILE_PATH", "rb") as f: + + params = {"file": f} + + result = client.execute( + query, variable_values=params, upload_files=True + ) + +File list +--------- + +It is also possible to upload multiple files using a list. + +.. code-block:: python + + transport = AIOHTTPTransport(url='YOUR_URL') + + client = Client(transport=transport) + + query = gql(''' + mutation($files: [Upload!]!) { + multipleUpload(files: $files) { + id + } + } + ''') + + f1 = open("YOUR_FILE_PATH_1", "rb") + f2 = open("YOUR_FILE_PATH_2", "rb") + + params = {"files": [f1, f2]} + + result = client.execute( + query, variable_values=params, upload_files=True + ) + + f1.close() + f2.close() diff --git a/docs/usage/index.rst b/docs/usage/index.rst index 2d5d5fd3..a7dd4d56 100644 --- a/docs/usage/index.rst +++ b/docs/usage/index.rst @@ -9,3 +9,4 @@ Usage subscriptions variables headers + file_upload From e48861242cc59a66aa8b679a76b25750ddfef8ff Mon Sep 17 00:00:00 2001 From: Leszek Hanusz Date: Sat, 10 Oct 2020 17:21:29 +0200 Subject: [PATCH 22/23] fix file upload tests on windows and add a binary file upload test --- tests/conftest.py | 18 +++++++++++-- tests/test_aiohttp.py | 61 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9e266490..05b1ff14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,6 +7,7 @@ import tempfile import types from concurrent.futures import ThreadPoolExecutor +from typing import Union import pytest import websockets @@ -191,9 +192,22 @@ async def send_connection_ack(ws): class TemporaryFile: """Class used to generate temporary files for the tests""" - def __init__(self, content: Union[str, 
bytearray]): - self.file = tempfile.NamedTemporaryFile(mode="w", delete=False) + open_params = {} + + if isinstance(content, str): + + open_params["mode"] = "w" + + # We need to set the newline to '' so that the line returns + # are not replaced to '\r\n' on windows + open_params["newline"] = "" + + else: + open_params["mode"] = "wb" + + self.file = tempfile.NamedTemporaryFile(**open_params, delete=False) with self.file as f: f.write(content) diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py index f80e962b..8f39319f 100644 --- a/tests/test_aiohttp.py +++ b/tests/test_aiohttp.py @@ -443,6 +443,67 @@ def test_code(): await run_sync_test(event_loop, server, test_code) +# This is a sample binary file content containing all possible byte values +binary_file_content = bytes(range(0, 256)) + + +async def binary_upload_handler(request): + + reader = await request.multipart() + + field_0 = await reader.next() + assert field_0.name == "operations" + field_0_text = await field_0.text() + assert field_0_text == file_upload_mutation_1_operations + + field_1 = await reader.next() + assert field_1.name == "map" + field_1_text = await field_1.text() + assert field_1_text == file_upload_mutation_1_map + + field_2 = await reader.next() + assert field_2.name == "0" + field_2_binary = await field_2.read() + assert field_2_binary == binary_file_content + + field_3 = await reader.next() + assert field_3 is None + + return web.Response(text=file_upload_server_answer, content_type="application/json") + + +@pytest.mark.asyncio +async def test_aiohttp_binary_file_upload(event_loop, aiohttp_server): + app = web.Application() + app.router.add_route("POST", "/", binary_upload_handler) + server = await aiohttp_server(app) + + url = server.make_url("/") + + sample_transport = AIOHTTPTransport(url=url, timeout=10) + + with TemporaryFile(binary_file_content) as test_file: + + async with Client(transport=sample_transport,) as session: + + query = gql(file_upload_mutation_1) + + file_path = 
test_file.filename + + with open(file_path, "rb") as f: + + params = {"file": f, "other_var": 42} + + # Execute query asynchronously + result = await session.execute( + query, variable_values=params, upload_files=True + ) + + success = result["success"] + + assert success + + file_upload_mutation_2 = """ mutation($file1: Upload!, $file2: Upload!) { uploadFile(input:{file1:$file, file2:$file}) { From e92eee2b85d42b36089840544e486d0a91bbeb84 Mon Sep 17 00:00:00 2001 From: Hanusz Leszek Date: Sat, 10 Oct 2020 17:42:42 +0200 Subject: [PATCH 23/23] fix mypy --- tests/conftest.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 05b1ff14..c2a15605 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -194,20 +194,15 @@ class TemporaryFile: def __init__(self, content: Union[str, bytearray]): - open_params = {} + mode = "w" if isinstance(content, str) else "wb" - if isinstance(content, str): + # We need to set the newline to '' so that the line returns + # are not replaced by '\r\n' on windows + newline = "" if isinstance(content, str) else None - open_params["mode"] = "w" - - # We need to set the newline to '' so that the line returns - # are not replaced to '\r\n' on windows - open_params["newline"] = "" - - else: - open_params["mode"] = "wb" - - self.file = tempfile.NamedTemporaryFile(**open_params, delete=False) + self.file = tempfile.NamedTemporaryFile( + mode=mode, newline=newline, delete=False + ) with self.file as f: f.write(content)