Skip to content
Permalink
Browse files
docs(bigquery): consistent use of optional keyword (#153)
* docs(bigquery): consistent use of optional keyword

* docs(bigquery): nit
  • Loading branch information
HemangChothani committed Jun 30, 2020
1 parent a2d5ce9 commit 79d8c61064cca18b596a24b6f738c7611721dd5c
@@ -471,16 +471,16 @@ def _get_sub_prop(container, keys, default=None):
This method works like ``dict.get(key)``, but for nested values.
Arguments:
Args:
container (Dict):
A dictionary which may contain other dictionaries as values.
keys (Iterable):
A sequence of keys to attempt to get the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
default (object):
(Optional) Value to return if any of the keys are not found.
default (Optional[object]):
Value to return if any of the keys are not found.
Defaults to ``None``.
Examples:
@@ -514,7 +514,7 @@ def _get_sub_prop(container, keys, default=None):
def _set_sub_prop(container, keys, value):
"""Set a nested value in a dictionary.
Arguments:
Args:
container (Dict):
A dictionary which may contain other dictionaries as values.
keys (Iterable):
@@ -557,7 +557,7 @@ def _set_sub_prop(container, keys, value):
def _del_sub_prop(container, keys):
"""Remove a nested key fro a dictionary.
Arguments:
Args:
container (Dict):
A dictionary which may contain other dictionaries as values.
keys (Iterable):
@@ -25,7 +25,7 @@ class Connection(_http.JSONConnection):
Args:
client (google.cloud.bigquery.client.Client): The client that owns the current connection.
client_info (google.api_core.client_info.ClientInfo): (Optional) instance used to generate user agent.
client_info (Optional[google.api_core.client_info.ClientInfo]): Instance used to generate user agent.
"""

DEFAULT_API_ENDPOINT = "https://bigquery.googleapis.com"
@@ -472,10 +472,9 @@ def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SN
columns in the DataFrame.
filepath (str):
Path to write Parquet file to.
parquet_compression (str):
(optional) The compression codec to use by the
``pyarrow.parquet.write_table`` serializing method. Defaults to
"SNAPPY".
parquet_compression (Optional[str]):
The compression codec to use by the ``pyarrow.parquet.write_table``
serializing method. Defaults to "SNAPPY".
https://arrow.apache.org/docs/python/generated/pyarrow.parquet.write_table.html#pyarrow-parquet-write-table
"""
if pyarrow is None:

Large diffs are not rendered by default.

@@ -295,9 +295,9 @@ def from_string(cls, dataset_id, default_project=None):
A dataset ID in standard SQL format. If ``default_project``
is not specified, this must include both the project ID and
the dataset ID, separated by ``.``.
default_project (str):
Optional. The project ID to use when ``dataset_id`` does not
include a project ID.
default_project (Optional[str]):
The project ID to use when ``dataset_id`` does not include a
project ID.
Returns:
DatasetReference:
Loading

0 comments on commit 79d8c61

Please sign in to comment.