fix by ruff
YoshitakaNaraoka committed Dec 27, 2023
1 parent 7e43c51 commit 3dabf6a
Showing 18 changed files with 88 additions and 93 deletions.
4 changes: 2 additions & 2 deletions describe.py
@@ -307,7 +307,7 @@ def document_collection(resource, path, root_discovery, discovery, css=CSS):
"""
collections = []
methods = []
- resource_name = path.split(".")[-2]
+ path.split(".")[-2]
html = [
"<html><body>",
css,
@@ -347,7 +347,7 @@ def document_collection(resource, path, root_discovery, discovery, css=CSS):
if methods:
html.append("<h3>Method Details</h3>")
for name in methods:
- dname = name.rsplit("_")[0]
+ name.rsplit("_")[0]
html.append(method(name, getattr(resource, name).__doc__))

html.append("</body></html>")
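
The two edits above remove assignments to names that are never read afterwards, which is what ruff's unused-variable rule (F841) reports; note that the fix shown keeps the right-hand expression as a bare statement. A minimal sketch of the pattern, with a hypothetical path value:

    # Before: the local name is assigned but never read (ruff F841).
    path = "books.volumes.list"
    resource_name = path.split(".")[-2]   # "volumes", but never used afterwards

    # After the fix: only the expression remains. Since the call has no
    # side effects, the line could also be deleted outright.
    path.split(".")[-2]
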
11 changes: 6 additions & 5 deletions googleapiclient/discovery.py
@@ -125,6 +125,7 @@
# Library-specific reserved words beyond Python keywords.
RESERVED_WORDS = frozenset(["body"])


# patch _write_lines to avoid munging '\r' into '\n'
# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
class _BytesGenerator(BytesGenerator):
@@ -427,8 +428,8 @@ def _retrieve_discovery_doc(
pass

try:
- service = json.loads(content)
- except ValueError as e:
+ json.loads(content)
+ except ValueError:
logger.error("Failed to parse as JSON: " + content)
raise InvalidJsonError()
if cache_discovery and cache:
@@ -611,7 +612,7 @@ def build_from_document(
# Obtain client cert and create mTLS http channel if cert exists.
client_cert_to_use = None
use_client_cert = os.getenv(GOOGLE_API_USE_CLIENT_CERTIFICATE, "false")
- if not use_client_cert in ("true", "false"):
+ if use_client_cert not in ("true", "false"):
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value. Accepted values: true, false"
)
@@ -656,7 +657,7 @@ def build_from_document(
)
use_mtls_endpoint = os.getenv(GOOGLE_API_USE_MTLS_ENDPOINT, "auto")

- if not use_mtls_endpoint in ("never", "auto", "always"):
+ if use_mtls_endpoint not in ("never", "auto", "always"):
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
@@ -1265,7 +1266,7 @@ def method(self, **kwargs):
enumDesc = paramdesc.get("enumDescriptions", [])
if enum and enumDesc:
docs.append(" Allowed values\n")
- for (name, desc) in zip(enum, enumDesc):
+ for name, desc in zip(enum, enumDesc):
docs.append(" %s - %s\n" % (name, desc))
if "response" in methodDesc:
if methodName.endswith("_media"):
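
Among the discovery.py changes, the membership-test rewrites correspond to the pycodestyle convention that ruff enforces as E713: a negated containment check reads better as the single operator not in, and the two forms are semantically equivalent. A small sketch with placeholder values:

    use_client_cert = "maybe"

    # Before (E713): the negation wraps the whole membership test.
    if not use_client_cert in ("true", "false"):
        print("unsupported value")

    # After: same behaviour, read as one operator.
    if use_client_cert not in ("true", "false"):
        print("unsupported value")
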
2 changes: 1 addition & 1 deletion googleapiclient/http.py
@@ -709,7 +709,7 @@ def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
# allow users to supply custom headers by setting them on the request
# but strip out the ones that are set by default on requests generated by
# API methods like Drive's files().get(fileId=...)
- if not k.lower() in ("accept", "accept-encoding", "user-agent"):
+ if k.lower() not in ("accept", "accept-encoding", "user-agent"):
self._headers[k] = v

@util.positional(1)
2 changes: 1 addition & 1 deletion googleapiclient/mimeparse.py
@@ -95,7 +95,7 @@ def fitness_and_quality_parsed(mime_type, parsed_ranges):
best_fitness = -1
best_fit_q = 0
(target_type, target_subtype, target_params) = parse_media_range(mime_type)
- for (type, subtype, params) in parsed_ranges:
+ for type, subtype, params in parsed_ranges:
type_match = type == target_type or type == "*" or target_type == "*"
subtype_match = (
subtype == target_subtype or subtype == "*" or target_subtype == "*"
64 changes: 32 additions & 32 deletions samples-index.py
@@ -65,17 +65,17 @@
def get_lines(name, lines):
"""Return lines that begin with name.
- Lines are expected to look like:
+ Lines are expected to look like:
- name: space separated values
+ name: space separated values
- Args:
- name: string, parameter name.
- lines: iterable of string, lines in the file.
+ Args:
+ name: string, parameter name.
+ lines: iterable of string, lines in the file.
- Returns:
- List of values in the lines that match.
- """
+ Returns:
+ List of values in the lines that match.
+ """
retval = []
matches = itertools.ifilter(lambda x: x.startswith(name + ":"), lines)
for line in matches:
@@ -96,16 +96,16 @@ def wiki_escape(s):
def context_from_sample(api, keywords, dirname, desc, uri):
"""Return info for expanding a sample into a template.
- Args:
- api: string, name of api.
- keywords: list of string, list of keywords for the given api.
- dirname: string, directory name of the sample.
- desc: string, long description of the sample.
- uri: string, uri of the sample code if provided in the README.
+ Args:
+ api: string, name of api.
+ keywords: list of string, list of keywords for the given api.
+ dirname: string, directory name of the sample.
+ desc: string, long description of the sample.
+ uri: string, uri of the sample code if provided in the README.
- Returns:
- A dictionary of values useful for template expansion.
- """
+ Returns:
+ A dictionary of values useful for template expansion.
+ """
if uri is None:
uri = BASE_HG_URI + dirname.replace("/", "%2F")
else:
@@ -131,17 +131,17 @@ def context_from_sample(api, keywords, dirname, desc, uri):
def keyword_context_from_sample(keywords, dirname, desc, uri):
"""Return info for expanding a sample into a template.
- Sample may not be about a specific api.
+ Sample may not be about a specific api.
- Args:
- keywords: list of string, list of keywords for the given api.
- dirname: string, directory name of the sample.
- desc: string, long description of the sample.
- uri: string, uri of the sample code if provided in the README.
+ Args:
+ keywords: list of string, list of keywords for the given api.
+ dirname: string, directory name of the sample.
+ desc: string, long description of the sample.
+ uri: string, uri of the sample code if provided in the README.
- Returns:
- A dictionary of values useful for template expansion.
- """
+ Returns:
+ A dictionary of values useful for template expansion.
+ """
if uri is None:
uri = BASE_HG_URI + dirname.replace("/", "%2F")
else:
@@ -158,13 +158,13 @@ def keyword_context_from_sample(keywords, dirname, desc, uri):
def scan_readme_files(dirname):
"""Scans all subdirs of dirname for README files.
- Args:
- dirname: string, name of directory to walk.
+ Args:
+ dirname: string, name of directory to walk.
- Returns:
- (samples, keyword_set): list of information about all samples, the union
- of all keywords found.
- """
+ Returns:
+ (samples, keyword_set): list of information about all samples, the union
+ of all keywords found.
+ """
samples = []
keyword_set = set()

5 changes: 2 additions & 3 deletions samples/blogger/blogger.py
@@ -53,7 +53,6 @@ def main(argv):
)

try:

users = service.users()

# Retrieve this user's profile information
@@ -73,9 +72,9 @@ def main(argv):
for blog in thisusersblogs["items"]:
print("The posts for %s:" % blog["name"])
request = posts.list(blogId=blog["id"])
- while request != None:
+ while request is not None:
posts_doc = request.execute()
if "items" in posts_doc and not (posts_doc["items"] is None):
if "items" in posts_doc and posts_doc["items"] is not None:
for post in posts_doc["items"]:
print(" %s (%s)" % (post["title"], post["url"]))
request = posts.list_next(request, posts_doc)
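
The loop and item checks above swap equality comparisons against None for identity tests, the pattern ruff reports as E711; because None is a singleton, "is" and "is not" are the idiomatic comparisons and cannot be confused by a custom __eq__ implementation. A minimal sketch with a stand-in request object:

    request = None  # stand-in for a paged API request; None means no more pages

    # Before (E711): equality comparison with None.
    while request != None:
        break

    # After: identity test against the None singleton.
    while request is not None:
        break
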
4 changes: 1 addition & 3 deletions samples/compute/create_instance.py
@@ -163,9 +163,7 @@ def main(project, bucket, zone, instance_name, wait=True):
It will take a minute or two for the instance to complete work.
Check this URL: http://storage.googleapis.com/{}/output.png
Once the image is uploaded press enter to delete the instance.
""".format(
bucket
)
""".format(bucket)
)

if wait:
2 changes: 1 addition & 1 deletion samples/coordinate/coordinate.py
@@ -112,7 +112,7 @@ def main(argv):

pprint.pprint(update_result)

- except client.AccessTokenRefreshError as e:
+ except client.AccessTokenRefreshError:
print(
"The credentials have been revoked or expired, please re-run"
"the application to re-authorize"
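
Dropping the unused "as e" binding on the handler is the same unused-variable cleanup (F841) seen earlier; when the handler never inspects the exception object, the name can simply go. A short sketch:

    # Before: "e" is bound but never referenced in the handler body.
    try:
        {}["missing"]
    except KeyError as e:
        print("the credentials have expired, please re-authorize")

    # After: identical behaviour without the unused binding.
    try:
        {}["missing"]
    except KeyError:
        print("the credentials have expired, please re-authorize")
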
4 changes: 1 addition & 3 deletions samples/customsearch/main.py
@@ -31,9 +31,7 @@ def main():
# Build a service object for interacting with the API. Visit
# the Google APIs Console <http://code.google.com/apis/console>
# to get an API key for your own application.
- service = build(
- "customsearch", "v1", developerKey="<YOUR DEVELOPER KEY>"
- )
+ service = build("customsearch", "v1", developerKey="<YOUR DEVELOPER KEY>")

res = (
service.cse()
4 changes: 1 addition & 3 deletions samples/groupssettings/groupsettings.py
@@ -58,9 +58,7 @@
with information from the APIs Console <https://code.google.com/apis/console>.
""" % os.path.join(
os.path.dirname(__file__), CLIENT_SECRETS
)
""" % os.path.join(os.path.dirname(__file__), CLIENT_SECRETS)


def access_settings(service, groupId, settings):
5 changes: 1 addition & 4 deletions samples/translate/main.py
@@ -27,13 +27,10 @@


def main():

# Build a service object for interacting with the API. Visit
# the Google APIs Console <http://code.google.com/apis/console>
# to get an API key for your own application.
- service = build(
- "translate", "v2", developerKey="<YOUR DEVELOPER KEY>"
- )
+ service = build("translate", "v2", developerKey="<YOUR DEVELOPER KEY>")
print(
service.translations()
.list(source="en", target="fr", q=["flower", "car"])
2 changes: 1 addition & 1 deletion scripts/buildprbody.py
@@ -80,7 +80,7 @@ def generate_pr_body(self):

prestable_and_breaking = (
dataframe[
(dataframe["IsStable"] == False)
(dataframe["IsStable"] is False)
& (dataframe["ChangeType"] == ChangeType.DELETED)
][["Name", "Version", "Commit"]]
.drop_duplicates()
7 changes: 4 additions & 3 deletions scripts/changesummary.py
@@ -136,7 +136,8 @@ def _get_discovery_differences(self, filename):
.reset_index(drop=True, level=1)
# Transpose the DataFrame, Resulting Columns should be
# ["Key", "CurrentValue", "New Value"]
.rename_axis(["Key"], axis=1).transpose()
.rename_axis(["Key"], axis=1)
.transpose()
# Drop the index column
.reset_index()
)
@@ -208,7 +209,7 @@ def _get_discovery_differences(self, filename):
all_added = (
parent_added_agg[
(parent_added_agg["Proportion"] == 1)
& (parent_added_agg["Added"] == True)
& (parent_added_agg["Added"] is True)
][["Parent", "NumLevels"]]
.sort_values("NumLevels", ascending=True)
.Parent.to_list()
@@ -220,7 +221,7 @@
all_deleted = (
parent_deleted_agg[
(parent_deleted_agg["Proportion"] == 1)
& (parent_deleted_agg["Deleted"] == True)
& (parent_deleted_agg["Deleted"] is True)
][["Parent", "NumLevels"]]
.sort_values("NumLevels", ascending=True)
.Parent.to_list()
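
A note on the boolean-column comparisons in these two scripts: for a pandas Series, "== True" is an elementwise comparison that yields a boolean mask, while "is True" is a plain Python identity test on the Series object and always evaluates to False. A minimal sketch of the difference, assuming pandas is available:

    import pandas as pd

    df = pd.DataFrame({"Added": [True, False, True]})

    mask = df["Added"] == True    # elementwise: Series [True, False, True]
    flag = df["Added"] is True    # identity: plain bool, always False for a Series

    print(df[mask])               # the two rows where Added is True
    print(flag)                   # False

    # The lint-friendly spelling of the same elementwise filter:
    print(df[df["Added"]])
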
6 changes: 2 additions & 4 deletions tests/test__auth.py
@@ -135,13 +135,11 @@ def test_default_credentials(self):

def test_credentials_from_file(self):
with self.assertRaises(EnvironmentError):
- credentials = _auth.credentials_from_file("credentials.json")
+ _auth.credentials_from_file("credentials.json")

def test_default_credentials_with_scopes_and_quota_project(self):
with self.assertRaises(EnvironmentError):
- credentials = _auth.default_credentials(
- scopes=["1", "2"], quota_project_id="my-project"
- )
+ _auth.default_credentials(scopes=["1", "2"], quota_project_id="my-project")

def test_with_scopes_non_scoped(self):
credentials = mock.Mock(spec=oauth2client.client.Credentials)
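
The test changes remove bindings whose values are never used; inside an assertRaises block the return value is irrelevant because the call is expected to raise before it returns. A self-contained sketch using unittest and a hypothetical missing file:

    import unittest


    class CredentialsTest(unittest.TestCase):
        def test_missing_file_raises(self):
            # No point binding the result: the call should raise before returning,
            # and an unused binding would be reported by ruff as F841.
            with self.assertRaises(FileNotFoundError):
                open("/nonexistent/credentials.json")


    if __name__ == "__main__":
        unittest.main()
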
