Skip to content

Commit

Permalink
Enable flake8's check for useless f-strings
Browse files Browse the repository at this point in the history
The check has been disabled since the start: #7845
  • Loading branch information
def- committed Apr 19, 2023
1 parent bbf9365 commit e5c3522
Show file tree
Hide file tree
Showing 16 changed files with 70 additions and 70 deletions.
2 changes: 1 addition & 1 deletion bin/pycheck
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,6 @@ if ! try_last_failed; then
try bin/pyactivate -m mypy "$file"
done
fi
try bin/pyactivate -m flake8 --select F --ignore F541 --extend-exclude venv "${flake8_folders[@]}"
try bin/pyactivate -m flake8 --select F --extend-exclude venv "${flake8_folders[@]}"
try bin/pyactivate -m pytest -qq --doctest-modules misc/python
try_finish
2 changes: 1 addition & 1 deletion misc/python/materialize/benches/avro_ingest.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ def main() -> None:
TO KAFKA (BROKER '{args.confluent_host}:9092')"""
)
cur.execute(
f"""CREATE SOURCE src
"""CREATE SOURCE src
FROM KAFKA CONNECTION kafka_conn (TOPIC 'bench_data')
FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn"""
)
Expand Down
2 changes: 1 addition & 1 deletion misc/python/materialize/checks/owners.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def validate(self) -> Testdrive:
+ self._create_objects("owner_role_02", 6)
+ self._create_objects("owner_role_03", 7)
+ dedent(
f"""
"""
$ psql-execute command="\\l owner_db*"
\\ List of databases
Name | Owner | Encoding | Collate | Ctype | Access privileges
Expand Down
2 changes: 1 addition & 1 deletion misc/python/materialize/cli/ci_logged_errors_detect.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def get_known_issues_from_github() -> list[KnownIssue]:
headers["Authorization"] = f"Bearer {token}"

response = requests.get(
f'https://api.github.com/search/issues?q=repo:MaterializeInc/materialize%20type:issue%20in:body%20"ci-regexp%3A"',
'https://api.github.com/search/issues?q=repo:MaterializeInc/materialize%20type:issue%20in:body%20"ci-regexp%3A"',
headers=headers,
)

Expand Down
2 changes: 1 addition & 1 deletion misc/python/materialize/mzcompose/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def _munge_services(
# `allow_host_ports` is `True`
raise UIError(
"programming error: disallowed host port in service {name!r}",
hint=f'Add `"allow_host_ports": True` to the service config to disable this check.',
hint='Add `"allow_host_ports": True` to the service config to disable this check.',
)

if "allow_host_ports" in config:
Expand Down
4 changes: 2 additions & 2 deletions misc/python/materialize/mzcompose/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -668,7 +668,7 @@ class Localstack(Service):
def __init__(
self,
name: str = "localstack",
image: str = f"localstack/localstack:0.13.1",
image: str = "localstack/localstack:0.13.1",
port: int = 4566,
environment: List[str] = ["HOSTNAME_EXTERNAL=localstack"],
volumes: List[str] = ["/var/run/docker.sock:/var/run/docker.sock"],
Expand All @@ -694,7 +694,7 @@ class Minio(Service):
def __init__(
self,
name: str = "minio",
image: str = f"minio/minio:RELEASE.2022-09-25T15-44-53Z.fips",
image: str = "minio/minio:RELEASE.2022-09-25T15-44-53Z.fips",
setup_materialize: bool = False,
) -> None:
# We can pre-create buckets in minio by creating subdirectories in
Expand Down
2 changes: 1 addition & 1 deletion misc/python/materialize/zippy/peek_actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def requires(self) -> Set[Type[Capability]]:
def run(self, c: Composition) -> None:
c.testdrive(
dedent(
f"""
"""
> DROP TABLE IF EXISTS peek_cancellation;
> CREATE TABLE IF NOT EXISTS peek_cancellation (f1 INTEGER);
> INSERT INTO peek_cancellation SELECT generate_series(1, 1000);
Expand Down
12 changes: 6 additions & 6 deletions test/bounded-memory/mzcompose.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class Scenario:

class PgCdcScenario(Scenario):
PG_SETUP = dedent(
f"""
"""
> CREATE SECRET pgpass AS 'postgres'
> CREATE CONNECTION pg FOR POSTGRES
HOST postgres,
Expand All @@ -67,7 +67,7 @@ class PgCdcScenario(Scenario):
"""
)
MZ_SETUP = dedent(
f"""
"""
> CREATE SOURCE mz_source
IN CLUSTER clusterd
FROM POSTGRES CONNECTION pg (PUBLICATION 'mz_source')
Expand Down Expand Up @@ -124,7 +124,7 @@ class KafkaScenario(Scenario):
)

END_MARKER = dedent(
f"""
"""
$ kafka-ingest format=avro key-format=avro topic=topic1 schema=${{value-schema}} key-schema=${{key-schema}}
"ZZZ" {{"f1": "END MARKER"}}
"""
Expand Down Expand Up @@ -187,7 +187,7 @@ class KafkaScenario(Scenario):
+ "\n".join(
[
dedent(
f"""
"""
$ postgres-execute connection=postgres://postgres:postgres@postgres
UPDATE t1 SET f2 = f2 + 1;
"""
Expand Down Expand Up @@ -289,7 +289,7 @@ class KafkaScenario(Scenario):
)
+ KafkaScenario.END_MARKER
+ dedent(
f"""
"""
# Expect just the two MARKERs
> SELECT * FROM v1;
2
Expand Down Expand Up @@ -323,7 +323,7 @@ class KafkaScenario(Scenario):
]
)
+ dedent(
f"""
"""
> SELECT * FROM v1;
0
"""
Expand Down
8 changes: 4 additions & 4 deletions test/cloudtest/test_secrets.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,11 +171,11 @@ def test_missing_secret(mz: MaterializeApplication) -> None:
# Restart the storage computed and confirm that the source errors out properly

cluster_id, replica_id = mz.environmentd.sql_query(
f"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'to_be_killed'"
"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'to_be_killed'"
)[0]
pod_name = cluster_pod_name(cluster_id, replica_id, 0)

mz.kubectl("exec", pod_name, "--", "bash", "-c", f"kill -9 `pidof clusterd`")
mz.kubectl("exec", pod_name, "--", "bash", "-c", "kill -9 `pidof clusterd`")
wait(condition="condition=Ready", resource=f"{pod_name}")

mz.testdrive.run(
Expand Down Expand Up @@ -209,9 +209,9 @@ def test_missing_secret(mz: MaterializeApplication) -> None:
"--",
"bash",
"-c",
f"kill -9 `pidof environmentd`",
"kill -9 `pidof environmentd`",
)
wait(condition="condition=Ready", resource=f"pod/environmentd-0")
wait(condition="condition=Ready", resource="pod/environmentd-0")

mz.testdrive.run(
input=dedent(
Expand Down
2 changes: 1 addition & 1 deletion test/cloudtest/test_storage_shared_fate.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def kill_clusterd(
mz: MaterializeApplication, compute_id: int, signal: str = "SIGKILL"
) -> None:
cluster_id, replica_id = mz.environmentd.sql_query(
f"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'storage_shared_fate_replica'"
"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'storage_shared_fate_replica'"
)[0]

pod_name = cluster_pod_name(cluster_id, replica_id, compute_id)
Expand Down
10 changes: 5 additions & 5 deletions test/cluster/mzcompose.py
Original file line number Diff line number Diff line change
Expand Up @@ -570,7 +570,7 @@ def workflow_test_github_15496(c: Composition) -> None:
)
c.testdrive(
dedent(
f"""
"""
> SET cluster = cluster1;
# Run a query that would generate a panic before the fix.
Expand Down Expand Up @@ -618,7 +618,7 @@ def workflow_test_github_17177(c: Composition) -> None:
)
c.testdrive(
dedent(
f"""
"""
# Set data for test up
> SET cluster = cluster1;
Expand Down Expand Up @@ -719,7 +719,7 @@ def workflow_test_github_17510(c: Composition) -> None:
)
c.testdrive(
dedent(
f"""
"""
> SET cluster = cluster1;
# Run a queries that would generate panics before the fix.
Expand Down Expand Up @@ -860,7 +860,7 @@ def workflow_test_github_17509(c: Composition) -> None:
)
c.testdrive(
dedent(
f"""
"""
> SET cluster = cluster1;
# The query below would return a null previously, but now fails cleanly.
Expand Down Expand Up @@ -1320,7 +1320,7 @@ def check_mz_subscriptions(expected: Tuple) -> None:
future.
"""
output = c.sql_query(
f"""
"""
SELECT s.user, c.name, t.name
FROM mz_internal.mz_subscriptions s
JOIN mz_clusters c ON (c.id = s.cluster_id)
Expand Down
16 changes: 8 additions & 8 deletions test/feature-benchmark/scenarios.py
Original file line number Diff line number Diff line change
Expand Up @@ -900,7 +900,7 @@ class KafkaEnvelopeNoneBytesScalability(ScenarioBig):
def shared(self) -> List[Action]:
return [
TdAction(
f"""
"""
$ kafka-create-topic topic=kafka-scalability partitions=8
"""
),
Expand Down Expand Up @@ -1039,7 +1039,7 @@ def shared(self) -> Action:

def before(self) -> Action:
return TdAction(
f"""
"""
> DROP SOURCE IF EXISTS mz_source_pgcdc;
"""
)
Expand Down Expand Up @@ -1075,7 +1075,7 @@ class PgCdcStreaming(PgCdc):

def shared(self) -> Action:
return TdAction(
f"""
"""
$ postgres-execute connection=postgres://postgres:postgres@postgres
ALTER USER postgres WITH replication;
DROP SCHEMA IF EXISTS public CASCADE;
Expand All @@ -1088,7 +1088,7 @@ def shared(self) -> Action:

def before(self) -> Action:
return TdAction(
f"""
"""
> DROP SOURCE IF EXISTS s1;
$ postgres-execute connection=postgres://postgres:postgres@postgres
Expand Down Expand Up @@ -1144,7 +1144,7 @@ class QueryLatency(Coordinator):
"""Measure the time it takes to run SELECT 1 queries"""

def benchmark(self) -> MeasurementSource:
selects = "\n".join(f"> SELECT 1\n1\n" for i in range(0, self.n()))
selects = "\n".join("> SELECT 1\n1\n" for i in range(0, self.n()))

return Td(
f"""
Expand All @@ -1170,7 +1170,7 @@ class ConnectionLatency(Coordinator):

def benchmark(self) -> MeasurementSource:
connections = "\n".join(
f"""
"""
$ postgres-execute connection=postgres://materialize:materialize@${{testdrive.materialize-sql-addr}}
SELECT 1;
"""
Expand Down Expand Up @@ -1205,7 +1205,7 @@ def benchmark(self) -> BenchmarkingSequence:
return [
Lambda(lambda e: e.RestartMz()),
Td(
f"""
"""
> SELECT 1;
/* B */
1
Expand All @@ -1222,7 +1222,7 @@ class StartupLoaded(Scenario):
def shared(self) -> Action:
return TdAction(
self.schema()
+ f"""
+ """
$ kafka-create-topic topic=startup-time
$ kafka-ingest format=avro topic=startup-time schema=${{schema}} repeat=1
Expand Down
4 changes: 2 additions & 2 deletions test/feature-benchmark/scenarios_subscribe.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def insert(self) -> str:
class SubscribeParallelTable(SubscribeParallel):
def create_subscribe_source(self) -> str:
return dedent(
f"""
"""
> DROP TABLE IF EXISTS s1;
> CREATE TABLE s1 (f1 TEXT);
"""
Expand All @@ -89,7 +89,7 @@ def insert(self) -> str:
class SubscribeParallelTableWithIndex(SubscribeParallel):
def create_subscribe_source(self) -> str:
return dedent(
f"""
"""
> DROP TABLE IF EXISTS s1;
> CREATE TABLE s1 (f1 INTEGER);
> CREATE DEFAULT INDEX ON s1;
Expand Down
Loading

0 comments on commit e5c3522

Please sign in to comment.