python: Remove unnecessary f-strings #18821

Merged · 2 commits · Apr 19, 2023
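This PR mechanically drops the `f` prefix from string literals that contain no `{...}` placeholders, the pattern flake8 reports as F541 ("f-string without any placeholders"). Such a prefix is pure noise: the f-string evaluates to exactly the same value as the plain literal. A minimal illustration of the rule, reusing a literal that appears later in this diff:

```python
# F541: the f-prefix does nothing here, so flake8 flags it.
print(f"--- Tagging Docker images")

# The equivalent plain literal, which this PR switches to.
print("--- Tagging Docker images")

# An f-string with a real placeholder is legitimate and is left untouched.
version = "0.50.0"  # hypothetical value
print(f"--- Tagging Docker images for v{version}")
```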
bin/pycheck (1 addition, 1 deletion)

```diff
@@ -36,6 +36,6 @@ if ! try_last_failed; then
         try bin/pyactivate -m mypy "$file"
     done
 fi
-try bin/pyactivate -m flake8 --select F --ignore F541 --extend-exclude venv "${flake8_folders[@]}"
+try bin/pyactivate -m flake8 --select F --extend-exclude venv "${flake8_folders[@]}"
 try bin/pyactivate -m pytest -qq --doctest-modules misc/python
 try_finish
```
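The `bin/pycheck` change above is what makes the cleanup stick: flake8 previously ran with `--ignore F541`, so placeholder-free f-strings were tolerated, and with the flag removed any reintroduced one now fails CI. A rough sketch of the same check through flake8's documented scripting entry point, `flake8.api.legacy` (the target folder is illustrative, not the actual `flake8_folders` expansion):

```python
from flake8.api.legacy import get_style_guide

# Select all pyflakes (F) checks, with no F541 carve-out, mirroring bin/pycheck.
style_guide = get_style_guide(select=["F"])
report = style_guide.check_files(["misc/python"])  # illustrative target
print(f"{report.total_errors} pyflakes error(s) found")
```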
ci/cleanup/aws.py (1 addition, 1 deletion)

```diff
@@ -86,7 +86,7 @@ def clean_up_sqs() -> None:


 def clean_up_ec2() -> None:
-    print(f"Terminating scratch ec2 instances whose age exceeds the deletion time")
+    print("Terminating scratch ec2 instances whose age exceeds the deletion time")
     olds = [i["InstanceId"] for i in scratch.get_old_instances()]
     if olds:
         print(f"Instances to delete: {olds}")
```
ci/deploy/docker.py (1 addition, 1 deletion)

```diff
@@ -29,7 +29,7 @@ def include_image(image: mzbuild.Image) -> bool:
         # tags.
         return image.publish and (not buildkite_tag or image.mainline)

-    print(f"--- Tagging Docker images")
+    print("--- Tagging Docker images")
     deps = [
         repo.resolve_dependencies(image for image in repo if include_image(image))
         for repo in repos
```
ci/deploy_mz/docker.py (1 addition, 1 deletion)

```diff
@@ -22,7 +22,7 @@ def main() -> None:
         mzbuild.Repository(Path("."), Arch.AARCH64, coverage=False),
     ]

-    print(f"--- Tagging Docker images")
+    print("--- Tagging Docker images")
     deps = [[repo.resolve_dependencies([repo.images["mz"]])["mz"]] for repo in repos]

     mzbuild.publish_multiarch_images(f"v{VERSION}", deps)
```
ci/deploy_mz/linux.py (3 additions, 3 deletions)

```diff
@@ -20,10 +20,10 @@ def main() -> None:
     repo = mzbuild.Repository(Path("."), coverage=False)
     target = f"{repo.rd.arch}-unknown-linux-gnu"

-    print(f"--- Checking version")
+    print("--- Checking version")
     assert repo.rd.cargo_workspace.crates["mz"].version == VERSION

-    print(f"--- Building mz")
+    print("--- Building mz")
     deps = repo.resolve_dependencies([repo.images["mz"]])
     deps.ensure()
     # Extract the mz binary from the Docker image.
@@ -47,7 +47,7 @@ def main() -> None:
     print(f"--- Uploading {target} binary tarball")
     deploy_util.deploy_tarball(target, mz)

-    print(f"--- Publishing Debian package")
+    print("--- Publishing Debian package")
     filename = f"mz_{VERSION}_{repo.rd.arch.go_str()}.deb"
     print(f"Publishing {filename}")
     spawn.runv(
```
ci/test/cargo-test/mzcompose.py (2 additions, 2 deletions)

```diff
@@ -27,7 +27,7 @@
         port="30123:30123",
         allow_host_ports=True,
         extra_environment=[
-            f"KAFKA_ADVERTISED_LISTENERS=HOST://localhost:30123,PLAINTEXT://kafka:9092",
+            "KAFKA_ADVERTISED_LISTENERS=HOST://localhost:30123,PLAINTEXT://kafka:9092",
             "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=HOST:PLAINTEXT,PLAINTEXT:PLAINTEXT",
         ],
     ),
@@ -60,7 +60,7 @@ def workflow_default(c: Composition, parser: WorkflowArgumentParser) -> None:
         env=dict(
             os.environ,
             ZOOKEEPER_ADDR=f"localhost:{c.default_port('zookeeper')}",
-            KAFKA_ADDRS=f"localhost:30123",
+            KAFKA_ADDRS="localhost:30123",
             SCHEMA_REGISTRY_URL=f"http://localhost:{c.default_port('schema-registry')}",
             POSTGRES_URL=postgres_url,
             COCKROACH_URL=cockroach_url,
```
ci/test/dev_tag.py (1 addition, 1 deletion)

```diff
@@ -20,7 +20,7 @@ def main() -> None:
         mzbuild.Repository(Path("."), Arch.X86_64, coverage=False),
         mzbuild.Repository(Path("."), Arch.AARCH64, coverage=False),
     ]
-    print(f"--- Tagging development Docker images")
+    print("--- Tagging development Docker images")
     deps = [
         repo.resolve_dependencies(image for image in repo if image.publish)
         for repo in repos
```
misc/dbt-materialize/dbt/adapters/materialize/impl.py (2 additions, 2 deletions)

```diff
@@ -45,9 +45,9 @@ def parse(cls, raw_index) -> Optional["MaterializeIndexConfig"]:
             dbt.exceptions.CompilationError(f"Could not parse index config: {msg}")
         except TypeError:
             dbt.exceptions.CompilationError(
-                f"Invalid index config:\n"
+                "Invalid index config:\n"
                 f"  Got: {raw_index}\n"
-                f'  Expected a dictionary with at minimum a "columns" key'
+                '  Expected a dictionary with at minimum a "columns" key'
             )


```
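The hunk above is the one mildly subtle case in this PR: adjacent string literals that Python concatenates implicitly. Each literal carries its own prefix, so only the pieces that actually interpolate something keep the `f`. The same pattern in isolation, with a hypothetical input value:

```python
raw_index = ["not", "a", "dict"]  # hypothetical bad input

# Python joins the three adjacent literals into one string; only the
# middle piece interpolates anything, so only it needs the f-prefix.
message = (
    "Invalid index config:\n"
    f"  Got: {raw_index}\n"
    '  Expected a dictionary with at minimum a "columns" key'
)
print(message)
```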
misc/python/materialize/benches/avro_ingest.py (1 addition, 1 deletion)

```diff
@@ -139,7 +139,7 @@ def main() -> None:
         TO KAFKA (BROKER '{args.confluent_host}:9092')"""
     )
     cur.execute(
-        f"""CREATE SOURCE src
+        """CREATE SOURCE src
         FROM KAFKA CONNECTION kafka_conn (TOPIC 'bench_data')
         FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn"""
     )
```
misc/python/materialize/checks/owners.py (1 addition, 1 deletion)

```diff
@@ -120,7 +120,7 @@ def validate(self) -> Testdrive:
             + self._create_objects("owner_role_02", 6)
             + self._create_objects("owner_role_03", 7)
             + dedent(
-                f"""
+                """
                 $ psql-execute command="\\l owner_db*"
                 \\ List of databases
                 Name | Owner | Encoding | Collate | Ctype | Access privileges
```
misc/python/materialize/ci_util/__init__.py (1 addition, 1 deletion)

```diff
@@ -80,7 +80,7 @@ def get_artifacts() -> Any:
     if "CI" not in os.environ:
         return []

-    ui.header(f"Getting artifact informations from Buildkite")
+    ui.header("Getting artifact informations from Buildkite")
     build = os.environ["BUILDKITE_BUILD_NUMBER"]
     build_id = os.environ["BUILDKITE_BUILD_ID"]
     job = os.environ["BUILDKITE_JOB_ID"]
```
misc/python/materialize/cli/ci_logged_errors_detect.py (1 addition, 1 deletion)

```diff
@@ -199,7 +199,7 @@ def get_known_issues_from_github() -> list[KnownIssue]:
         headers["Authorization"] = f"Bearer {token}"

     response = requests.get(
-        f'https://api.github.com/search/issues?q=repo:MaterializeInc/materialize%20type:issue%20in:body%20"ci-regexp%3A"',
+        'https://api.github.com/search/issues?q=repo:MaterializeInc/materialize%20type:issue%20in:body%20"ci-regexp%3A"',
         headers=headers,
     )

```
misc/python/materialize/cli/run.py (4 additions, 4 deletions)

```diff
@@ -26,7 +26,7 @@
 KNOWN_PROGRAMS = ["environmentd", "sqllogictest"]
 REQUIRED_SERVICES = ["clusterd"]

-DEFAULT_POSTGRES = f"postgres://root@localhost:26257/materialize"
+DEFAULT_POSTGRES = "postgres://root@localhost:26257/materialize"

 # sets entitlements on the built binary, e.g. environmentd, so you can inspect it with Instruments
 MACOS_ENTITLEMENTS_DATA = """
@@ -186,10 +186,10 @@ def main() -> int:
             # Setting the listen addresses below to 0.0.0.0 is required
             # to allow Prometheus running in Docker (misc/prometheus)
             # access these services to scrape metrics.
-            f"--internal-http-listen-addr=0.0.0.0:6878",
-            f"--orchestrator=process",
+            "--internal-http-listen-addr=0.0.0.0:6878",
+            "--orchestrator=process",
             f"--orchestrator-process-secrets-directory={mzdata}/secrets",
-            f"--orchestrator-process-tcp-proxy-listen-addr=0.0.0.0",
+            "--orchestrator-process-tcp-proxy-listen-addr=0.0.0.0",
             f"--orchestrator-process-prometheus-service-discovery-directory={mzdata}/prometheus",
             f"--persist-consensus-url={args.postgres}?options=--search_path=consensus",
             f"--persist-blob-url=file://{mzdata}/persist/blob",
```
misc/python/materialize/mzcompose/__init__.py (1 addition, 1 deletion)

```diff
@@ -179,7 +179,7 @@ def _munge_services(
             # `allow_host_ports` is `True`
             raise UIError(
                 "programming error: disallowed host port in service {name!r}",
-                hint=f'Add `"allow_host_ports": True` to the service config to disable this check.',
+                hint='Add `"allow_host_ports": True` to the service config to disable this check.',
             )

         if "allow_host_ports" in config:
```
misc/python/materialize/mzcompose/services.py (4 additions, 4 deletions)

```diff
@@ -75,7 +75,7 @@ def __init__(
                 # TODO(benesch): remove the following environment variables
                 # after v0.38 ships, since these environment variables will be
                 # baked into the Docker image.
-                f"MZ_ORCHESTRATOR=process",
+                "MZ_ORCHESTRATOR=process",
                 # Please think twice before forwarding additional environment
                 # variables from the host, as it's easy to write tests that are
                 # then accidentally dependent on the state of the host machine.
@@ -133,7 +133,7 @@ def __init__(

         command += [
             "--orchestrator-process-tcp-proxy-listen-addr=0.0.0.0",
-            f"--orchestrator-process-prometheus-service-discovery-directory=/mzdata/prometheus",
+            "--orchestrator-process-prometheus-service-discovery-directory=/mzdata/prometheus",
         ]

         command += options
@@ -668,7 +668,7 @@ class Localstack(Service):
     def __init__(
         self,
         name: str = "localstack",
-        image: str = f"localstack/localstack:0.13.1",
+        image: str = "localstack/localstack:0.13.1",
         port: int = 4566,
         environment: List[str] = ["HOSTNAME_EXTERNAL=localstack"],
         volumes: List[str] = ["/var/run/docker.sock:/var/run/docker.sock"],
@@ -694,7 +694,7 @@ class Minio(Service):
     def __init__(
         self,
         name: str = "minio",
-        image: str = f"minio/minio:RELEASE.2022-09-25T15-44-53Z.fips",
+        image: str = "minio/minio:RELEASE.2022-09-25T15-44-53Z.fips",
         setup_materialize: bool = False,
     ) -> None:
         # We can pre-create buckets in minio by creating subdirectories in
```
misc/python/materialize/optbench/sql.py (4 additions, 4 deletions)

```diff
@@ -47,14 +47,14 @@ def explain(self, timing: bool, dialect: Dialect = Dialect.MZ) -> str:

         if dialect == Dialect.PG:
             if timing:
-                return "\n".join([f"EXPLAIN (ANALYZE, TIMING TRUE)", self.query])
+                return "\n".join(["EXPLAIN (ANALYZE, TIMING TRUE)", self.query])
             else:
-                return "\n".join([f"EXPLAIN", self.query])
+                return "\n".join(["EXPLAIN", self.query])
         else:
             if timing:
-                return "\n".join([f"EXPLAIN WITH(timing)", self.query])
+                return "\n".join(["EXPLAIN WITH(timing)", self.query])
             else:
-                return "\n".join([f"EXPLAIN", self.query])
+                return "\n".join(["EXPLAIN", self.query])


 class ExplainOutput:
```
misc/python/materialize/xcompile.py (9 additions, 9 deletions)

```diff
@@ -82,16 +82,16 @@ def cargo(
         sysroot = spawn.capture([f"{_target}-cc", "-print-sysroot"]).strip()
         rustflags += [f"-L{sysroot}/lib"]
         extra_env = {
-            f"CMAKE_SYSTEM_NAME": "Linux",
+            "CMAKE_SYSTEM_NAME": "Linux",
             f"CARGO_TARGET_{_target_env}_LINKER": f"{_target}-cc",
-            f"CARGO_TARGET_DIR": str(ROOT / "target-xcompile"),
-            f"TARGET_AR": f"{_target}-ar",
-            f"TARGET_CPP": f"{_target}-cpp",
-            f"TARGET_CC": f"{_target}-cc",
-            f"TARGET_CXX": f"{_target}-c++",
-            f"TARGET_CXXSTDLIB": "static=stdc++",
-            f"TARGET_LD": f"{_target}-ld",
-            f"TARGET_RANLIB": f"{_target}-ranlib",
+            "CARGO_TARGET_DIR": str(ROOT / "target-xcompile"),
+            "TARGET_AR": f"{_target}-ar",
+            "TARGET_CPP": f"{_target}-cpp",
+            "TARGET_CC": f"{_target}-cc",
+            "TARGET_CXX": f"{_target}-c++",
+            "TARGET_CXXSTDLIB": "static=stdc++",
+            "TARGET_LD": f"{_target}-ld",
+            "TARGET_RANLIB": f"{_target}-ranlib",
         }
     else:
         # NOTE(benesch): The required Rust flags have to be duplicated with
```
misc/python/materialize/zippy/peek_actions.py (1 addition, 1 deletion)

```diff
@@ -25,7 +25,7 @@ def requires(self) -> Set[Type[Capability]]:
     def run(self, c: Composition) -> None:
         c.testdrive(
             dedent(
-                f"""
+                """
                 > DROP TABLE IF EXISTS peek_cancellation;
                 > CREATE TABLE IF NOT EXISTS peek_cancellation (f1 INTEGER);
                 > INSERT INTO peek_cancellation SELECT generate_series(1, 1000);
```
misc/python/materialize/zippy/postgres_actions.py (1 addition, 1 deletion)

```diff
@@ -83,7 +83,7 @@ def __init__(self, capabilities: Capabilities) -> None:

     def run(self, c: Composition) -> None:
         if self.new_postgres_table:
-            primary_key = f"PRIMARY KEY" if self.postgres_table.has_pk else ""
+            primary_key = "PRIMARY KEY" if self.postgres_table.has_pk else ""
             c.testdrive(
                 dedent(
                     f"""
```
test/bounded-memory/mzcompose.py (6 additions, 6 deletions)

```diff
@@ -49,7 +49,7 @@ class Scenario:

 class PgCdcScenario(Scenario):
     PG_SETUP = dedent(
-        f"""
+        """
         > CREATE SECRET pgpass AS 'postgres'
         > CREATE CONNECTION pg FOR POSTGRES
           HOST postgres,
@@ -67,7 +67,7 @@ class PgCdcScenario(Scenario):
         """
     )
     MZ_SETUP = dedent(
-        f"""
+        """
         > CREATE SOURCE mz_source
           IN CLUSTER clusterd
           FROM POSTGRES CONNECTION pg (PUBLICATION 'mz_source')
@@ -124,7 +124,7 @@ class KafkaScenario(Scenario):
     )

     END_MARKER = dedent(
-        f"""
+        """
         $ kafka-ingest format=avro key-format=avro topic=topic1 schema=${{value-schema}} key-schema=${{key-schema}}
         "ZZZ" {{"f1": "END MARKER"}}
         """
```
```diff
@@ -187,7 +187,7 @@ class KafkaScenario(Scenario):
         + "\n".join(
             [
                 dedent(
-                    f"""
+                    """
                     $ postgres-execute connection=postgres://postgres:postgres@postgres
                     UPDATE t1 SET f2 = f2 + 1;
                     """
@@ -289,7 +289,7 @@ class KafkaScenario(Scenario):
         )
         + KafkaScenario.END_MARKER
         + dedent(
-            f"""
+            """
             # Expect just the two MARKERs
             > SELECT * FROM v1;
             2
@@ -323,7 +323,7 @@ class KafkaScenario(Scenario):
             ]
         )
         + dedent(
-            f"""
+            """
             > SELECT * FROM v1;
             0
             """
```
test/cloudtest/test_compute_shared_fate.py (1 addition, 1 deletion)

```diff
@@ -92,7 +92,7 @@ def kill_clusterd(
     mz: MaterializeApplication, compute_id: int, signal: str = "SIGKILL"
 ) -> None:
     cluster_id, replica_id = mz.environmentd.sql_query(
-        f"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'shared_fate_replica'"
+        "SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'shared_fate_replica'"
     )[0]

     pod_name = cluster_pod_name(cluster_id, replica_id, compute_id)
```
test/cloudtest/test_crash.py (3 additions, 3 deletions)

```diff
@@ -87,7 +87,7 @@ def test_crash_storage(mz: MaterializeApplication) -> None:
     populate(mz, 1)

     [cluster_id, replica_id] = mz.environmentd.sql_query(
-        f"SELECT s.cluster_id, r.id FROM mz_sources s JOIN mz_cluster_replicas r ON r.cluster_id = s.cluster_id WHERE s.name = 's1'"
+        "SELECT s.cluster_id, r.id FROM mz_sources s JOIN mz_cluster_replicas r ON r.cluster_id = s.cluster_id WHERE s.name = 's1'"
     )[0]
     pod_name = cluster_pod_name(cluster_id, replica_id)

@@ -106,8 +106,8 @@ def restarts(p: V1Pod) -> int:

     def get_replica() -> Tuple[V1Pod, V1StatefulSet]:
         """Find the stateful set for the replica of the default cluster"""
-        compute_pod_name = f"cluster-u1-replica-1-0"
-        ss_name = f"cluster-u1-replica-1"
+        compute_pod_name = "cluster-u1-replica-1-0"
+        ss_name = "cluster-u1-replica-1"
         compute_pod = mz.environmentd.api().read_namespaced_pod(
             compute_pod_name, mz.environmentd.namespace()
         )
```
test/cloudtest/test_secrets.py (4 additions, 4 deletions)

```diff
@@ -171,11 +171,11 @@ def test_missing_secret(mz: MaterializeApplication) -> None:
     # Restart the storage computed and confirm that the source errors out properly

     cluster_id, replica_id = mz.environmentd.sql_query(
-        f"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'to_be_killed'"
+        "SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'to_be_killed'"
     )[0]
     pod_name = cluster_pod_name(cluster_id, replica_id, 0)

-    mz.kubectl("exec", pod_name, "--", "bash", "-c", f"kill -9 `pidof clusterd`")
+    mz.kubectl("exec", pod_name, "--", "bash", "-c", "kill -9 `pidof clusterd`")
     wait(condition="condition=Ready", resource=f"{pod_name}")

     mz.testdrive.run(
@@ -209,9 +209,9 @@ def test_missing_secret(mz: MaterializeApplication) -> None:
         "--",
         "bash",
         "-c",
-        f"kill -9 `pidof environmentd`",
+        "kill -9 `pidof environmentd`",
     )
-    wait(condition="condition=Ready", resource=f"pod/environmentd-0")
+    wait(condition="condition=Ready", resource="pod/environmentd-0")

     mz.testdrive.run(
         input=dedent(
```
test/cloudtest/test_ssh_tunnels.py (1 addition, 1 deletion)

```diff
@@ -94,7 +94,7 @@ def test_ssh_tunnels(mz: MaterializeApplication) -> None:
         no_reset=True,
     )

-    environmentd_pod_name = f"pod/environmentd-0"
+    environmentd_pod_name = "pod/environmentd-0"

     # Kill environmentd to force a restart, to test reloading secrets on restart
     mz.kubectl(
```
test/cloudtest/test_storage_shared_fate.py (1 addition, 1 deletion)

```diff
@@ -126,7 +126,7 @@ def kill_clusterd(
     mz: MaterializeApplication, compute_id: int, signal: str = "SIGKILL"
 ) -> None:
     cluster_id, replica_id = mz.environmentd.sql_query(
-        f"SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'storage_shared_fate_replica'"
+        "SELECT cluster_id, id FROM mz_cluster_replicas WHERE name = 'storage_shared_fate_replica'"
     )[0]

     pod_name = cluster_pod_name(cluster_id, replica_id, compute_id)
```