Skip to content

Commit

Permalink
Use f-string instead of `.format()` in Airflow providers (#33752)
Browse files Browse the repository at this point in the history
  • Loading branch information
hussein-awala committed Aug 26, 2023
1 parent faf3253 commit c90eec9
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 20 deletions.
6 changes: 2 additions & 4 deletions airflow/providers/apache/hive/sensors/metastore_partition.py
Expand Up @@ -72,7 +72,7 @@ def poke(self, context: Context) -> Any:
self.first_poke = False
if "." in self.table:
self.schema, self.table = self.table.split(".")
self.sql = """
self.sql = f"""
SELECT 'X'
FROM PARTITIONS A0
LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
Expand All @@ -81,7 +81,5 @@ def poke(self, context: Context) -> Any:
B0.TBL_NAME = '{self.table}' AND
C0.NAME = '{self.schema}' AND
A0.PART_NAME = '{self.partition_name}';
""".format(
self=self
)
"""
return super().poke(context)
8 changes: 3 additions & 5 deletions airflow/providers/databricks/hooks/databricks.py
Expand Up @@ -75,11 +75,9 @@ def is_terminal(self) -> bool:
"""True if the current state is a terminal state."""
if self.life_cycle_state not in RUN_LIFE_CYCLE_STATES:
raise AirflowException(
(
"Unexpected life cycle state: {}: If the state has "
"been introduced recently, please check the Databricks user "
"guide for troubleshooting information"
).format(self.life_cycle_state)
f"Unexpected life cycle state: {self.life_cycle_state}: If the state has "
"been introduced recently, please check the Databricks user "
"guide for troubleshooting information"
)
return self.life_cycle_state in ("TERMINATED", "SKIPPED", "INTERNAL_ERROR")

Expand Down
4 changes: 2 additions & 2 deletions airflow/providers/elasticsearch/hooks/elasticsearch.py
Expand Up @@ -116,11 +116,11 @@ def get_uri(self) -> str:

login = ""
if conn.login:
login = "{conn.login}:{conn.password}@".format(conn=conn)
login = f"{conn.login}:{conn.password}@"
host = conn.host
if conn.port is not None:
host += f":{conn.port}"
uri = "{conn.conn_type}+{conn.schema}://{login}{host}/".format(conn=conn, login=login, host=host)
uri = f"{conn.conn_type}+{conn.schema}://{login}{host}/"

extras_length = len(conn.extra_dejson)
if not extras_length:
Expand Down
10 changes: 4 additions & 6 deletions airflow/providers/google/cloud/hooks/bigquery.py
Expand Up @@ -3033,12 +3033,10 @@ def _api_resource_configs_duplication_check(
) -> None:
if key in config_dict and value != config_dict[key]:
raise ValueError(
"Values of {param_name} param are duplicated. "
"{dict_name} contained {param_name} param "
"in `query` config and {param_name} was also provided "
"with arg to run_query() method. Please remove duplicates.".format(
param_name=key, dict_name=config_dict_name
)
f"Values of {key} param are duplicated. "
f"{config_dict_name} contained {key} param "
f"in `query` config and {key} was also provided "
"with arg to run_query() method. Please remove duplicates."
)


Expand Down
6 changes: 3 additions & 3 deletions airflow/providers/google/cloud/operators/functions.py
Expand Up @@ -282,9 +282,9 @@ def _verify_upload_url_and_no_zip_path(self) -> None:
if self._is_present_and_empty(self.body, GCF_SOURCE_UPLOAD_URL):
if not self.zip_path:
raise AirflowException(
"Parameter '{url}' is empty in the body and argument '{path}' "
"is missing or empty. You need to have non empty '{path}' "
"when '{url}' is present and empty.".format(url=GCF_SOURCE_UPLOAD_URL, path=GCF_ZIP_PATH)
f"Parameter '{GCF_SOURCE_UPLOAD_URL}' is empty in the body and argument '{GCF_ZIP_PATH}' "
f"is missing or empty. You need to have non empty '{GCF_ZIP_PATH}' "
f"when '{GCF_SOURCE_UPLOAD_URL}' is present and empty."
)

def _verify_upload_url_and_zip_path(self) -> None:
Expand Down

0 comments on commit c90eec9

Please sign in to comment.