diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index 5a440f39ccb95..13f7c2a2b2fef 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -220,7 +220,7 @@ def check_for_bucket(self, bucket_name: str | None = None) -> bool:
             # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_bucket
             return_code = int(e.response["Error"]["Code"])
             if return_code == 404:
-                self.log.error('Bucket "%s" does not exist', bucket_name)
+                self.log.info('Bucket "%s" does not exist', bucket_name)
             elif return_code == 403:
                 self.log.error(
                     'Access to bucket "%s" is forbidden or there was an error with the request', bucket_name
diff --git a/tests/system/providers/amazon/aws/example_emr_serverless.py b/tests/system/providers/amazon/aws/example_emr_serverless.py
index 1d5e6e471ddff..c2838f85859d5 100644
--- a/tests/system/providers/amazon/aws/example_emr_serverless.py
+++ b/tests/system/providers/amazon/aws/example_emr_serverless.py
@@ -18,6 +18,8 @@
 
 from datetime import datetime
 
+import boto3
+
 from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.operators.emr import (
@@ -49,7 +51,8 @@
     env_id = test_context[ENV_ID_KEY]
     role_arn = test_context[ROLE_ARN_KEY]
     bucket_name = f"{env_id}-emr-serverless-bucket"
-    entryPoint = "s3://us-east-1.elasticmapreduce/emr-containers/samples/wordcount/scripts/wordcount.py"
+    region = boto3.session.Session().region_name
+    entryPoint = f"s3://{region}.elasticmapreduce/emr-containers/samples/wordcount/scripts/wordcount.py"
 
     create_s3_bucket = S3CreateBucketOperator(task_id="create_s3_bucket", bucket_name=bucket_name)
 
     SPARK_JOB_DRIVER = {