From 4c8e033c0ee7d28963d504a9216205155f20f58f Mon Sep 17 00:00:00 2001
From: John Bampton
Date: Sat, 24 Oct 2020 18:06:04 +1000
Subject: [PATCH] Fix spelling and grammar (#11814)

---
 airflow/executors/celery_executor.py                        | 2 +-
 airflow/jobs/scheduler_job.py                               | 2 +-
 .../providers/amazon/aws/example_dags/example_datasync_2.py | 2 +-
 airflow/providers/amazon/aws/operators/datasync.py          | 4 ++--
 airflow/www/api/experimental/endpoints.py                   | 2 +-
 tests/api/common/experimental/test_mark_tasks.py            | 2 +-
 tests/providers/amazon/aws/hooks/test_s3.py                 | 2 +-
 tests/providers/amazon/aws/operators/test_datasync.py       | 2 +-
 8 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index dc274ebe51a70..dcde695e6768a 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -251,7 +251,7 @@ def _process_tasks(self, task_tuples_to_send: List[TaskInstanceInCelery]) -> Non
 
             # Store the Celery task_id in the event buffer. This will get "overwritten" if the task
             # has another event, but that is fine, because the only other events are success/failed at
-            # which point we dont need the ID anymore anyway
+            # which point we don't need the ID anymore anyway
             self.event_buffer[key] = (State.QUEUED, result.task_id)
 
             # If the task runs _really quickly_ we may already have a result!
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index 49f8870fe6278..1b0ff23f5737e 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -763,7 +763,7 @@ def __init__(
 
         self.num_runs = num_runs
         # In specific tests, we want to stop the parse loop after the _files_ have been parsed a certain
-        # number of times. This is only to support testing, and is n't something a user is likely to want to
+        # number of times. This is only to support testing, and isn't something a user is likely to want to
         # conifugre -- they'll want num_runs
         self.num_times_parse_dags = num_times_parse_dags
         self._processor_poll_interval = processor_poll_interval
diff --git a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
index d4c7091db4fff..5db7ee33cbd40 100644
--- a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
+++ b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
@@ -19,7 +19,7 @@
 
 - Try to get a TaskArn. If one exists, update it.
 - If no tasks exist, try to create a new DataSync Task.
-    - If source and destination locations dont exist for the new task, create them first
+    - If source and destination locations don't exist for the new task, create them first
 - If many tasks exist, raise an Exception
 - After getting or creating a DataSync Task, run it
 
diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py
index b70ed96c44dbb..9cee0987cd83d 100644
--- a/airflow/providers/amazon/aws/operators/datasync.py
+++ b/airflow/providers/amazon/aws/operators/datasync.py
@@ -254,7 +254,7 @@ def choose_task(self, task_arn_list: list) -> Optional[str]:
         if len(task_arn_list) == 1:
             return task_arn_list[0]
         if self.allow_random_task_choice:
-            # Items are unordered so we dont want to just take
+            # Items are unordered so we don't want to just take
             # the [0] one as it implies ordered items were received
             # from AWS and might lead to confusion. Rather explicitly
             # choose a random one
@@ -268,7 +268,7 @@ def choose_location(self, location_arn_list: List[str]) -> Optional[str]:
         if len(location_arn_list) == 1:
             return location_arn_list[0]
         if self.allow_random_location_choice:
-            # Items are unordered so we dont want to just take
+            # Items are unordered so we don't want to just take
             # the [0] one as it implies ordered items were received
             # from AWS and might lead to confusion. Rather explicitly
             # choose a random one
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index 025bfdef5caf7..6d8000f3f42fa 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -328,7 +328,7 @@ def latest_dag_runs():
             'dag_run_url': url_for('Airflow.graph', dag_id=dagrun.dag_id,
                                    execution_date=dagrun.execution_date)
         })
-    return jsonify(items=payload)  # old flask versions dont support jsonifying arrays
+    return jsonify(items=payload)  # old flask versions don't support jsonifying arrays
 
 
 @api_experimental.route('/pools/<string:name>', methods=['GET'])
diff --git a/tests/api/common/experimental/test_mark_tasks.py b/tests/api/common/experimental/test_mark_tasks.py
index b4b09a6af1cbb..c9879d4c367c1 100644
--- a/tests/api/common/experimental/test_mark_tasks.py
+++ b/tests/api/common/experimental/test_mark_tasks.py
@@ -149,7 +149,7 @@ def test_mark_tasks_now(self):
         self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                           State.FAILED, snapshot)
 
-        # dont alter other tasks
+        # don't alter other tasks
         snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
         task = self.dag1.get_task("runme_0")
         altered = set_state(tasks=[task], execution_date=self.execution_dates[0],
diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/tests/providers/amazon/aws/hooks/test_s3.py
index 6c00945310f25..337352e257dc7 100644
--- a/tests/providers/amazon/aws/hooks/test_s3.py
+++ b/tests/providers/amazon/aws/hooks/test_s3.py
@@ -117,7 +117,7 @@ def test_list_prefixes_paged(self, s3_bucket):
         hook = S3Hook()
         bucket = hook.get_bucket(s3_bucket)
 
-        # we dont need to test the paginator that's covered by boto tests
+        # we don't need to test the paginator that's covered by boto tests
         keys = ["%s/b" % i for i in range(2)]
         dirs = ["%s/" % i for i in range(2)]
         for key in keys:
diff --git a/tests/providers/amazon/aws/operators/test_datasync.py b/tests/providers/amazon/aws/operators/test_datasync.py
index 1a2663598fc62..1fb9f1e84d0fe 100644
--- a/tests/providers/amazon/aws/operators/test_datasync.py
+++ b/tests/providers/amazon/aws/operators/test_datasync.py
@@ -436,7 +436,7 @@ def test_get_one_task(self, mock_get_conn):
         mock_get_conn.return_value = self.client
         # ### Begin tests:
 
-        # Make sure we dont cheat
+        # Make sure we don't cheat
         self.set_up_operator()
         self.assertEqual(self.datasync.task_arn, None)
 
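
Note on the "choose a random one" comments corrected in the datasync.py hunks: the operator
deliberately avoids taking the first ARN from an unordered AWS result list, since [0] would
falsely imply a stable ordering. A minimal standalone sketch of that pattern, assuming a
hypothetical helper named choose_arn (the real operator methods are the choose_task and
choose_location shown above):

    import random
    from typing import List, Optional

    def choose_arn(arn_list: List[str], allow_random_choice: bool = False) -> Optional[str]:
        """Pick one ARN from an unordered AWS listing (illustrative sketch only)."""
        if not arn_list:
            return None
        if len(arn_list) == 1:
            return arn_list[0]
        if allow_random_choice:
            # Results are unordered, so arn_list[0] would suggest an ordering
            # AWS never promised; choose one explicitly at random instead.
            return random.choice(arn_list)
        # Ambiguous result and random choice not allowed: signal "no choice made".
        return None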
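Note on the jsonify comment corrected in the endpoints.py hunk: older Flask releases refused
to serialize a top-level JSON array, so the endpoint wraps the list in a dict via
jsonify(items=payload). A sketch of that wrapping pattern, using a hypothetical /items route
rather than Airflow's actual endpoint:

    from flask import Flask, jsonify

    app = Flask(__name__)

    @app.route('/items')
    def items():
        payload = [{'id': 1}, {'id': 2}]
        # Wrap the list in a dict: jsonify on a bare list failed on old Flask,
        # so the response body is {"items": [...]} rather than a raw array.
        return jsonify(items=payload)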