Fix spelling and grammar (#11814)
jbampton committed Oct 24, 2020
1 parent 0218bcf commit 4c8e033
Showing 8 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion airflow/executors/celery_executor.py
@@ -251,7 +251,7 @@ def _process_tasks(self, task_tuples_to_send: List[TaskInstanceInCelery]) -> None:

 # Store the Celery task_id in the event buffer. This will get "overwritten" if the task
 # has another event, but that is fine, because the only other events are success/failed at
-# which point we dont need the ID anymore anyway
+# which point we don't need the ID anymore anyway
 self.event_buffer[key] = (State.QUEUED, result.task_id)

 # If the task runs _really quickly_ we may already have a result!
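
For context, a minimal standalone sketch (hypothetical task-instance key and state strings, not the real executor code) of the overwrite behaviour that comment describes: a later terminal event simply replaces the queued entry, so losing the Celery task_id at that point is harmless.

    event_buffer = {}

    key = ("example_dag", "example_task", "2020-10-24")   # hypothetical TI key
    event_buffer[key] = ("queued", "celery-task-id-123")  # task_id stored on queue

    # ...later, a success/failed event overwrites the same slot:
    event_buffer[key] = ("success", None)  # the task_id is no longer needed
    print(event_buffer[key])               # ('success', None)
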
2 changes: 1 addition & 1 deletion airflow/jobs/scheduler_job.py
@@ -763,7 +763,7 @@ def __init__(

 self.num_runs = num_runs
 # In specific tests, we want to stop the parse loop after the _files_ have been parsed a certain
-# number of times. This is only to support testing, and is n't something a user is likely to want to
+# number of times. This is only to support testing, and isn't something a user is likely to want to
 # conifugre -- they'll want num_runs
 self.num_times_parse_dags = num_times_parse_dags
 self._processor_poll_interval = processor_poll_interval
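
A hedged sketch (toy loop; the real scheduler's loop is far more involved) of the test-only stop condition that comment describes:

    num_times_parse_dags = 3  # test-only knob; normal runs rely on num_runs
    parse_count = 0
    while True:
        parse_count += 1      # one full pass over the DAG files
        if 0 < num_times_parse_dags <= parse_count:
            break             # stop once the files were parsed enough times
    print(parse_count)        # 3
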
@@ -19,7 +19,7 @@
 - Try to get a TaskArn. If one exists, update it.
 - If no tasks exist, try to create a new DataSync Task.
-- If source and destination locations dont exist for the new task, create them first
+- If source and destination locations don't exist for the new task, create them first
 - If many tasks exist, raise an Exception
 - After getting or creating a DataSync Task, run it
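
That docstring amounts to a get-or-create-then-run flow; here is a hedged sketch of it (every helper name is hypothetical, not the real hook/operator API):

    def get_or_create_and_run(hook):
        task_arns = hook.find_task_arns()          # assumption: returns a list
        if len(task_arns) > 1:
            raise RuntimeError("more than one matching DataSync Task")
        if task_arns:
            task_arn = task_arns[0]
            hook.update_task(task_arn)             # one exists: update it
        else:
            src = hook.get_or_create_location("source")       # create missing
            dst = hook.get_or_create_location("destination")  # locations first
            task_arn = hook.create_task(src, dst)  # then the new task
        return hook.run_task(task_arn)             # finally, run it
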
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/operators/datasync.py
@@ -254,7 +254,7 @@ def choose_task(self, task_arn_list: list) -> Optional[str]:
 if len(task_arn_list) == 1:
     return task_arn_list[0]
 if self.allow_random_task_choice:
-    # Items are unordered so we dont want to just take
+    # Items are unordered so we don't want to just take
     # the [0] one as it implies ordered items were received
     # from AWS and might lead to confusion. Rather explicitly
     # choose a random one
@@ -268,7 +268,7 @@ def choose_location(self, location_arn_list: List[str]) -> Optional[str]:
 if len(location_arn_list) == 1:
     return location_arn_list[0]
 if self.allow_random_location_choice:
-    # Items are unordered so we dont want to just take
+    # Items are unordered so we don't want to just take
     # the [0] one as it implies ordered items were received
     # from AWS and might lead to confusion. Rather explicitly
     # choose a random one
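
A standalone sketch of that selection policy (hypothetical function name, mirroring choose_task/choose_location): return the sole item, optionally pick at random from many, otherwise refuse to guess.

    import random

    def choose_arn(arn_list, allow_random_choice=False):
        if len(arn_list) == 1:
            return arn_list[0]
        if allow_random_choice:
            # Unordered items: picking [0] would falsely suggest AWS
            # returned them in a meaningful order, so pick at random.
            return random.choice(arn_list)
        return None  # refuse to guess between many candidates
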
2 changes: 1 addition & 1 deletion airflow/www/api/experimental/endpoints.py
@@ -328,7 +328,7 @@ def latest_dag_runs():
     'dag_run_url': url_for('Airflow.graph', dag_id=dagrun.dag_id,
                            execution_date=dagrun.execution_date)
 })
-return jsonify(items=payload) # old flask versions dont support jsonifying arrays
+return jsonify(items=payload) # old flask versions don't support jsonifying arrays


 @api_experimental.route('/pools/<string:name>', methods=['GET'])
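
A minimal toy app (hypothetical route and data) illustrating that workaround: older Flask releases refused to jsonify a bare top-level list, so the list is wrapped in a dict under an "items" key.

    from flask import Flask, jsonify

    app = Flask(__name__)

    @app.route("/runs")
    def runs():
        payload = [{"dag_id": "example", "state": "success"}]
        return jsonify(items=payload)  # dict wrapper keeps old Flask happy
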
2 changes: 1 addition & 1 deletion tests/api/common/experimental/test_mark_tasks.py
@@ -149,7 +149,7 @@ def test_mark_tasks_now(self):
 self.verify_state(self.dag1, [task.task_id], [self.execution_dates[0]],
                   State.FAILED, snapshot)

-# dont alter other tasks
+# don't alter other tasks
 snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
 task = self.dag1.get_task("runme_0")
 altered = set_state(tasks=[task], execution_date=self.execution_dates[0],
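
A plain-dict sketch of that snapshot-then-verify pattern (hypothetical task names; the real test snapshots TaskInstance rows):

    states = {"runme_0": None, "runme_1": None, "runme_2": None}
    snapshot = dict(states)         # record every task's state first
    states["runme_0"] = "success"   # alter only the targeted task
    assert all(states[t] == snapshot[t]
               for t in states if t != "runme_0")  # others are untouched
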
2 changes: 1 addition & 1 deletion tests/providers/amazon/aws/hooks/test_s3.py
@@ -117,7 +117,7 @@ def test_list_prefixes_paged(self, s3_bucket):
 hook = S3Hook()
 bucket = hook.get_bucket(s3_bucket)

-# we dont need to test the paginator that's covered by boto tests
+# we don't need to test the paginator that's covered by boto tests
 keys = ["%s/b" % i for i in range(2)]
 dirs = ["%s/" % i for i in range(2)]
 for key in keys:
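
For orientation, what those two comprehensions evaluate to (the loop body is cut off by the diff; presumably it uploads one object per key so the paged listing has prefixes to return):

    keys = ["%s/b" % i for i in range(2)]
    dirs = ["%s/" % i for i in range(2)]
    assert keys == ["0/b", "1/b"]  # one object under each numeric prefix
    assert dirs == ["0/", "1/"]    # the prefixes the paged listing should return
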
2 changes: 1 addition & 1 deletion tests/providers/amazon/aws/operators/test_datasync.py
@@ -436,7 +436,7 @@ def test_get_one_task(self, mock_get_conn):
 mock_get_conn.return_value = self.client
 # ### Begin tests:

-# Make sure we dont cheat
+# Make sure we don't cheat
 self.set_up_operator()
 self.assertEqual(self.datasync.task_arn, None)
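
A toy sketch of that "don't cheat" precondition (hypothetical stand-in class): assert the operator starts with no task_arn, so the test can only pass if the operator discovers one itself.

    class FakeOperator:         # hypothetical stand-in for the real operator
        task_arn = None

    op = FakeOperator()
    assert op.task_arn is None  # nothing pre-assigned: the test can't cheat
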

