[AIRFLOW-2889] Fix typos detected by github.com/client9/misspell (#3732)
seratch authored and Tao Feng committed Aug 12, 2018
1 parent f999ce2 commit b78c7fb
Showing 22 changed files with 28 additions and 28 deletions.
@@ -32,7 +32,7 @@
# the spark jar task will NOT run until the notebook task completes
# successfully.
#
- # The definition of a succesful run is if the run has a result_state of "SUCCESS".
+ # The definition of a successful run is if the run has a result_state of "SUCCESS".
# For more information about the state of a run refer to
# https://docs.databricks.com/api/latest/jobs.html#runstate
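For context, this comment sits in an example DAG that chains a notebook run and a Spark jar run via DatabricksSubmitRunOperator. A minimal, hedged sketch of that pattern (cluster spec, notebook path, and jar coordinates are placeholders, not the example's actual values):

    from datetime import datetime
    from airflow import DAG
    from airflow.contrib.operators.databricks_operator import DatabricksSubmitRunOperator

    # Placeholder cluster spec -- adjust for your workspace.
    new_cluster = {'spark_version': '2.1.0-db3-scala2.11',
                   'node_type_id': 'r3.xlarge',
                   'num_workers': 2}

    with DAG('databricks_example', start_date=datetime(2018, 1, 1),
             schedule_interval='@daily') as dag:
        notebook_task = DatabricksSubmitRunOperator(
            task_id='notebook_task',
            json={'new_cluster': new_cluster,
                  'notebook_task': {'notebook_path': '/Users/someone@example.com/PrepareData'}})
        spark_jar_task = DatabricksSubmitRunOperator(
            task_id='spark_jar_task',
            json={'new_cluster': new_cluster,
                  'spark_jar_task': {'main_class_name': 'com.example.ProcessData'},
                  'libraries': [{'jar': 'dbfs:/FileStore/my-jar.jar'}]})
        # The jar task only starts once the notebook run ends with result_state "SUCCESS".
        notebook_task >> spark_jar_task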

2 changes: 1 addition & 1 deletion airflow/contrib/hooks/azure_fileshare_hook.py
@@ -100,7 +100,7 @@ def list_directories_and_files(self, share_name, directory_name=None, **kwargs):

def create_directory(self, share_name, directory_name, **kwargs):
"""
- Create a new direcotry on a Azure File Share.
+ Create a new directory on a Azure File Share.
:param share_name: Name of the share.
:type share_name: str
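A hedged usage sketch of the two methods in this hunk (share and directory names are placeholders; assumes the hook's default Azure File Share connection is configured):

    from airflow.contrib.hooks.azure_fileshare_hook import AzureFileShareHook

    hook = AzureFileShareHook()                               # default connection
    hook.create_directory('my-share', 'landing/2018-08-12')   # new directory on the share
    print(hook.list_directories_and_files('my-share', 'landing'))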
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/bigquery_hook.py
@@ -627,7 +627,7 @@ def run_query(self,

if query_params:
if self.use_legacy_sql:
raise ValueError("Query paramaters are not allowed when using "
raise ValueError("Query parameters are not allowed when using "
"legacy SQL")
else:
configuration['query']['queryParameters'] = query_params
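In practice this means parameterized queries are only accepted together with standard SQL. A hedged sketch of the calling side (connection id, table, and the REST-style parameter dicts are assumptions, not values from this commit):

    from airflow.contrib.hooks.bigquery_hook import BigQueryHook

    hook = BigQueryHook(bigquery_conn_id='bigquery_default')
    cursor = hook.get_conn().cursor()
    cursor.use_legacy_sql = False    # query_params would raise ValueError with legacy SQL
    cursor.run_query(
        'SELECT * FROM `my-project.my_dataset.my_table` WHERE id = @id',
        query_params=[{'name': 'id',
                       'parameterType': {'type': 'INT64'},
                       'parameterValue': {'value': 42}}])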
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/emr_hook.py
@@ -23,7 +23,7 @@

class EmrHook(AwsHook):
"""
- Interact with AWS EMR. emr_conn_id is only neccessary for using the
+ Interact with AWS EMR. emr_conn_id is only necessary for using the
create_job_flow method.
"""

2 changes: 1 addition & 1 deletion airflow/contrib/hooks/gcp_dataproc_hook.py
@@ -235,6 +235,6 @@ def wait(self, operation):
DataProcHook,
"await",
deprecation.deprecated(
- DataProcHook.wait, "renamed to 'wait' for Python3.7 compatability"
+ DataProcHook.wait, "renamed to 'wait' for Python3.7 compatibility"
),
)
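The rename matters because `await` became a reserved keyword in Python 3.7, so the old method name can only survive as an alias attached via setattr. A rough sketch of what the snippet above achieves (the warning text is illustrative):

    import warnings
    from airflow.contrib.hooks.gcp_dataproc_hook import DataProcHook

    def _deprecated_await(self, operation):
        warnings.warn("'await' was renamed to 'wait' for Python 3.7 compatibility",
                      DeprecationWarning, stacklevel=2)
        return self.wait(operation)

    # 'await' is no longer a valid identifier in 3.7+, hence setattr/getattr:
    setattr(DataProcHook, 'await', _deprecated_await)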
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/qubole_hook.py
@@ -125,7 +125,7 @@ def execute(self, context):

def kill(self, ti):
"""
- Kill (cancel) a Qubole commmand
+ Kill (cancel) a Qubole command
:param ti: Task Instance of the dag, used to determine the Quboles command id
:return: response from Qubole
"""
4 changes: 2 additions & 2 deletions airflow/contrib/hooks/salesforce_hook.py
@@ -53,14 +53,14 @@ def __init__(
:param conn_id: the name of the connection that has the parameters
we need to connect to Salesforce.
- The conenction shoud be type `http` and include a
+ The connection shoud be type `http` and include a
user's security token in the `Extras` field.
.. note::
For the HTTP connection type, you can include a
JSON structure in the `Extras` field.
We need a user's security token to connect to Salesforce.
So we define it in the `Extras` field as:
`{"security_token":"YOUR_SECRUITY_TOKEN"}`
`{"security_token":"YOUR_SECURITY_TOKEN"}`
"""
self.conn_id = conn_id
self._args = args
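A hedged sketch of how a connection set up that way is consumed (the connection name and SOQL query are placeholders, and the exact hook methods are assumptions about this era of the contrib code):

    from airflow.contrib.hooks.salesforce_hook import SalesforceHook

    # Assumes an HTTP connection 'salesforce_default' whose Extras field contains:
    #   {"security_token": "YOUR_SECURITY_TOKEN"}
    hook = SalesforceHook(conn_id='salesforce_default')
    hook.sign_in()
    accounts = hook.make_query("SELECT Id, Name FROM Account LIMIT 10")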
2 changes: 1 addition & 1 deletion airflow/contrib/operators/gcs_to_bq.py
@@ -86,7 +86,7 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator):
for other formats.
:type allow_jagged_rows: bool
:param max_id_key: If set, the name of a column in the BigQuery table
- that's to be loaded. Thsi will be used to select the MAX value from
+ that's to be loaded. This will be used to select the MAX value from
BigQuery after the load occurs. The results will be returned by the
execute() command, which in turn gets stored in XCom for future
operators to use. This can be helpful with incremental loads--during
2 changes: 1 addition & 1 deletion airflow/contrib/operators/mlengine_operator_utils.py
@@ -160,7 +160,7 @@ def validate_err_and_count(summary):
then the `dag`'s `default_args['model_name']` will be used.
:type model_name: string
- :param version_name: Used to indicate a model version to use for prediciton,
+ :param version_name: Used to indicate a model version to use for prediction,
in combination with model_name. Cannot be used together with model_uri.
See MLEngineBatchPredictionOperator for more detail. If None, then the
`dag`'s `default_args['version_name']` will be used.
2 changes: 1 addition & 1 deletion airflow/contrib/operators/qubole_check_operator.py
@@ -28,7 +28,7 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator):
"""
Performs checks against Qubole Commands. ``QuboleCheckOperator`` expects
a command that will be executed on QDS.
- By default, each value on first row of the result of this Qubole Commmand
+ By default, each value on first row of the result of this Qubole Command
is evaluated using python ``bool`` casting. If any of the
values return ``False``, the check is failed and errors out.
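The bool-casting rule amounts to the following simplified check (a sketch, not the operator's exact implementation; the row contents are hypothetical):

    from airflow.exceptions import AirflowException

    first_row = (1, 'ok', 0)   # hypothetical first row of the Qubole command's result
    if not all(bool(value) for value in first_row):
        raise AirflowException('Check failed: a returned value evaluates to False')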
@@ -28,7 +28,7 @@ <h4>
<li role="presentation" class="active"><a href="#home" aria-controls="fields" role="tab" data-toggle="tab">Fields</a></li>
<li role="presentation"><a href="#data" aria-controls="data" role="tab" data-toggle="tab">Sample Data</a></li>
<li role="presentation"><a href="#partitions" aria-controls="partitions" role="tab" data-toggle="tab">Partitions</a></li>
<li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Atributes</a></li>
<li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Attributes</a></li>
<li role="presentation"><a href="#parameters" aria-controls="parameters" role="tab" data-toggle="tab">Parameters</a></li>
<li role="presentation"><a href="#ddl" aria-controls="ddl" role="tab" data-toggle="tab">DDL</a></li>
</ul>
2 changes: 1 addition & 1 deletion airflow/hooks/mysql_hook.py
@@ -121,7 +121,7 @@ def bulk_dump(self, table, tmp_file):
def _serialize_cell(cell, conn):
"""
MySQLdb converts an argument to a literal
- when passing those seperately to execute. Hence, this method does nothing.
+ when passing those separately to execute. Hence, this method does nothing.
:param cell: The cell to insert into the table
:type cell: object
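In other words the method behaves as an identity function: the cell is handed to MySQLdb untouched and escaped there. A small sketch of that pass-through (assuming the hook keeps this behaviour):

    from airflow.hooks.mysql_hook import MySqlHook

    # No per-cell conversion: MySQLdb escapes each parameter itself when the
    # values are passed separately to cursor.execute().
    assert MySqlHook._serialize_cell('2018-08-12', conn=None) == '2018-08-12'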
2 changes: 1 addition & 1 deletion airflow/models.py
@@ -942,7 +942,7 @@ def init_on_load(self):
@property
def try_number(self):
"""
- Return the try number that this task number will be when it is acutally
+ Return the try number that this task number will be when it is actually
run.
If the TI is currently running, this will match the column in the
2 changes: 1 addition & 1 deletion airflow/operators/hive_to_druid.py
@@ -164,7 +164,7 @@ def construct_ingest_query(self, static_path, columns):
:type columns: list
"""

- # backward compatibilty for num_shards,
+ # backward compatibility for num_shards,
# but target_partition_size is the default setting
# and overwrites the num_shards
num_shards = self.num_shards
6 changes: 3 additions & 3 deletions airflow/sensors/hdfs_sensor.py
@@ -88,12 +88,12 @@ def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
if ignore_copying:
log = LoggingMixin().log
regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext)
- ignored_extentions_regex = re.compile(regex_builder)
+ ignored_extensions_regex = re.compile(regex_builder)
log.debug(
'Filtering result for ignored extensions: %s in files %s',
- ignored_extentions_regex.pattern, map(lambda x: x['path'], result)
+ ignored_extensions_regex.pattern, map(lambda x: x['path'], result)
)
- result = [x for x in result if not ignored_extentions_regex.match(x['path'])]
+ result = [x for x in result if not ignored_extensions_regex.match(x['path'])]
log.debug('HdfsSensor.poke: after ext filter result is %s', result)
return result
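As an illustration of the filter above (the ignored-extension list shown is the sensor's usual default, stated here as an assumption):

    import re

    ignored_ext = ['_COPYING_']
    regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext)
    ignored_extensions_regex = re.compile(regex_builder)   # ^.*\.(_COPYING_$)$

    result = [{'path': '/data/part-0000'},
              {'path': '/data/part-0001._COPYING_'}]
    result = [x for x in result if not ignored_extensions_regex.match(x['path'])]
    # Only '/data/part-0000' survives; the in-flight copy is filtered out.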

6 changes: 3 additions & 3 deletions dev/airflow-pr
@@ -714,8 +714,8 @@ def standardize_jira_ref(text, only_jira=False):
'[AIRFLOW-5954][MLLIB] Top by key'
>>> standardize_jira_ref("[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl")
'[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl'
>>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility accross versions.")
'[AIRFLOW-1094] Support MiMa for reporting binary compatibility accross versions.'
>>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility across versions.")
'[AIRFLOW-1094] Support MiMa for reporting binary compatibility across versions.'
>>> standardize_jira_ref("[WIP] [AIRFLOW-1146] Vagrant support for Spark")
'[AIRFLOW-1146][WIP] Vagrant support for Spark'
>>> standardize_jira_ref("AIRFLOW-1032. If Yarn app fails before registering, app master stays aroun...")
@@ -942,7 +942,7 @@ def cli():
status = run_cmd('git status --porcelain', echo_cmd=False)
if status:
msg = (
- 'You have uncomitted changes in this branch. Running this tool\n'
+ 'You have uncommitted changes in this branch. Running this tool\n'
'will delete them permanently. Continue?')
if click.confirm(click.style(msg, fg='red', bold=True)):
run_cmd('git reset --hard', echo_cmd=False)
2 changes: 1 addition & 1 deletion docs/howto/write-logs.rst
@@ -11,7 +11,7 @@ directory.
In addition, users can supply a remote location for storing logs and log
backups in cloud storage.

- In the Airflow Web UI, local logs take precedance over remote logs. If local logs
+ In the Airflow Web UI, local logs take precedence over remote logs. If local logs
can not be found or accessed, the remote logs will be displayed. Note that logs
are only sent to remote storage once a task completes (including failure). In other
words, remote logs for running tasks are unavailable. Logs are stored in the log
2 changes: 1 addition & 1 deletion scripts/ci/kubernetes/kube/secrets.yaml
@@ -20,6 +20,6 @@ metadata:
name: airflow-secrets
type: Opaque
data:
- # The sql_alchemy_conn value is a base64 encoded represenation of this connection string:
+ # The sql_alchemy_conn value is a base64 encoded representation of this connection string:
# postgresql+psycopg2://root:root@postgres-airflow:5432/airflow
sql_alchemy_conn: cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93OjU0MzIvYWlyZmxvdwo=
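That value can be reproduced (or regenerated for a different database) with a couple of lines of Python; note that the committed string also encodes a trailing newline:

    import base64

    conn = "postgresql+psycopg2://root:root@postgres-airflow:5432/airflow\n"
    print(base64.b64encode(conn.encode()).decode())
    # cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93OjU0MzIvYWlyZmxvdwo=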
4 changes: 2 additions & 2 deletions tests/contrib/hooks/test_bigquery_hook.py
@@ -52,12 +52,12 @@ def test_throws_exception_with_invalid_query(self):
self.assertIn('Reason: ', str(context.exception), "")

@unittest.skipIf(not bq_available, 'BQ is not available to run tests')
- def test_suceeds_with_explicit_legacy_query(self):
+ def test_succeeds_with_explicit_legacy_query(self):
df = self.instance.get_pandas_df('select 1', dialect='legacy')
self.assertEqual(df.iloc(0)[0][0], 1)

@unittest.skipIf(not bq_available, 'BQ is not available to run tests')
- def test_suceeds_with_explicit_std_query(self):
+ def test_succeeds_with_explicit_std_query(self):
df = self.instance.get_pandas_df(
'select * except(b) from (select 1 a, 2 b)', dialect='standard')
self.assertEqual(df.iloc(0)[0][0], 1)
2 changes: 1 addition & 1 deletion tests/contrib/operators/test_ecs_operator.py
@@ -181,7 +181,7 @@ def test_check_success_tasks_raises_pending(self):
self.assertIn("'lastStatus': 'PENDING'", str(e.exception))
client_mock.describe_tasks.assert_called_once_with(cluster='c', tasks=['arn'])

- def test_check_success_tasks_raises_mutliple(self):
+ def test_check_success_tasks_raises_multiple(self):
client_mock = mock.Mock()
self.ecs.client = client_mock
self.ecs.arn = 'arn'
2 changes: 1 addition & 1 deletion tests/core.py
@@ -831,7 +831,7 @@ def test_bad_trigger_rule(self):
with self.assertRaises(AirflowException):
DummyOperator(
task_id='test_bad_trigger',
trigger_rule="non_existant",
trigger_rule="non_existent",
dag=self.dag)

def test_terminate_task(self):
2 changes: 1 addition & 1 deletion tests/models.py
@@ -97,7 +97,7 @@ def test_dag_as_context_manager(self):
"""
Test DAG as a context manager.
When used as a context manager, Operators are automatically added to
- the DAG (unless they specifiy a different DAG)
+ the DAG (unless they specify a different DAG)
"""
dag = DAG(
'dag',
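The behaviour under test, as a minimal self-contained sketch (operator choice and task ids are illustrative):

    from datetime import datetime
    from airflow import DAG
    from airflow.operators.dummy_operator import DummyOperator

    with DAG('dag', start_date=datetime(2018, 1, 1)) as dag:
        op = DummyOperator(task_id='op')                     # picked up by the enclosing DAG

    other_dag = DAG('other_dag', start_date=datetime(2018, 1, 1))
    with dag:
        op2 = DummyOperator(task_id='op2', dag=other_dag)    # an explicitly passed DAG wins

    assert op.dag is dag
    assert op2.dag is other_dag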
