
Commit

Apache provider docstring improvements (#31730)
uranusjr committed Jun 7, 2023
1 parent 51419a7 commit 1389078
Showing 7 changed files with 34 additions and 34 deletions.
8 changes: 4 additions & 4 deletions airflow/providers/apache/druid/transfers/hive_to_druid.py
@@ -32,10 +32,10 @@


 class HiveToDruidOperator(BaseOperator):
-    """
-    Moves data from Hive to Druid, [del]note that for now the data is loaded
-    into memory before being pushed to Druid, so this operator should
-    be used for smallish amount of data.[/del].
+    """Moves data from Hive to Druid.
+
+    [del]note that for now the data is loaded into memory before being pushed to
+    Druid, so this operator should be used for smallish amount of data.[/del]

     :param sql: SQL query to execute against the Druid database. (templated)
     :param druid_datasource: the datasource you want to ingest into in druid
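For orientation, here is a minimal usage sketch of the operator whose docstring changes above. The task id, query, datasource, and the ``ts_dim`` argument are illustrative assumptions, not values from this commit.

.. code-block:: python

    from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator

    # Hypothetical task: run a Hive query and ingest the result into a Druid datasource.
    load_pageviews = HiveToDruidOperator(
        task_id="hive_to_druid_pageviews",  # hypothetical task id
        sql="SELECT ts, page, views FROM hive_db.pageviews",  # made-up Hive query
        druid_datasource="pageviews",  # made-up target datasource
        ts_dim="ts",  # assumed timestamp-dimension argument
    )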
7 changes: 4 additions & 3 deletions airflow/providers/apache/hive/operators/hive_stats.py
@@ -33,10 +33,11 @@


 class HiveStatsCollectionOperator(BaseOperator):
-    """Gathers partition statistics using a dynamically generated Presto query.
+    """Gather partition statistics and insert them into MySQL.

-    The collected stats are inserted into a MySQL table with this format. Stats
-    overwrite themselves if you rerun the same date/partition.
+    Statistics are gathered with a dynamically generated Presto query and
+    inserted with this format. Stats overwrite themselves if you rerun the
+    same date/partition.

     .. code-block:: sql
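A hypothetical usage sketch of the operator described above; the table name and partition spec are illustrative assumptions rather than values from this commit.

.. code-block:: python

    from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator

    # Hypothetical task: gather stats for one partition; rerunning the same
    # date/partition overwrites the previously inserted rows, as noted above.
    collect_stats = HiveStatsCollectionOperator(
        task_id="collect_pageview_stats",  # hypothetical task id
        table="hive_db.pageviews",  # assumed Hive table
        partition={"ds": "{{ ds }}"},  # assumed partition spec (templated)
    )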
9 changes: 5 additions & 4 deletions airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -93,11 +93,12 @@ def __init__(

     @classmethod
     def type_map(cls, vertica_type):
-        """
-        Vertica-python datatype.py does not provide the full type mapping access.
-        Manual hack.
+        """Manually hack Vertica-Python type mapping.
+
+        The stock datatype.py does not provide the full type mapping access.

-        Reference: https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py
+        Reference:
+        https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py
         """
         type_map = {
             5: "BOOLEAN",
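To illustrate the classmethod above: it translates a vertica-python type code into a Hive column type. A small sketch, assuming the enclosing class is the file's ``VerticaToHiveOperator`` (not shown in this hunk).

.. code-block:: python

    from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator

    # Type code 5 is mapped to BOOLEAN in the dictionary shown in the diff above.
    print(VerticaToHiveOperator.type_map(5))  # BOOLEAN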
4 changes: 1 addition & 3 deletions airflow/providers/apache/kylin/operators/kylin_cube.py
@@ -32,9 +32,7 @@


 class KylinCubeOperator(BaseOperator):
-    """
-    This operator is used to submit request about kylin build/refresh/merge,
-    and can track job status . so users can easier to build kylin job.
+    """Submit request about Kylin build/refresh/merge and track job status.

     For more detail information in
     `Apache Kylin <http://kylin.apache.org/>`_
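A hypothetical usage sketch of the operator; the connection, project, cube, and the ``command``/``is_track_job`` arguments are assumptions for illustration and are not part of this commit.

.. code-block:: python

    from airflow.providers.apache.kylin.operators.kylin_cube import KylinCubeOperator

    # Hypothetical task: submit a cube build job and track it until completion.
    build_cube = KylinCubeOperator(
        task_id="kylin_build_cube",  # hypothetical task id
        kylin_conn_id="kylin_default",  # assumed connection id
        project="learn_kylin",  # assumed Kylin project
        cube="kylin_sales_cube",  # assumed cube name
        command="build",  # assumed: one of build / refresh / merge
        is_track_job=True,  # assumed flag to poll job status
    )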
8 changes: 4 additions & 4 deletions airflow/providers/apache/livy/triggers/livy.py
@@ -79,10 +79,10 @@ def serialize(self) -> tuple[str, dict[str, Any]]:
         )

     async def run(self) -> AsyncIterator[TriggerEvent]:
-        """
-        Checks if the _polling_interval > 0, in that case it pools Livy for
-        batch termination asynchronously.
-        else returns the success response.
+        """Run the trigger.
+
+        If ``_polling_interval > 0``, this pools Livy for batch termination
+        asynchronously. Otherwise the success response is created immediately.
         """
         try:
             if self._polling_interval > 0:
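The branching the new docstring describes can be summarised with a simplified sketch. This is not the actual implementation; the helper name and event payload are assumptions.

.. code-block:: python

    from airflow.triggers.base import TriggerEvent

    async def run_sketch(trigger):
        """Simplified sketch of the branching described above (not the real code)."""
        if trigger._polling_interval > 0:
            # Poll Livy asynchronously until the batch reaches a terminal state.
            response = await trigger.poll_for_termination(trigger._batch_id)  # assumed helper
            yield TriggerEvent(response)
        else:
            # No polling configured: emit the success event immediately.
            yield TriggerEvent({"status": "success", "batch_id": trigger._batch_id})  # assumed payload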
8 changes: 5 additions & 3 deletions airflow/providers/apache/spark/hooks/spark_submit.py
@@ -166,10 +166,12 @@ def __init__(
         self._env: dict[str, Any] | None = None

     def _resolve_should_track_driver_status(self) -> bool:
-        """Whether this hook should poll the Spark driver status.
+        """Check if we should track the driver status.

-        If this returns True, the hook would send subsequent spark-submit status
-        requests after the initial spark-submit request.
+        If so, we should send subsequent spark-submit status requests after the
+        initial spark-submit request.
+
+        :return: if the driver status should be tracked
         """
         return "spark://" in self._connection["master"] and self._connection["deploy_mode"] == "cluster"
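The condition in the shown return statement means the driver status is only tracked for a standalone-cluster submission, i.e. a ``spark://`` master combined with cluster deploy mode. A small illustration of the same expression with hypothetical connection values:

.. code-block:: python

    # Hypothetical connection dictionaries evaluated with the expression shown above.
    standalone_cluster = {"master": "spark://spark-master:7077", "deploy_mode": "cluster"}
    yarn_client = {"master": "yarn", "deploy_mode": "client"}

    track = "spark://" in standalone_cluster["master"] and standalone_cluster["deploy_mode"] == "cluster"
    print(track)  # True

    track = "spark://" in yarn_client["master"] and yarn_client["deploy_mode"] == "cluster"
    print(track)  # False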
24 changes: 11 additions & 13 deletions airflow/providers/apache/sqoop/hooks/sqoop.py
@@ -27,9 +27,9 @@


 class SqoopHook(BaseHook):
-    """
-    This hook is a wrapper around the sqoop 1 binary. To be able to use the hook
-    it is required that "sqoop" is in the PATH.
+    """Wrapper around the sqoop 1 binary.
+
+    To be able to use the hook, it is required that "sqoop" is in the PATH.

     Additional arguments that can be passed via the 'extra' JSON field of the
     sqoop connection:
@@ -95,8 +95,7 @@ def cmd_mask_password(self, cmd_orig: list[str]) -> list[str]:
         return cmd

     def popen(self, cmd: list[str], **kwargs: Any) -> None:
-        """
-        Remote Popen.
+        """Remote Popen.

         :param cmd: command to remotely execute
         :param kwargs: extra arguments to Popen (see subprocess.Popen)
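For orientation, a minimal sketch of creating the hook whose docstrings change in this file; the connection id and keyword arguments are assumptions for illustration, and the ``sqoop`` binary must be on PATH as the docstring notes.

.. code-block:: python

    from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook

    # Assumed connection id and options; the hook shells out to the sqoop 1 binary.
    hook = SqoopHook(conn_id="sqoop_default", verbose=True, num_mappers=4)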
@@ -223,9 +222,9 @@
         extra_import_options: dict[str, Any] | None = None,
         schema: str | None = None,
     ) -> Any:
-        """
-        Imports table from remote location to target dir. Arguments are
-        copies of direct sqoop command line arguments.
+        """Import table from remote location to target dir.
+
+        Arguments are copies of direct sqoop command line arguments.

         :param table: Table to read
         :param schema: Schema name
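A hypothetical call to the method whose docstring changes above; ``table`` and ``schema`` are the documented parameters, while the target directory argument name and all values are assumptions.

.. code-block:: python

    from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook

    hook = SqoopHook(conn_id="sqoop_default")  # assumed connection id

    # Hypothetical import of one table into an HDFS directory.
    hook.import_table(
        table="orders",  # table to read (documented parameter)
        schema="sales",  # schema name (documented parameter)
        target_dir="/warehouse/orders",  # assumed HDFS destination argument
    )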
@@ -266,8 +265,7 @@ def import_query(
         driver: Any | None = None,
         extra_import_options: dict[str, Any] | None = None,
     ) -> Any:
-        """
-        Imports a specific query from the rdbms to hdfs.
+        """Import a specific query from the rdbms to hdfs.

         :param query: Free format query to run
         :param target_dir: HDFS destination dir
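And a matching hypothetical call for the free-form query variant, using the two parameters documented above; the query text and directory are made up.

.. code-block:: python

    # `hook` as created in the sketch above; Sqoop free-form queries require $CONDITIONS.
    hook.import_query(
        query="SELECT id, total FROM sales.orders WHERE $CONDITIONS",  # made-up query
        target_dir="/warehouse/order_totals",  # made-up HDFS destination dir
    )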
@@ -375,9 +373,9 @@ def export_table(
         extra_export_options: dict[str, Any] | None = None,
         schema: str | None = None,
     ) -> None:
-        """
-        Exports Hive table to remote location. Arguments are copies of direct
-        sqoop command line Arguments.
+        """Export Hive table to remote location.
+
+        Arguments are copies of direct Sqoop command line Arguments

         :param table: Table remote destination
         :param schema: Schema name
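Finally, a hypothetical export call mirroring the docstring above; ``table`` and ``schema`` are the documented parameters, while the export directory argument name and all values are assumptions.

.. code-block:: python

    from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook

    hook = SqoopHook(conn_id="sqoop_default")  # assumed connection id

    # Hypothetical export of an HDFS directory into a remote destination table.
    hook.export_table(
        table="orders_export",  # remote destination table (documented parameter)
        schema="sales",  # schema name (documented parameter)
        export_dir="/warehouse/orders",  # assumed HDFS source directory argument
    )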
