Skip to content

Commit

Permalink
Add missing call to super().__init__() in Apache providers (#7820)
Browse files Browse the repository at this point in the history
  • Loading branch information
kaxil committed Mar 23, 2020
1 parent b86bf79 commit 7e6372a
Show file tree
Hide file tree
Showing 10 changed files with 12 additions and 0 deletions.
1 change: 1 addition & 0 deletions airflow/providers/apache/cassandra/hooks/cassandra.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ class CassandraHook(BaseHook, LoggingMixin):
For details of the Cluster config, see cassandra.cluster.
"""
def __init__(self, cassandra_conn_id: str = 'cassandra_default'):
super().__init__()
conn = self.get_connection(cassandra_conn_id)

conn_config = {}
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/druid/hooks/druid.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ def __init__(
timeout=1,
max_ingestion_time=None):

super().__init__()
self.druid_ingest_conn_id = druid_ingest_conn_id
self.timeout = timeout
self.max_ingestion_time = max_ingestion_time
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/hdfs/hooks/hdfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ class HDFSHook(BaseHook):
"""
def __init__(self, hdfs_conn_id='hdfs_default', proxy_user=None,
autoconfig=False):
super().__init__()
if not snakebite_loaded:
raise ImportError(
'This HDFSHook implementation requires snakebite, but '
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/hdfs/hooks/webhdfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ class WebHDFSHook(BaseHook):
"""

def __init__(self, webhdfs_conn_id='webhdfs_default', proxy_user=None):
    """Record the connection id and proxy user for later use.

    No connection is opened here; the attributes are only stored.

    :param webhdfs_conn_id: Airflow connection id, defaults to
        'webhdfs_default'.
    :param proxy_user: presumably the user to impersonate when talking to
        WebHDFS, or None — TODO confirm against where self.proxy_user is read.
    """
    # Added by this commit: ensure BaseHook's initializer runs so
    # cooperative __init__ chains are not silently skipped.
    super().__init__()
    self.webhdfs_conn_id = webhdfs_conn_id
    self.proxy_user = proxy_user

Expand Down
3 changes: 3 additions & 0 deletions airflow/providers/apache/hive/hooks/hive.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ def __init__(
mapred_queue=None,
mapred_queue_priority=None,
mapred_job_name=None):
super().__init__()
conn = self.get_connection(hive_cli_conn_id)
self.hive_cli_params = conn.extra_dejson.get('hive_cli_params', '')
self.use_beeline = conn.extra_dejson.get('use_beeline', False)
Expand Down Expand Up @@ -480,6 +481,7 @@ class HiveMetastoreHook(BaseHook):
MAX_PART_COUNT = 32767

def __init__(self, metastore_conn_id='metastore_default'):
    """Store the connection id and eagerly build a metastore client.

    :param metastore_conn_id: Airflow connection id for the Hive
        metastore, defaults to 'metastore_default'.
    """
    # Added by this commit: run BaseHook's initializer before any
    # hook state is set up.
    super().__init__()
    self.conn_id = metastore_conn_id
    # NOTE(review): the client is created in the constructor, so
    # instantiating this hook presumably fails immediately when the
    # metastore is unreachable — confirm in get_metastore_client().
    self.metastore = self.get_metastore_client()

Expand Down Expand Up @@ -784,6 +786,7 @@ class HiveServer2Hook(BaseHook):
``extra`` of your connection in the UI
"""
def __init__(self, hiveserver2_conn_id='hiveserver2_default'):
    """Store the connection id; the connection itself is built lazily
    (see get_conn below).

    :param hiveserver2_conn_id: Airflow connection id, defaults to
        'hiveserver2_default'.
    """
    # Added by this commit: ensure BaseHook's initializer runs.
    super().__init__()
    self.hiveserver2_conn_id = hiveserver2_conn_id

def get_conn(self, schema=None):
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/pig/hooks/pig.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ class PigCliHook(BaseHook):
def __init__(
        self,
        pig_cli_conn_id="pig_cli_default"):
    """Resolve the Airflow connection and cache Pig settings from it.

    :param pig_cli_conn_id: Airflow connection id, defaults to
        'pig_cli_default'.
    """
    # Added by this commit: run BaseHook's initializer before
    # resolving the connection.
    super().__init__()
    conn = self.get_connection(pig_cli_conn_id)
    # 'pig_properties' is read from the connection's Extra JSON;
    # falls back to an empty string when not configured.
    self.pig_properties = conn.extra_dejson.get('pig_properties', '')
    self.conn = conn
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/pinot/hooks/pinot.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ def __init__(self,
conn_id="pinot_admin_default",
cmd_path="pinot-admin.sh",
pinot_admin_system_exit=False):
super().__init__()
conn = self.get_connection(conn_id)
self.host = conn.host
self.port = str(conn.port)
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/spark/hooks/spark_sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ def __init__(self,
verbose=True,
yarn_queue='default'
):
super().__init__()
self._sql = sql
self._conf = conf
self._conn = self.get_connection(conn_id)
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/spark/hooks/spark_submit.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ def __init__(self,
env_vars=None,
verbose=False,
spark_binary=None):
super().__init__()
self._conf = conf or {}
self._conn_id = conn_id
self._files = files
Expand Down
1 change: 1 addition & 0 deletions airflow/providers/apache/sqoop/hooks/sqoop.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def __init__(self, conn_id='sqoop_default', verbose=False,
num_mappers=None, hcatalog_database=None,
hcatalog_table=None, properties=None):
# No mutable types in the default parameters
super().__init__()
self.conn = self.get_connection(conn_id)
connection_parameters = self.conn.extra_dejson
self.job_tracker = connection_parameters.get('job_tracker', None)
Expand Down

0 comments on commit 7e6372a

Please sign in to comment.