Skip to content

Commit

Permalink
Fixes proposed by Codacy
Browse files Browse the repository at this point in the history
  • Loading branch information
argenisleon committed Sep 13, 2018
1 parent 5e26e17 commit 1e96fb7
Show file tree
Hide file tree
Showing 4 changed files with 9 additions and 26 deletions.
2 changes: 1 addition & 1 deletion optimus/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from pyspark.sql.types import StructField, StructType, StringType

# Helpers
from optimus.helpers.checkit import is_tuple, is_, is_list, is_one_element, is_list_of_tuples
from optimus.helpers.checkit import is_tuple, is_, is_one_element, is_list_of_tuples
from optimus.helpers.functions import get_spark_dtypes_object
from optimus.spark import Spark

Expand Down
2 changes: 1 addition & 1 deletion optimus/dataframe/columns.py
Original file line number Diff line number Diff line change
Expand Up @@ -1118,7 +1118,7 @@ def nest(input_cols, output_col, shape="string", separator=""):
df = self

if has_(input_cols, F.Column):
"Transform non Column data to lit"
# Transform non Column data to lit
columns = [F.lit(col) if not is_(col, F.Column) else col for col in input_cols]
else:
columns = parse_columns(self, input_cols)
Expand Down
13 changes: 7 additions & 6 deletions optimus/enricher.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,14 +217,15 @@ def copy_collection(self, source_name, dest_name):

source = self.db[source_name]

logging.info('Dropping', dest_name, 'collection')
logging.info("Dropping {dest_name} collection".format(dest_name=dest_name))
self.db[dest_name].drop()
# if data exist in the collection drop it

pipeline = [{"$match": {}},
{"$out": dest_name},
]
logging.info('Copying', source_name, 'collection to', dest_name, 'collection ...')
logging.info("Copying {source_name} collection to {dest_name} collection ...".format(source_name=source_name,
dest_name=dest_name))

source.aggregate(pipeline)
logging.info('Done')
Expand Down Expand Up @@ -284,7 +285,7 @@ def drop_keys(collection_name, keys):
:return:
"""
for key in tqdm_notebook(keys, desc='Processing cols'):
logging.info('Dropping', key, 'field')
logging.info("Dropping {key}".format(key=key))
collection_name.update_many({}, {'$unset': {key: 1}})

def drop_collection(self, collection_name):
Expand Down Expand Up @@ -376,7 +377,7 @@ def create_missing_fields(self, cols, collection_name=None):
source = self.collection

for c in tqdm_notebook(cols, total=len(cols), desc='Processing cols'):
logging.info('Inserting', c)
logging.info("Inserting {c}".format(c=c))
if c:
source.update_many(
{c: {'$exists': False}},
Expand All @@ -387,7 +388,7 @@ def create_missing_fields(self, cols, collection_name=None):
}
);
else:
logging.info('Field', c, 'could not be added')
logging.info("Field {c} could not be added".format(c=c))

def cast(self, collection_name, field, convert_to):
"""
Expand Down Expand Up @@ -417,4 +418,4 @@ def cast(self, collection_name, field, convert_to):
collection.update_one({'_id': c['_id']}, {'$set': {field: val}})

except ValueError:
logging.info('Could not convert "', val, '" to', convert_to)
logging.info("Could not convert '{val}' to '{convert_to}'".format(val=val, convert_to=convert_to))
18 changes: 0 additions & 18 deletions optimus/io/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@
import tempfile
from urllib.request import Request, urlopen

from kombu import Consumer

from optimus.helpers.raiseit import RaiseIt
from optimus.spark import Spark

Expand Down Expand Up @@ -118,22 +116,6 @@ def avro(path, *args, **kwargs):

return df


"""
@staticmethod
def rabbit_mq():
def process_message(body, message):
print("The body is {}".format(body))
message.ack()
with Consumer(conn, queues=queue, callbacks=[process_message], accept=["application/json"]):
line = conn.drain_events(timeout=5)
print(line)
# conn.heartbeat_check()
"""


class Downloader(object):
def __init__(self, data_def):
self.data_def = data_def
Expand Down

0 comments on commit 1e96fb7

Please sign in to comment.