-
Notifications
You must be signed in to change notification settings - Fork 0
/
ingestion.py
62 lines (44 loc) · 1.61 KB
/
ingestion.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
import pandas as pd
import numpy as np
import os
import json
from datetime import datetime
import logging
# Module-level logger shared by the ingestion functions below.
logger = logging.getLogger(__name__)
# Load config.json (resolved relative to the current working directory,
# so run the script from the project root) and pull the folder paths the
# ingestion step reads from and writes to.
with open('config.json','r') as f:
    config = json.load(f)
input_folder_path = config['input_folder_path']    # folder holding the raw CSV files
output_folder_path = config['output_folder_path']  # folder receiving ingestion outputs
#############Function for data ingestion
def merge_multiple_dataframe():
    """Ingest raw data for the pipeline.

    Reads every CSV file found in ``input_folder_path``, concatenates them,
    drops duplicate rows, and writes the result to
    ``<output_folder_path>/finaldata.csv``. The names of the ingested files
    are recorded in ``<output_folder_path>/ingestedfiles.txt``.

    Returns:
        pd.DataFrame: the combined, de-duplicated dataset (an empty
        DataFrame when the input folder contains no CSV files).
    """
    logger.info('starting data ingestion process')
    # List regular files in the input folder ([] when the folder is missing).
    filenames = next(os.walk(input_folder_path), (None, None, []))[2]
    # Only ingest CSV files; ignore anything else sitting in the folder,
    # which would otherwise crash pd.read_csv.
    csv_files = [name for name in filenames if name.endswith('.csv')]

    data_list = [
        pd.read_csv(os.path.join(input_folder_path, name))
        for name in csv_files
    ]
    # pd.concat raises ValueError on an empty list, so guard against an
    # empty input folder instead of crashing.
    if data_list:
        data = pd.concat(data_list, ignore_index=True)
        data = data.drop_duplicates(ignore_index=True)
    else:
        logger.warning('no CSV files found in %s', input_folder_path)
        data = pd.DataFrame()

    # Ensure the output folder exists up front (handles nested paths,
    # unlike the single-level os.mkdir fallback).
    os.makedirs(output_folder_path, exist_ok=True)
    data_path = os.path.join(output_folder_path, 'finaldata.csv')
    data.to_csv(data_path, index=False)

    # Keep a provenance record of which files were ingested.
    record_path = os.path.join(output_folder_path, 'ingestedfiles.txt')
    with open(record_path, 'w') as f:
        for name in csv_files:
            f.write(name + '\n')
    logger.info(f"record of ingestion saved in {record_path}")
    return data
# Script entry point: run the ingestion step when executed directly.
if __name__ == '__main__':
    merge_multiple_dataframe()