Skip to content

Commit

Permalink
#78 - Feature/11503 backend download handled data
Browse files Browse the repository at this point in the history
  • Loading branch information
latamen.aitmeddour committed Jul 26, 2021
1 parent 4a16a80 commit a1059f3
Show file tree
Hide file tree
Showing 6 changed files with 467 additions and 211 deletions.
16 changes: 9 additions & 7 deletions service_zone/backend/flask/neocampus/config.py.sample
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
MONGO_URL = "xxxx"

SWIFT_AUTHURL = "http://xxxx:8080/auth/v1.0"
SWIFT_USER = 'xx:xx'
SWIFT_KEY = 'xxxx'

SWIFT_FILES_DIRECTORY = "xxx"
# Sample configuration — copy to config.py and fill in real values.

# MongoDB connection string (read by the backend as MONGO_URL).
MONGO_URL = "xxxx"

# OpenStack Swift credentials — presumably TempAuth v1.0, given the
# /auth/v1.0 endpoint; verify against the Swift deployment.
SWIFT_AUTHURL = "http://xxxx:8080/auth/v1.0"
SWIFT_USER = 'xx:xx'
SWIFT_KEY = 'xxxx'

# Local directory where files downloaded from Swift are cached/served.
SWIFT_FILES_DIRECTORY = "xxx"

# InfluxDB connection URL.
INFLUX_URL = "xxxx"
258 changes: 186 additions & 72 deletions service_zone/backend/flask/neocampus/routers/mongo_data.py
Original file line number Diff line number Diff line change
@@ -1,72 +1,186 @@
from flask import Blueprint, jsonify, request
from ..services import mongo
from datetime import datetime

mongo_data_bp = Blueprint('mongo_data_bp', __name__)


@mongo_data_bp.route('/raw-data', methods=['POST'])
def get_metadata():
    """Return metadata of raw Swift objects matching type and date filters.

    Expects a JSON body with required keys 'filetype', 'datatype',
    'beginDate', 'endDate' (dates formatted YYYY-MM-DD) and optional
    paging keys 'limit' and 'offset', which must be sent together.
    Responds with {'result': {'objects': [...], 'length': n}} or an
    {'error': ...} payload.
    """
    try:
        params = {
            'filetype': request.get_json()['filetype'],
            'datatype': request.get_json()['datatype'],
            'beginDate': request.get_json()['beginDate'],
            'endDate': request.get_json()['endDate']
        }
    except:
        # Any missing key (or a non-JSON body) lands here.
        return jsonify({'error': 'Missing required fields.'})

    # Paging parameters are only meaningful as a pair.
    if(("limit" in request.get_json() and "offset" not in request.get_json()) or ("limit" not in request.get_json() and "offset" in request.get_json())):
        return jsonify({'error': 'Limit and offset have to be sent together.'})

    if("limit" in request.get_json() and "offset" in request.get_json()):
        params['limit'] = request.get_json()['limit']
        params['offset'] = request.get_json()['offset']

    date_format = "%Y-%m-%d"

    try:
        convertedBeginDate = datetime.strptime(params['beginDate'], date_format)
        convertedEndDate = datetime.strptime(params['endDate'], date_format)
    except Exception as e:
        # Malformed date string: report the strptime error verbatim.
        return jsonify({'error': str(e)})

    if(convertedBeginDate > convertedEndDate):
        # Dates arrived reversed: widen each to a full day, then swap
        # them so begin <= end again.
        params['beginDate'] = params['beginDate'] + " 23:59:59"
        params['endDate'] = params['endDate'] + " 00:00:00"
        date_format = "%Y-%m-%d %H:%M:%S"

        convertedBeginDateTemp = datetime.strptime(params['beginDate'], date_format)
        convertedBeginDate = datetime.strptime(params['endDate'], date_format)
        convertedEndDate = convertedBeginDateTemp

    if(convertedBeginDate == convertedEndDate):
        # Same-day query: cover the entire day.
        params['beginDate'] = params['beginDate'] + " 00:00:00"
        params['endDate'] = params['endDate'] + " 23:59:59"
        date_format = "%Y-%m-%d %H:%M:%S"

        convertedBeginDate = datetime.strptime(params['beginDate'], date_format)
        convertedEndDate = datetime.strptime(params['endDate'], date_format)

    # NOTE(review): when beginDate < endDate neither branch above runs,
    # so both bounds stay at midnight and records on the end day after
    # 00:00:00 are excluded — likely an oversight; confirm intent.
    params['beginDate'] = convertedBeginDate
    params['endDate'] = convertedEndDate

    nb_objects, mongo_collections = mongo.get_metadata("neOCampus", params)
    mongo_collections = list(mongo_collections)

    # Project each Mongo document onto the public response shape.
    output = {'objects': []}
    for obj in mongo_collections:
        output['objects'].append({
            'original_object_name': obj['original_object_name'],
            "swift_container": obj['swift_container'],
            "content_type": obj['content_type'],
            'swift_object_id': obj['swift_object_id'],
            'other_data': obj['other_data'],
            'swift_user': obj['swift_user'],
            'creation_date': obj['creation_date']
        })

    output['length'] = nb_objects

    return jsonify({'result': output})
from flask import Blueprint, jsonify, request, send_file
from ..services import mongo, influxdb
from datetime import datetime
import json, io, zipfile, time
import pandas as pd

mongo_data_bp = Blueprint('mongo_data_bp', __name__)


@mongo_data_bp.route('/raw-data', methods=['POST'])
def get_metadata():
    """Return metadata of raw Swift objects matching type and date filters.

    Expects a JSON body with required keys 'filetype', 'datatype',
    'beginDate', 'endDate' (dates formatted YYYY-MM-DD) and optional
    paging keys 'limit' and 'offset', which must be sent together.
    Responds with {'result': {'objects': [...], 'length': n}} or an
    {'error': ...} payload.
    """
    body = request.get_json()
    try:
        params = {
            'filetype': body['filetype'],
            'datatype': body['datatype'],
            'beginDate': body['beginDate'],
            'endDate': body['endDate']
        }
    except (KeyError, TypeError):
        # Missing key, or a non-dict/absent JSON body.
        return jsonify({'error': 'Missing required fields.'})

    # Paging parameters are only meaningful as a pair.
    if ('limit' in body) != ('offset' in body):
        return jsonify({'error': 'Limit and offset have to be sent together.'})
    if 'limit' in body and 'offset' in body:
        params['limit'] = body['limit']
        params['offset'] = body['offset']

    try:
        begin = datetime.strptime(params['beginDate'], "%Y-%m-%d")
        end = datetime.strptime(params['endDate'], "%Y-%m-%d")
    except Exception as e:
        # Malformed date string: report the strptime error verbatim.
        return jsonify({'error': str(e)})

    full_day_format = "%Y-%m-%d %H:%M:%S"
    if begin > end:
        # Dates arrived reversed: swap them and widen to whole days.
        begin = datetime.strptime(params['endDate'] + " 00:00:00", full_day_format)
        end = datetime.strptime(params['beginDate'] + " 23:59:59", full_day_format)
    else:
        # BUG FIX: the original widened the range to whole days only when
        # beginDate == endDate; for a normal range (begin < end) both
        # bounds stayed at midnight, silently excluding every record on
        # the end day. Widen consistently for begin <= end.
        begin = datetime.strptime(params['beginDate'] + " 00:00:00", full_day_format)
        end = datetime.strptime(params['endDate'] + " 23:59:59", full_day_format)

    params['beginDate'] = begin
    params['endDate'] = end

    nb_objects, mongo_collections = mongo.get_metadata("neOCampus", params)

    # Project each Mongo document onto the public response shape.
    output = {'objects': [
        {
            'original_object_name': obj['original_object_name'],
            "swift_container": obj['swift_container'],
            "content_type": obj['content_type'],
            'swift_object_id': obj['swift_object_id'],
            'other_data': obj['other_data'],
            'swift_user': obj['swift_user'],
            'creation_date': obj['creation_date']
        }
        for obj in list(mongo_collections)
    ]}
    output['length'] = nb_objects

    return jsonify({'result': output})

@mongo_data_bp.route('/handled-data-list', methods=['POST'])
def get_handled_data_list():
    """List the handled-data exports available for download.

    Returns a JSON object with one entry per backing store that has
    handled data — key 'influxDB' (InfluxDB.csv) and/or 'MongoDB'
    (MongoDB.json) — each holding the download file name and an
    approximate in-memory size.
    """
    # Function-scoped on purpose: only this handler needs it.
    import sys

    # TODO: validate the request payload ('datatype', 'beginDate',
    # 'endDate') once date filtering is implemented for this route.

    result = {}

    influx_data = influxdb.get_handled_data()
    mongo_data = mongo.get_handled_data()

    if influx_data:
        # NOTE: sys.getsizeof is shallow (container overhead only), so
        # 'filesize' is a rough estimate, not the exact export size.
        result['influxDB'] = {
            'filename': 'InfluxDB.csv',
            'filesize': sys.getsizeof(influx_data)
        }

    if mongo_data:
        result['MongoDB'] = {
            'filename': 'MongoDB.json',
            'filesize': sys.getsizeof(mongo_data)
        }

    return jsonify(result)

@mongo_data_bp.route('/handled-data-file', methods=['POST'])
def get_handled_data_zipped_file():
    """Build and send a zip archive of the selected handled-data exports.

    JSON body flags 'mongodb_file' and 'influxdb_file' choose which
    stores to include; MongoDB data is archived as MongoDB.json and
    InfluxDB data as InfluxDB.csv. Responds with the archive as an
    attachment, or a JSON message when nothing was selected/available.
    """
    # TODO: validate the request payload ('datatype', 'beginDate',
    # 'endDate') once date filtering is implemented for this route.

    body = request.get_json()
    result = {
        'MongoDB': {},
        'InfluxDB': {}
    }

    # Fetch only the stores the caller actually asked for.
    if body.get('mongodb_file'):
        result['MongoDB'] = mongo.get_handled_data()
    if body.get('influxdb_file'):
        result['InfluxDB'] = influxdb.get_handled_data()

    memory_file = io.BytesIO()
    with zipfile.ZipFile(memory_file, 'w') as zip_file:
        # JSON FILE - MONGODB
        if result['MongoDB']:
            info = zipfile.ZipInfo("MongoDB.json")
            info.date_time = time.localtime(time.time())[:6]
            info.compress_type = zipfile.ZIP_DEFLATED
            # assumes mongo.get_handled_data() yields str/bytes suitable
            # for writestr — TODO confirm against the service.
            zip_file.writestr(info, result["MongoDB"])

        # CSV FILE - INFLUXDB
        if result['InfluxDB']:
            info = zipfile.ZipInfo("InfluxDB.csv")
            info.date_time = time.localtime(time.time())[:6]
            info.compress_type = zipfile.ZIP_DEFLATED
            # Route the InfluxDB result through pandas to serialize it as
            # CSV bytes for the archive.
            csv_bytes = pd.DataFrame(result['InfluxDB']).to_csv().encode('utf-8')
            zip_file.writestr(info, csv_bytes)

    # Rewind so send_file streams from the start of the buffer.
    memory_file.seek(0)

    # An empty archive still contains the 22-byte end-of-central-directory
    # record (bytes, not kilobytes), so anything larger means at least one
    # file was written.
    if memory_file.getbuffer().nbytes > 22:
        return send_file(
            memory_file,
            attachment_filename='handled_data.zip',
            as_attachment=True
        )
    return jsonify({'msg': "No content available."})
81 changes: 81 additions & 0 deletions service_zone/backend/flask/neocampus/routers/swift_file.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
<<<<<<< HEAD
import os
import uuid
from zipfile import ZipFile
Expand Down Expand Up @@ -79,3 +80,83 @@ def storage():
content_type, mongodb_url, other_data)

return jsonify({"response": "Done !"})
=======
import os
import uuid
from zipfile import ZipFile
import base64
from flask import Blueprint, jsonify, current_app, request, send_from_directory
from ..services import swift, mongo

swift_file_bp = Blueprint('swift_file_bp', __name__)


@swift_file_bp.route('/swift-files', methods=['POST'])
def swift_files():
    """Download the requested Swift objects, bundle them into a zip, and
    return per-file URLs plus the URL of the zip archive."""
    downloaded = []
    archive_rel_path = os.path.join(current_app.config['SWIFT_FILES_DIRECTORY'],
                                    f'{str(uuid.uuid4().hex)}.zip')

    # The context manager closes the archive exactly where the original
    # called close().
    with ZipFile(os.path.join(current_app.root_path, archive_rel_path), 'w') as archive:
        for entry in request.get_json():
            container = entry["container_name"]
            obj_id = entry["object_id"]
            local_path = swift.download_object_file(container, obj_id)
            downloaded.append({
                "container_name": container,
                "object_id": obj_id,
                "object_file": os.path.join(request.host_url, local_path)
            })
            archive.write(os.path.join(current_app.root_path, local_path),
                          os.path.basename(local_path))

    return jsonify({
        'swift_files': downloaded,
        'swift_zip': os.path.join(request.host_url, archive_rel_path),
    })


@swift_file_bp.route('/cache-swift-files/<path:filename>')
def download(filename):
    """Serve a previously cached Swift file from the local cache directory."""
    cache_dir = os.path.join(current_app.root_path,
                             current_app.config['SWIFT_FILES_DIRECTORY'])
    return send_from_directory(directory=cache_dir, filename=filename)

@swift_file_bp.route('/storage', methods=['POST'])
def storage():
    """Decode an uploaded base64 data-URL file and store it in the datalake.

    JSON body keys: 'file' (data URL: "data:<mime>;base64,<payload>"),
    'filename', 'othermeta', 'typeFile'. The decoded bytes and Swift/Mongo
    connection settings are handed to mongo.insert_datalake.
    """
    body = request.get_json()
    # id_type = body["idType"]
    file = body["file"]
    filename = body["filename"]
    other_meta = body["othermeta"]
    type_file = body["typeFile"]

    # BUG FIX: split(",") with no maxsplit truncated the payload at the
    # first comma inside it; split(",", 1) keeps everything after the
    # "data:<mime>;base64," prefix intact.
    data_file = base64.b64decode(file.split(",", 1)[1])

    # FIXME: move into an Apache Airflow DAG and fetch picture content
    # from MongoDB — otherwise pictures will not render after download.
    file_content = data_file

    container_name = "neOCampus"
    mongodb_url = current_app.config['MONGO_URL']
    user = current_app.config['SWIFT_USER']
    key = current_app.config['SWIFT_KEY']
    authurl = current_app.config['SWIFT_AUTHURL']
    content_type = type_file
    application = None
    data_process = "custom"
    processed_data_area_service = ["MongoDB"]
    other_data = other_meta

    mongo.insert_datalake(file_content, user, key, authurl, container_name, filename,
                          processed_data_area_service, data_process, application,
                          content_type, mongodb_url, other_data)

    return jsonify({"response": "Done !"})
>>>>>>> 56dcfbe... #78 - Added API routes to get metadata handled datalist and zipped file about MongoDB and InfluxDB

0 comments on commit a1059f3

Please sign in to comment.