diff --git a/.gitignore b/.gitignore index 340c1a4..cb9611b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# Ignoring shape predictor +app/data/classifiers/shape_predictor_68_face_landmarks.dat + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/app/__init__.py b/app/__init__.py index 7fe3a92..adf9907 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,14 +1,19 @@ import os import logging.config +from celery import Celery from flask import Flask from flask_cors import CORS from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow +from flask_socketio import SocketIO +from cssi.core import CSSI + from config import CONFIG BASE_DIR = os.path.abspath(os.path.dirname(__file__)) -LOG_FILES_PATH = os.path.split(BASE_DIR)[0] + '/logs' +LOG_FILES_PATH = os.path.join(os.path.split(BASE_DIR)[0], "logs") +LOGGER_CONFIG_PATH = os.path.join(os.path.split(BASE_DIR)[0], "config", "logging.conf") # Try to create a log folder try: @@ -17,31 +22,63 @@ except OSError: pass -# load logging config file -logging.config.fileConfig('config/logging.conf', disable_existing_loggers=False) -# init file logger -logger = logging.getLogger('CSSI_REST_API') +# Load logging config file +logging.config.fileConfig(LOGGER_CONFIG_PATH, disable_existing_loggers=False) +# Init file logger +logger = logging.getLogger('cssi.api') + +# set `socketio` and `engineio` log level to `ERROR` +logging.getLogger('socketio').setLevel(logging.ERROR) +logging.getLogger('engineio').setLevel(logging.ERROR) +cssi = CSSI(shape_predictor="app/data/classifiers/shape_predictor_68_face_landmarks.dat", debug=False, config_file="config.cssi") db = SQLAlchemy() ma = Marshmallow() +socketio = SocketIO() +celery = Celery(__name__, + broker=os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0'), + backend=os.environ.get('CELERY_BACKEND', 'redis://localhost:6379/0')) +celery.config_from_object('celeryconfig') + +# Import models to register them with 
SQLAlchemy +from app.models import * # noqa + +# Import celery task to register them with Celery workers +from . import tasks # noqa + +# Import Socket.IO events to register them with Flask-SocketIO +from . import events # noqa -def create_app(config_name): +def create_app(config_name=None, main=True): + if config_name is None: + config_name = os.environ.get('CSSI_CONFIG', 'default') app = Flask(__name__) - CORS(app, support_credentials=True) app.config.from_object(CONFIG[config_name]) + app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # disabling sqlalchemy event system + CORS(app, support_credentials=True) # Add CORS support + CONFIG[config_name].init_app(app) root = CONFIG[config_name].APPLICATION_ROOT - # flask migrate doesn't recognize the tables without this import - from app.models import Application, Genre, ApplicationType, Session, Questionnaire - # Set up extensions db.init_app(app) + if main: + # Initialize socketio server and attach it to the message queue. + socketio.init_app(app, + message_queue=app.config['SOCKETIO_MESSAGE_QUEUE']) + else: + # Initialize socketio to emit events through the message queue. + socketio.init_app(None, + message_queue=app.config['SOCKETIO_MESSAGE_QUEUE'], + async_mode='threading') + + celery.conf.update(CONFIG[config_name].CELERY_CONFIG) + # Create app blueprints from app.routes.v1 import main as main_blueprint app.register_blueprint(main_blueprint, url_prefix=root + '/') diff --git a/app/data/classifiers/readme.txt b/app/data/classifiers/readme.txt new file mode 100644 index 0000000..177460f --- /dev/null +++ b/app/data/classifiers/readme.txt @@ -0,0 +1,2 @@ +use the below link to download the shape predictor file. +http://dlib.net/files/shape_predictor_68_face_landmarks.dat.bz2 \ No newline at end of file diff --git a/app/events.py b/app/events.py new file mode 100644 index 0000000..c34dc4b --- /dev/null +++ b/app/events.py @@ -0,0 +1,64 @@ +import logging + +from app.models import Session +from . 
import socketio, celery, db +from .tasks import calculate_latency, persist_frames, record_sentiment +from .utils import decode_base64 + +logger = logging.getLogger('cssi.api') + + +@socketio.on("test/init") +def on_test_init(session_id): + from .wsgi_aux import app + with app.app_context(): + session = Session.query.filter_by(id=session_id).first() + if session is not None: + if session.status == 'initialized': + session.status = 'started' + db.session.commit() + socketio.send({"status": "success", "message": "The test session started successfully."}, json=True) + logger.info("Successfully initialized the test session. ID: {0}".format(session_id)) + + +@socketio.on("test/start") +def on_test_start(head_frame, scene_frame, session_id, latency_interval=2): + _head_frame = head_frame["head_frame"] + _scene_frame = scene_frame["scene_frame"] + latency_frame_count = 10 + + # decoding head-frame image(base64) string to OpenCV compatible format + _head_frame_decoded = decode_base64(_head_frame) + + # decoding scene-frame image(base64) string to OpenCV compatible format + _scene_frame_decoded = decode_base64(_scene_frame) + + # chain the two tasks which persist the frames and pass them to + # the latency worker after the specified time interval. 
+ result = persist_frames.delay(head_frame=_head_frame, scene_frame=_scene_frame, limit=latency_frame_count) + + if result: + calculate_latency.delay(session_id["session_id"], limit=latency_frame_count) + + record_sentiment.apply_async(args=[_head_frame_decoded, session_id["session_id"]], expires=10) + + +@socketio.on("test/stop") +def on_test_stop(session_id): + from .wsgi_aux import app + with app.app_context(): + session = Session.query.filter_by(id=session_id).first() + if session is not None: + if session.status == 'started': + session.status = 'completed' + db.session.commit() + socketio.send({"status": "success", "message": "The test session completed successfully."}, json=True) + celery.control.purge() # stop all celery workers + logger.info("The test session was terminated successfully. ID: {0}".format(session_id)) + + +@socketio.on("disconnect") +def on_disconnect(): + """A Socket.IO client has disconnected.""" + celery.control.purge() # stop all celery workers + logger.info("The Socket.IO client disconnected.") diff --git a/app/models/application.py b/app/models/application.py index 50b2754..a5e0fb4 100644 --- a/app/models/application.py +++ b/app/models/application.py @@ -13,6 +13,7 @@ class Application(db.Model): developer = db.Column(db.String(100), nullable=False) type_id = db.Column(db.Integer, db.ForeignKey('application_type.id', use_alter=True, name='fk_type_id'), nullable=False) description = db.Column(db.String(250), nullable=False) + public_sharing = db.Column(db.Boolean, nullable=False, default=False) creation_date = db.Column(db.TIMESTAMP, server_default=db.func.current_timestamp(), nullable=False) genre_id = db.Column(db.Integer, db.ForeignKey('genre.id', use_alter=True, name='fk_genre_id'), nullable=False) sessions = db.relationship('Session', backref='app', lazy='dynamic') @@ -36,5 +37,6 @@ class ApplicationSchema(ma.Schema): developer = fields.String(required=True, validate=validate.Length(1, 100)) type = 
fields.Nested(ApplicationTypeSchema, dump_only=True) description = fields.String(required=True, validate=validate.Length(1, 250)) - creation_date = fields.DateTime() + creation_date = fields.DateTime(dump_only=True) genre = fields.Nested(GenreSchema, dump_only=True) + public_sharing = fields.Boolean(required=True) diff --git a/app/models/application_type.py b/app/models/application_type.py index c93015a..0ddb96a 100644 --- a/app/models/application_type.py +++ b/app/models/application_type.py @@ -50,7 +50,7 @@ def __repr__(self): class ApplicationTypeSchema(ma.Schema): - id = fields.Integer() + id = fields.Integer(dump_only=True) name = fields.String(required=True) display_name = fields.String(required=True) display_name_full = fields.String(required=True) diff --git a/app/models/genre.py b/app/models/genre.py index 89a0ecf..88fd884 100644 --- a/app/models/genre.py +++ b/app/models/genre.py @@ -48,6 +48,6 @@ def __repr__(self): class GenreSchema(ma.Schema): - id = fields.Integer() + id = fields.Integer(dump_only=True) name = fields.String(required=True) display_name = fields.String(required=True) diff --git a/app/models/questionnaire.py b/app/models/questionnaire.py index 3ae3522..b9a368d 100644 --- a/app/models/questionnaire.py +++ b/app/models/questionnaire.py @@ -1,4 +1,4 @@ -from marshmallow import fields, validate +from marshmallow import fields from .. 
import db, ma @@ -19,14 +19,8 @@ def __repr__(self): return '' % self.id -class SymptomSchema(ma.Schema): - name = fields.String(required=False) - display_name = fields.String(required=False) - score = fields.String(required=False) - - class QuestionnaireSchema(ma.Schema): id = fields.Integer(dump_only=True) - pre = fields.List(fields.Nested(SymptomSchema), dump_only=True) - post = fields.List(fields.Nested(SymptomSchema), dump_only=True) - creation_date = fields.DateTime() + pre = fields.Dict(required=True) + post = fields.Dict(required=False) + creation_date = fields.DateTime(dump_only=True) diff --git a/app/models/session.py b/app/models/session.py index d71089b..ee8319b 100644 --- a/app/models/session.py +++ b/app/models/session.py @@ -1,4 +1,5 @@ from marshmallow import fields, validate +from sqlalchemy.ext.mutable import MutableList from .. import db, ma from .application import ApplicationSchema from .questionnaire import QuestionnaireSchema @@ -8,14 +9,15 @@ class Session(db.Model): __tablename__ = 'session' id = db.Column(db.Integer, primary_key=True) + status = db.Column(db.String(25), nullable=False, default='initialized') app_id = db.Column(db.Integer, db.ForeignKey('application.id', use_alter=True, name='fk_app_id'), nullable=False) creation_date = db.Column(db.TIMESTAMP, server_default=db.func.current_timestamp(), nullable=False) expected_emotions = db.Column(db.JSON, nullable=False) questionnaire_id = db.Column(db.Integer, db.ForeignKey('questionnaire.id', use_alter=True, name='fk_questionnaire_id'), nullable=False) cssi_score = db.Column(db.Float, nullable=False, default=0) - latency_scores = db.Column(db.JSON, nullable=False, default={}) + latency_scores = db.Column(MutableList.as_mutable(db.JSON), nullable=False, default=[]) total_latency_score = db.Column(db.Float, nullable=False, default=0) - sentiment_scores = db.Column(db.JSON, nullable=False, default={}) + sentiment_scores = db.Column(MutableList.as_mutable(db.JSON), nullable=False, 
default=[]) total_sentiment_score = db.Column(db.Float, nullable=False, default=0) questionnaire_scores = db.Column(db.JSON, nullable=True, default={}) total_questionnaire_score = db.Column(db.Float, nullable=False, default=0) @@ -31,7 +33,16 @@ def __repr__(self): class SessionSchema(ma.Schema): id = fields.Integer(dump_only=True) - creation_date = fields.DateTime() - expected_emotions = fields.List(fields.String(), dump_only=True) + status = fields.String() + creation_date = fields.DateTime(dump_only=True) + expected_emotions = fields.List(fields.String(), required=True) app = fields.Nested(ApplicationSchema, dump_only=True) questionnaire = fields.Nested(QuestionnaireSchema, dump_only=True) + cssi_score = fields.Float() + latency_scores = fields.List(fields.Dict()) + total_latency_score = fields.Float() + sentiment_scores = fields.List(fields.Dict()) + total_sentiment_score = fields.Float() + questionnaire_scores = fields.Dict() + total_questionnaire_score = fields.Float() + diff --git a/app/routes/v1/application.py b/app/routes/v1/application.py index 32a82fb..a0f327a 100644 --- a/app/routes/v1/application.py +++ b/app/routes/v1/application.py @@ -20,10 +20,10 @@ import traceback from flask_cors import cross_origin from flask import Blueprint, jsonify, request -from app.models import Application, ApplicationType,ApplicationTypeSchema, ApplicationSchema, Genre, GenreSchema +from app.models import Application, ApplicationType, ApplicationTypeSchema, ApplicationSchema, Genre, GenreSchema from app import db -logger = logging.getLogger('CSSI_REST_API') +logger = logging.getLogger('cssi.api') application = Blueprint('application', __name__) @@ -73,17 +73,6 @@ def get_application_genres(): @cross_origin(supports_credentials=True) def create_application(): """Create a new Application""" - - json_data = request.get_json(force=True) - - if not json_data: - return jsonify({'status': 'error', 'message': 'No input was provided.'}), 400 - - # Validate and deserialize input - 
data, errors = application_schema.load(json_data) - if errors: - return jsonify({'status': 'error', 'message': 'Incorrect format of data provided.', 'data': errors}), 422 - name = request.json['name'] identifier = str(uuid.uuid4().hex) developer = request.json['developer'] @@ -99,7 +88,8 @@ def create_application(): if not genre: return {'status': 'error', 'message': 'Invalid Genre Type'}, 400 - new_application = Application(name=name, identifier=identifier, developer=developer, type=type, description=description, genre=genre) + new_application = Application(name=name, identifier=identifier, + developer=developer, type=type, description=description, genre=genre) db.session.add(new_application) db.session.commit() @@ -112,7 +102,8 @@ def create_application(): @application.after_request def after_request(response): """Logs a debug message on every successful request.""" - logger.debug('%s %s %s %s %s', request.remote_addr, request.method, request.scheme, request.full_path, response.status) + logger.debug('%s %s %s %s %s', request.remote_addr, request.method, + request.scheme, request.full_path, response.status) return response @@ -120,5 +111,6 @@ def after_request(response): def exceptions(e): """Logs an error message and stacktrace if a request ends in error.""" tb = traceback.format_exc() - logger.error('%s %s %s %s 5xx INTERNAL SERVER ERROR\n%s', request.remote_addr, request.method, request.scheme, request.full_path, tb) + logger.error('%s %s %s %s 5xx INTERNAL SERVER ERROR\n%s', request.remote_addr, + request.method, request.scheme, request.full_path, tb) return e.status_code diff --git a/app/routes/v1/questionnaire.py b/app/routes/v1/questionnaire.py index 1370997..be21216 100644 --- a/app/routes/v1/questionnaire.py +++ b/app/routes/v1/questionnaire.py @@ -17,7 +17,7 @@ from flask import Blueprint, jsonify, request from flask_cors import cross_origin -from app.models import Questionnaire, ApplicationType, QuestionnaireSchema, Genre +from app.models import 
Questionnaire, QuestionnaireSchema from app import db questionnaire = Blueprint('questionnaire', __name__) @@ -44,6 +44,17 @@ def get_questionnaire(id): return jsonify({'status': 'success', 'message': None, 'data': result}), 200 +@questionnaire.route('//post', methods=['PATCH']) +@cross_origin(supports_credentials=True) +def update_questionnaire(id): + """Update the post questionnaire when an id is passed in""" + questionnaire = Questionnaire.query.get(id) + questionnaire.post = request.json['post'] + db.session.commit() + result = questionnaire_schema.dump(questionnaire).data + return jsonify({'status': 'success', 'message': 'Successfully added the post questionnaire', 'data': result}), 200 + + @questionnaire.route('/', methods=['POST']) @cross_origin(supports_credentials=True) def create_questionnaire(): diff --git a/app/routes/v1/session.py b/app/routes/v1/session.py index e116a12..bed592f 100644 --- a/app/routes/v1/session.py +++ b/app/routes/v1/session.py @@ -20,6 +20,8 @@ from app.models import Session, SessionSchema, Application, Questionnaire from app import db +from app import cssi + session = Blueprint('session', __name__) session_schema = SessionSchema(strict=True) @@ -40,21 +42,73 @@ def get_sessions_list(): def get_session(id): """Get info on a session when an id is passed in""" session = Session.query.get(id) - result = sessions_schema.dump(session).data + result = session_schema.dump(session).data return jsonify({'status': 'success', 'message': None, 'data': result}), 200 +@session.route('/', methods=['PUT']) +@cross_origin(supports_credentials=True) +def update_session(id): + """Update information when the session comes to an end.""" + session = Session.query.get(id) + + # get all the final scores + latency_score = cssi.latency.generate_final_score(scores=session.latency_scores) + sentiment_score = cssi.sentiment.generate_final_score(all_emotions=session.sentiment_scores, expected_emotions=session.expected_emotions) + questionnaire_score = 
cssi.questionnaire.generate_final_score(pre=session.questionnaire.pre, post=session.questionnaire.post) + cssi_score = cssi.generate_cssi_score(tl=latency_score, ts=sentiment_score, tq=questionnaire_score) + + # set the scores in the session + session.total_latency_score = latency_score + session.total_sentiment_score = sentiment_score + session.total_questionnaire_score = questionnaire_score + session.cssi_score = cssi_score + + # get a breakdown of the questionnaire scores and set it in the session + [pre_n, pre_o, pre_d, pre_ts], [post_n, post_o, post_d, post_ts] = cssi.questionnaire.generate_score_breakdown(pre=session.questionnaire.pre, post=session.questionnaire.post) + q_score_breakdown = { + "pre": { + "N": pre_n, + "O": pre_o, + "D": pre_d, + "TS": pre_ts + }, + "post": { + "N": post_n, + "O": post_o, + "D": post_d, + "TS": post_ts + } + } + session.questionnaire_scores = q_score_breakdown + + session.status = "completed" + db.session.commit() + + result = session_schema.dump(session).data + + return jsonify({'status': 'success', 'message': 'Successfully updated the session data', 'data': result}), 200 + + +@session.route('//status', methods=['PATCH']) +@cross_origin(supports_credentials=True) +def update_session_status(id): + """Update session status""" + session = Session.query.get(id) + session.status = request.json['status'] + db.session.commit() + result = session_schema.dump(session).data + return jsonify({'status': 'success', 'message': 'Successfully update the session status', 'data': result}), 200 + + @session.route('/', methods=['POST']) @cross_origin(supports_credentials=True) def create_session(): """Create a new Session""" - app = Application.query.filter_by(id=request.json['app']).first() - questionnaire = Questionnaire.query.filter_by(id=request.json['questionnaire_id']).first() + questionnaire = Questionnaire.query.filter_by(id=request.json['questionnaire']).first() expected_emotions = request.json['expected_emotions'] - 
print(questionnaire) - # validate application type if not app: return {'status': 'error', 'message': 'Invalid application.'}, 400 diff --git a/app/tasks.py b/app/tasks.py new file mode 100644 index 0000000..052724d --- /dev/null +++ b/app/tasks.py @@ -0,0 +1,120 @@ +import json +import logging +import redis +from datetime import datetime + +from . import celery, cssi, db +from app.models import Session +from .utils import decode_base64 + +logger = logging.getLogger('cssi.api') + + +@celery.task +def calculate_latency(session_id, limit): + """Celery task which handles latency score generation and persistence""" + from .wsgi_aux import app + with app.app_context(): + head_key = "head-frames" + scene_key = "scene-frames" + + r = redis.StrictRedis(host='localhost', port=6379, db=0) + head_frames_raw = get_frames_from_redis(r=r, key=head_key, limit=limit) + scene_frames_raw = get_frames_from_redis(r=r, key=scene_key, limit=limit) + + head_stream = [] + scene_stream = [] + + for data in head_frames_raw: + head_stream.append(decode_base64(data)) + + for data in scene_frames_raw: + scene_stream.append(decode_base64(data)) + + _, phf_pitch, phf_yaw, phf_roll = cssi.latency.calculate_head_pose(frame=head_stream[0]) + _, chf_pitch, chf_yaw, chf_roll = cssi.latency.calculate_head_pose(frame=head_stream[1]) + _, _, ff_angles, sf_angles = cssi.latency.calculate_camera_pose(first_frame=scene_stream[0], + second_frame=scene_stream[1], crop=True, + crop_direction='horizontal') + + head_angles = [[phf_pitch, phf_yaw, phf_roll], [chf_pitch, chf_yaw, chf_roll]] + camera_angles = [ff_angles, sf_angles] + + latency_score = cssi.latency.generate_rotation_latency_score(head_angles=head_angles, + camera_angles=camera_angles) + + head_movement = cssi.latency.check_for_head_movement(head_stream) + logger.debug("Head movement detected: {0}".format(head_movement)) + + pst = cssi.latency.calculate_pst(scene_stream, 10) + logger.debug("Pixel switching time: {0}".format(pst)) + + session = 
Session.query.filter_by(id=session_id).first() + if session is not None: + new_score = {'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'score': latency_score} + session.latency_scores.append(new_score) + db.session.commit() + + +@celery.task +def record_sentiment(head_frame, session_id): + """Celery task which handles sentiment score generation and persistence""" + from .wsgi_aux import app + with app.app_context(): + sentiment = cssi.sentiment.generate_sentiment_score(frame=head_frame) + session = Session.query.filter_by(id=session_id).first() + if session is not None: + if sentiment is not None: + new_score = {'timestamp': datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'sentiment': sentiment} + session.sentiment_scores.append(new_score) + db.session.commit() + + +@celery.task +def persist_frames(head_frame, scene_frame, limit): + r = redis.StrictRedis(host='localhost', port=6379, db=0) + is_complete = save_frames_on_redis(r=r, head_frame=head_frame, scene_frame=scene_frame, limit=limit) + return is_complete + + +def save_frames_on_redis(r, head_frame, scene_frame, limit): + """Store dictionary on redis""" + head_key = "head-frames" + scene_key = "scene-frames" + head_frame_count = 0 + scene_frame_count = 0 + if r.exists(head_key) and r.exists(scene_key): + head_values = json.loads(r.get(head_key)) + head_values.append(head_frame) + head_frame_count = len(head_values) + + scene_values = json.loads(r.get(scene_key)) + scene_values.append(scene_frame) + scene_frame_count = len(scene_values) + else: + head_values = [head_frame] + scene_values = [scene_frame] + + r = redis.StrictRedis(host='localhost') + + r.set(head_key, json.dumps(head_values)) + r.set(scene_key, json.dumps(scene_values)) + + if head_frame_count >= limit and scene_frame_count >= limit: + return True + + return False + + +def get_frames_from_redis(r, key, limit): + count = 0 + frames = [] + if r.exists(key): + frames = json.loads(r.get(key)) + count = len(frames) + + if count >= limit: + 
r.delete(key) + logger.debug("Cleaning frame stream - key: {0}".format(key)) + + return frames diff --git a/app/utils.py b/app/utils.py new file mode 100644 index 0000000..b670dfd --- /dev/null +++ b/app/utils.py @@ -0,0 +1,38 @@ +import time +import base64 +from io import BytesIO +from PIL import Image + +from flask import url_for as _url_for, current_app, _request_ctx_stack + + +def timestamp(): + """Return the current timestamp as an integer.""" + return int(time.time()) + + +def url_for(*args, **kwargs): + """ + url_for replacement that works even when there is no request context. + """ + if '_external' not in kwargs: + kwargs['_external'] = False + reqctx = _request_ctx_stack.top + if reqctx is None: + if kwargs['_external']: + raise RuntimeError('Cannot generate external URLs without a ' + 'request context.') + with current_app.test_request_context(): + return _url_for(*args, **kwargs) + return _url_for(*args, **kwargs) + + +def decode_base64(base64_str): + """decodes a base64 image string""" + starter = base64_str.find(',') + image_data = base64_str[starter + 1:] + image_data = bytes(image_data, encoding="ascii") + image = Image.open(BytesIO(base64.b64decode(image_data))) + if image.mode != "RGB": + image = image.convert("RGB") + return image diff --git a/app/wsgi_aux.py b/app/wsgi_aux.py new file mode 100644 index 0000000..3755ae6 --- /dev/null +++ b/app/wsgi_aux.py @@ -0,0 +1,7 @@ +import os + +from . 
import create_app + +# Create an application instance that auxiliary processes such as Celery +# workers can use +app = create_app(os.environ.get('CSSI_CONFIG', 'default'), main=False) \ No newline at end of file diff --git a/celeryconfig.py b/celeryconfig.py new file mode 100644 index 0000000..a77fc13 --- /dev/null +++ b/celeryconfig.py @@ -0,0 +1,7 @@ + +# global Celery options that apply to all configurations + +# enable the pickle serializer +task_serializer = 'pickle' +result_serializer = 'pickle' +accept_content = ['pickle'] \ No newline at end of file diff --git a/config.cssi b/config.cssi new file mode 100644 index 0000000..d0226b4 --- /dev/null +++ b/config.cssi @@ -0,0 +1,20 @@ +[run] +plugins = + heartrate.plugin + ecg.plugin + +[latency] +latency_weight = 50 +latency_boundary = 3 + +[sentiment] +sentiment_weight = 30 + +[questionnaire] +questionnaire_weight = 20 + +[heartrate.plugin] +weight = 0 + +[ecg.plugin] +weight = 0 \ No newline at end of file diff --git a/config/config.py b/config/config.py index dd7f2c2..ded27de 100644 --- a/config/config.py +++ b/config/config.py @@ -34,7 +34,7 @@ import logging import os -logger = logging.getLogger('CSSI_REST_API') +logger = logging.getLogger('cssi.api') ENVIRONMENT_FILE_NAME = '.env' BASE_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -65,6 +65,12 @@ class Config: APP_NAME = os.environ.get('APP_NAME') or 'CSSI_REST_API' APPLICATION_ROOT = os.environ.get('APPLICATION_ROOT') or '/api/v1' + CELERY_BROKER_URL = os.environ.get( + 'CELERY_BROKER_URL', 'redis://localhost:6379') + CELERY_CONFIG = {} + SOCKETIO_MESSAGE_QUEUE = os.environ.get( + 'SOCKETIO_MESSAGE_QUEUE', os.environ.get('CELERY_BROKER_URL', + 'redis://')) if os.environ.get('SECRET_KEY'): SECRET_KEY = os.environ.get('SECRET_KEY') @@ -92,7 +98,11 @@ class DevelopmentConfig(Config): DEBUG = True SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \ - 'sqlite:///' + os.path.join(BASE_DIR, 'cssi-dev.sqlite') + 'sqlite:///' + 
os.path.join(BASE_DIR, 'cssi-dev.sqlite') + CELERY_BROKER_URL = os.environ.get( + 'DEV_CELERY_BROKER_URL', 'redis://localhost:6379') + CELERY_BACKEND = os.environ.get('DEV_CELERY_BACKEND') or \ + 'sqlite:///' + os.path.join(BASE_DIR, 'celery-dev.sqlite') @classmethod def init_app(cls, app): @@ -113,7 +123,13 @@ class TestingConfig(Config): TESTING = True SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \ - 'sqlite:///' + os.path.join(BASE_DIR, 'cssi-test.sqlite') + 'sqlite:///' + os.path.join(BASE_DIR, 'cssi-test.sqlite') + CELERY_BROKER_URL = os.environ.get( + 'TEST_CELERY_BROKER_URL', 'redis://localhost:6379') + CELERY_BACKEND = os.environ.get('TEST_CELERY_BACKEND') or \ + 'sqlite:///' + os.path.join(BASE_DIR, 'celery-test.sqlite') + CELERY_CONFIG = {'CELERY_ALWAYS_EAGER': True} + SOCKETIO_MESSAGE_QUEUE = None @classmethod def init_app(cls, app): @@ -133,7 +149,11 @@ class ProductionConfig(Config): """ SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \ - 'sqlite:///' + os.path.join(BASE_DIR, 'cssi.sqlite') + 'sqlite:///' + os.path.join(BASE_DIR, 'cssi.sqlite') + CELERY_BROKER_URL = os.environ.get( + 'CELERY_BROKER_URL', 'redis://localhost:6379') + CELERY_BACKEND = os.environ.get('CELERY_BACKEND') or \ + 'sqlite:///' + os.path.join(BASE_DIR, 'celery.sqlite') SSL_DISABLE = (os.environ.get('SSL_DISABLE') or 'True') == 'True' @classmethod diff --git a/config/logging.conf b/config/logging.conf index 9e8042c..9b6433c 100644 --- a/config/logging.conf +++ b/config/logging.conf @@ -1,5 +1,5 @@ [loggers] -keys=root,cssRestApi +keys=root,cssiapi [handlers] keys=consoleHandler, fileHandler @@ -11,10 +11,10 @@ keys=formatter level=DEBUG handlers=consoleHandler -[logger_cssRestApi] +[logger_cssiapi] level=DEBUG handlers=fileHandler -qualname=CSSI_REST_API +qualname=cssi.api propagate=0 [handler_consoleHandler] @@ -30,4 +30,4 @@ formatter=formatter args=('logs/api.log','a+', 5*1024*1024, 2, None, 0) [formatter_formatter] -format=%(asctime)s - %(name)s 
- %(levelname)-8s - [%(module)s.%(filename)s:%(lineno)d] - %(message)s \ No newline at end of file +format=%(asctime)s - %(name)s - %(levelname)-8s - [%(module)s.%(filename)s:%(lineno)d] - %(message)s \ No newline at end of file diff --git a/manage.py b/manage.py index e7c3b85..ee12068 100644 --- a/manage.py +++ b/manage.py @@ -13,19 +13,98 @@ """ import os +import subprocess +import sys +import eventlet from flask_migrate import Migrate, MigrateCommand -from flask_script import Manager +from flask_script import Manager, Command, Server as _Server, Option -from app import create_app, db +from app import create_app, db, socketio -app = create_app(os.getenv('FLASK_CONFIG') or 'default') +eventlet.monkey_patch() + +app = create_app(os.getenv('CSSI_CONFIG') or 'default') manager = Manager(app) migrate = Migrate(app, db) manager.add_command('db', MigrateCommand) + +class Server(_Server): + help = description = 'Runs the Socket.IO web server' + + def get_options(self): + options = ( + Option('-h', '--host', + dest='host', + default=self.host), + + Option('-p', '--port', + dest='port', + type=int, + default=self.port), + + Option('-d', '--debug', + action='store_true', + dest='use_debugger', + help=('enable the Werkzeug debugger (DO NOT use in ' + 'production code)'), + default=self.use_debugger), + Option('-D', '--no-debug', + action='store_false', + dest='use_debugger', + help='disable the Werkzeug debugger', + default=self.use_debugger), + + Option('-r', '--reload', + action='store_true', + dest='use_reloader', + help=('monitor Python files for changes (not 100%% safe ' + 'for production use)'), + default=self.use_reloader), + Option('-R', '--no-reload', + action='store_false', + dest='use_reloader', + help='do not monitor Python files for changes', + default=self.use_reloader), + ) + return options + + def __call__(self, app, host, port, use_debugger, use_reloader): + # override the default runserver command to start a Socket.IO server + if use_debugger is None: + 
use_debugger = app.debug + if use_debugger is None: + use_debugger = True + if use_reloader is None: + use_reloader = app.debug + socketio.run(app, + host=host, + port=port, + debug=use_debugger, + use_reloader=use_reloader, + **self.server_options) + + +manager.add_command("runserver", Server()) + + +class CeleryWorker(Command): + """Starts the celery worker.""" + name = 'celery' + capture_all_args = True + + def run(self, argv): + ret = subprocess.call( + ['celery', 'worker', '-A', 'app.celery', '--loglevel=info'] + argv) + sys.exit(ret) + + +manager.add_command("celery", CeleryWorker()) + + @manager.command def create_metadata(): """Create the table metadata. @@ -38,6 +117,7 @@ def create_metadata(): Genre.seed() ApplicationType.seed() + @manager.command def test(): """Run the unit tests. @@ -51,15 +131,20 @@ def test(): @manager.command -def recreate_db(): +def recreate_db(drop_first=False): """Recreates a local database Not safe to use in production. """ - db.drop_all() + if drop_first: + db.drop_all() db.create_all() db.session.commit() if __name__ == '__main__': + if sys.argv[1] == 'test' or sys.argv[1] == 'lint': + # small hack, to ensure that Flask-Script uses the testing + # configuration if we are going to run the tests + os.environ['CSSI_CONFIG'] = 'testing' manager.run() diff --git a/migrations/versions/89ee48680e79_.py b/migrations/versions/926a10218c82_.py similarity index 65% rename from migrations/versions/89ee48680e79_.py rename to migrations/versions/926a10218c82_.py index 4f39656..3710bc6 100644 --- a/migrations/versions/89ee48680e79_.py +++ b/migrations/versions/926a10218c82_.py @@ -1,17 +1,17 @@ """empty message -Revision ID: 89ee48680e79 -Revises: cb018c771a53 -Create Date: 2019-04-16 04:22:10.614121 +Revision ID: 926a10218c82 +Revises: e8e7340162d7 +Create Date: 2019-04-24 17:38:12.147730 """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import mysql + # revision identifiers, used by Alembic. 
-revision = '89ee48680e79' -down_revision = 'cb018c771a53' +revision = '926a10218c82' +down_revision = 'e8e7340162d7' branch_labels = None depends_on = None @@ -20,23 +20,15 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_foreign_key('fk_type_id', 'application', 'application_type', ['type_id'], ['id'], use_alter=True) op.create_foreign_key('fk_genre_id', 'application', 'genre', ['genre_id'], ['id'], use_alter=True) - op.alter_column('questionnaire', 'post', - existing_type=mysql.TEXT(), - nullable=True) - op.drop_column('questionnaire', 'session_id') - op.create_foreign_key('fk_app_id', 'session', 'application', ['app_id'], ['id'], use_alter=True) op.create_foreign_key('fk_questionnaire_id', 'session', 'questionnaire', ['questionnaire_id'], ['id'], use_alter=True) + op.create_foreign_key('fk_app_id', 'session', 'application', ['app_id'], ['id'], use_alter=True) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint('fk_questionnaire_id', 'session', type_='foreignkey') op.drop_constraint('fk_app_id', 'session', type_='foreignkey') - op.add_column('questionnaire', sa.Column('session_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False)) - op.alter_column('questionnaire', 'post', - existing_type=mysql.TEXT(), - nullable=False) + op.drop_constraint('fk_questionnaire_id', 'session', type_='foreignkey') op.drop_constraint('fk_genre_id', 'application', type_='foreignkey') op.drop_constraint('fk_type_id', 'application', type_='foreignkey') # ### end Alembic commands ### diff --git a/migrations/versions/cb018c771a53_.py b/migrations/versions/e8e7340162d7_.py similarity index 83% rename from migrations/versions/cb018c771a53_.py rename to migrations/versions/e8e7340162d7_.py index bd2e305..409d2b9 100644 --- a/migrations/versions/cb018c771a53_.py +++ b/migrations/versions/e8e7340162d7_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: cb018c771a53 +Revision ID: e8e7340162d7 Revises: -Create Date: 2019-04-16 01:05:44.197617 +Create Date: 2019-04-24 17:10:02.488638 """ from alembic import op @@ -10,7 +10,7 @@ # revision identifiers, used by Alembic. 
-revision = 'cb018c771a53' +revision = 'e8e7340162d7' down_revision = None branch_labels = None depends_on = None @@ -25,6 +25,7 @@ def upgrade(): sa.Column('developer', sa.String(length=100), nullable=False), sa.Column('type_id', sa.Integer(), nullable=False), sa.Column('description', sa.String(length=250), nullable=False), + sa.Column('public_sharing', sa.Boolean(), nullable=False), sa.Column('creation_date', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), sa.Column('genre_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['genre_id'], ['genre.id'], name='fk_genre_id', use_alter=True), @@ -46,25 +47,24 @@ def upgrade(): ) op.create_table('questionnaire', sa.Column('id', sa.Integer(), nullable=False), - sa.Column('pre', sa.TEXT(), nullable=False), - sa.Column('post', sa.TEXT(), nullable=False), + sa.Column('pre', sa.JSON(), nullable=False), + sa.Column('post', sa.JSON(), nullable=False), sa.Column('creation_date', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('session_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['session_id'], ['session.id'], name='fk_session_id', use_alter=True), sa.PrimaryKeyConstraint('id') ) op.create_table('session', sa.Column('id', sa.Integer(), nullable=False), + sa.Column('status', sa.String(length=25), nullable=False), sa.Column('app_id', sa.Integer(), nullable=False), sa.Column('creation_date', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), - sa.Column('expected_emotions', sa.TEXT(), nullable=False), + sa.Column('expected_emotions', sa.JSON(), nullable=False), + sa.Column('questionnaire_id', sa.Integer(), nullable=False), sa.Column('cssi_score', sa.Float(), nullable=False), - sa.Column('latency_scores', sa.TEXT(), nullable=False), + sa.Column('latency_scores', sa.JSON(), nullable=False), sa.Column('total_latency_score', sa.Float(), nullable=False), - sa.Column('sentiment_scores', sa.TEXT(), nullable=False), 
+ sa.Column('sentiment_scores', sa.JSON(), nullable=False), sa.Column('total_sentiment_score', sa.Float(), nullable=False), - sa.Column('questionnaire_id', sa.Integer(), nullable=False), - sa.Column('questionnaire_scores', sa.TEXT(), nullable=True), + sa.Column('questionnaire_scores', sa.JSON(), nullable=True), sa.Column('total_questionnaire_score', sa.Float(), nullable=False), sa.ForeignKeyConstraint(['app_id'], ['application.id'], name='fk_app_id', use_alter=True), sa.ForeignKeyConstraint(['questionnaire_id'], ['questionnaire.id'], name='fk_questionnaire_id', use_alter=True), diff --git a/requirements.txt b/requirements.txt index c901e77..83b48ad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,16 @@ +celery==4.3.0 +eventlet==0.19.0 flask==0.12.2 flask_script==2.0.6 flask_migrate==2.1.1 -marshmallow==2.14.0 flask_sqlalchemy==2.3.2 flask_marshmallow==0.8.0 +marshmallow==2.14.0 marshmallow-sqlalchemy==0.13.2 +flake8==2.5.4 +flask-cors +flask-socketio==3.3.2 PyMySQL==0.9.3 -flask-cors \ No newline at end of file +redis==3.2.1 +Pillow +numpy>=1.16.2 \ No newline at end of file