From e4dbbed7e8d369beea2a2eb633288aa194c25987 Mon Sep 17 00:00:00 2001 From: chartisdev Date: Wed, 13 Jul 2022 15:57:36 +0530 Subject: [PATCH 1/4] added changes for anomaly subdimensional downloads --- frontend/src/components/Anomaly/index.jsx | 6 +++++- frontend/src/redux/actions/Anomaly.js | 9 +++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/Anomaly/index.jsx b/frontend/src/components/Anomaly/index.jsx index 47b6f378d..498b80a26 100644 --- a/frontend/src/components/Anomaly/index.jsx +++ b/frontend/src/components/Anomaly/index.jsx @@ -92,7 +92,11 @@ const Anomaly = ({ kpi, anomalystatus, dashboard }) => { }; const handleDownloadClick = () => { - dispatch(anomalyDownloadCsv(kpi)); + const params = + dimension?.value && value?.value + ? { dimension: dimension?.value, value: value?.value } + : {}; + dispatch(anomalyDownloadCsv(kpi, params)); }; const handleDimensionChange = (e) => { diff --git a/frontend/src/redux/actions/Anomaly.js b/frontend/src/redux/actions/Anomaly.js index 499c92582..d6a77ddfa 100644 --- a/frontend/src/redux/actions/Anomaly.js +++ b/frontend/src/redux/actions/Anomaly.js @@ -220,11 +220,16 @@ export const anomalyDownloadFailure = () => { }; }; -export const anomalyDownloadCsv = (id) => { +export const anomalyDownloadCsv = (id, params) => { return async (dispatch) => { dispatch(anomalyDownloadRequest()); + const url = `${BASE_URL}/api/downloads/${id}/anomaly_data`; + const finalUrl = + Object.keys(params).length === 0 && params.constructor === Object + ? 
url + : attachParams(url, params); const { data, error, status } = await getRequest({ - url: `${BASE_URL}/api/downloads/${id}/anomaly_data` + url: finalUrl }); if (error) { dispatch(anomalyDownloadFailure()); From 9f4be1d04d98574dce2768816fe09d459ff241b6 Mon Sep 17 00:00:00 2001 From: chartisdev Date: Wed, 13 Jul 2022 16:58:43 +0530 Subject: [PATCH 2/4] added className instead of class --- frontend/src/components/Anomaly/index.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/Anomaly/index.jsx b/frontend/src/components/Anomaly/index.jsx index 498b80a26..39ed690c3 100644 --- a/frontend/src/components/Anomaly/index.jsx +++ b/frontend/src/components/Anomaly/index.jsx @@ -617,7 +617,7 @@ const Anomaly = ({ kpi, anomalystatus, dashboard }) => { {(dimension || value) && (
clearSubdimFilters()}> Clear filter From eed50d246c04a4d3da2dbe3d37fce7901c534549 Mon Sep 17 00:00:00 2001 From: Samyak S Sarnayak Date: Wed, 13 Jul 2022 17:04:09 +0530 Subject: [PATCH 3/4] feat: add dim and value params to anomaly data download Allows downloading subdim data. --- chaos_genius/views/anomaly_data_view.py | 22 ------ chaos_genius/views/download_view.py | 97 ++++++++++++++++++++++--- 2 files changed, 85 insertions(+), 34 deletions(-) diff --git a/chaos_genius/views/anomaly_data_view.py b/chaos_genius/views/anomaly_data_view.py index 0603be875..40eeb9eaa 100644 --- a/chaos_genius/views/anomaly_data_view.py +++ b/chaos_genius/views/anomaly_data_view.py @@ -507,28 +507,6 @@ def convert_to_graph_json( return graph_data -def get_overall_data_points(kpi_id: int, n: int = 60) -> List: - """Retrieve overall data points for a KPI for the last n days.""" - kpi_info = get_kpi_data_from_id(kpi_id) - if not kpi_info["anomaly_params"]: - return [] - - end_date = get_anomaly_output_end_date(kpi_info) - - start_date = end_date - timedelta(days=n) - start_date = start_date.strftime("%Y-%m-%d %H:%M:%S") - - return ( - AnomalyDataOutput.query.filter( - (AnomalyDataOutput.kpi_id == kpi_id) - & (AnomalyDataOutput.data_datetime >= start_date) - & (AnomalyDataOutput.anomaly_type == "overall") - ) - .order_by(AnomalyDataOutput.data_datetime) - .all() - ) - - def get_overall_data(kpi_id: int, end_date: datetime, n=90): """Retrieve overall data for a KPI for the last n days from end_date. 
diff --git a/chaos_genius/views/download_view.py b/chaos_genius/views/download_view.py index 5248b08c7..4c1f39741 100644 --- a/chaos_genius/views/download_view.py +++ b/chaos_genius/views/download_view.py @@ -1,4 +1,11 @@ -from flask import Blueprint, Response, jsonify, request +import logging +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +from flask.blueprints import Blueprint +from flask.globals import request +from flask.json import jsonify +from flask.wrappers import Response from chaos_genius.controllers.kpi_controller import get_kpi_data_from_id from chaos_genius.core.rca.rca_utils.api_utils import ( @@ -6,16 +13,50 @@ rca_analysis, rca_hierarchical_data_all_dims, ) +from chaos_genius.databases.models.anomaly_data_model import AnomalyDataOutput +from chaos_genius.databases.models.kpi_model import Kpi from chaos_genius.utils.utils import iter_csv -from chaos_genius.views.anomaly_data_view import get_overall_data_points +from chaos_genius.views.anomaly_data_view import get_anomaly_output_end_date blueprint = Blueprint("downloads", __name__) +logger = logging.getLogger(__name__) ANOMALY_DATA_DATETIME_FORMAT = "%a %-d %B %H:%M:%S %Y" CHART_DATA_DATETIME_FORMAT = "%a %-d %B %Y" +def get_anomaly_data_points( + kpi_id: int, + end_date: datetime, + n: int = 60, + subdim: Optional[Dict[str, str]] = None, +) -> List[AnomalyDataOutput]: + """Retrieve overall data points for a KPI for the last n days. + + If subdim is not specified, overall data points are returned. 
+ """ + start_date = end_date - timedelta(days=n) + start_date = start_date.strftime("%Y-%m-%d %H:%M:%S") + + filters = [ + AnomalyDataOutput.kpi_id == kpi_id, + AnomalyDataOutput.data_datetime >= start_date, + ] + + if not subdim: + filters.append(AnomalyDataOutput.anomaly_type == "overall") + else: + filters.append(AnomalyDataOutput.anomaly_type == "subdim") + filters.append(AnomalyDataOutput.series_type == subdim) + + return ( + AnomalyDataOutput.query.filter(*filters) + .order_by(AnomalyDataOutput.data_datetime) + .all() + ) + + @blueprint.route("//anomaly_data", methods=["GET"]) def download_anomaly_data(kpi_id: int): """API Endpoint to download overall KPI anomaly data in CSV form. @@ -23,11 +64,35 @@ def download_anomaly_data(kpi_id: int): Data is downloaded for the last 60 days by default """ try: - data_points = get_overall_data_points(kpi_id) + dimension = request.args.get("dimension", default=None) + value = request.args.get("value", default=None) + + kpi: Optional[Kpi] = Kpi.get_by_id(kpi_id) + if not kpi: + raise Exception(f"KPI with ID {kpi_id} does not exist.") + if not kpi.anomaly_params: + raise Exception(f"Anomaly was not set up for KPI: {kpi_id}") + + end_date = get_anomaly_output_end_date(kpi.as_dict) + + if dimension and value: + logger.info( + "Downloading subdim anomaly data for KPI: %d, subdim: %s=%s", + kpi_id, + dimension, + value, + ) + data_points = get_anomaly_data_points( + kpi_id, end_date, subdim={dimension: value} + ) + else: + logger.info("Downloading overall anomaly data for KPI: %d", kpi_id) + data_points = get_anomaly_data_points(kpi_id, end_date) + if not data_points: - raise Exception(f"No anomaly data found for KPI id {kpi_id}") + raise Exception(f"No anomaly data found for KPI: {kpi_id}") - def row_gen(data_points): + def row_gen(data_points: List[AnomalyDataOutput]): csv_headers = [ "datetime", "value", @@ -50,12 +115,20 @@ def row_gen(data_points): ] yield attr_list + if kpi.anomaly_params["frequency"] == "D": + 
end_date_str = end_date.strftime("%Y-%m-%d") + else: + end_date_str = end_date.strftime("%Y-%m-%dT%H-%M-%S") + suffix = "" + if dimension and value: + suffix = f"_{dimension}_{value}" + filename = f"chaosgenius_{kpi.name}_anomaly_data_{end_date_str}{suffix}.csv" + response = Response(iter_csv(row_gen(data_points)), mimetype="text/csv") - response.headers[ - "Content-Disposition" - ] = f"attachment; filename=KPI-{kpi_id}-anomaly-data.csv" + response.headers["Content-Disposition"] = f'attachment; filename="{filename}"' return response - except Exception as e: + except Exception as e: # noqa: B902 + logger.error("Error while downloading anomaly data", exc_info=e) return jsonify( {"status": "failure", "message": f"Downloading data failed: {e}"} ) @@ -87,7 +160,7 @@ def row_gen(data_points): "Content-Disposition" ] = f"attachment; filename=KPI-{kpi}-panel-chart-data.csv" return response - except Exception as e: + except Exception as e: # noqa: B902 return jsonify( {"status": "failure", "message": f"chart data download failed: {e}"} ) @@ -146,7 +219,7 @@ def row_gen(data_list): "Content-Disposition" ] = f"attachment; filename=KPI-{kpi_id}-DeepDrills.csv" return response - except Exception as e: + except Exception as e: # noqa: B902 status = "failure" message = f"Error downloading hierarchical data: {e}" return jsonify({"status": status, "message": message}) @@ -197,7 +270,7 @@ def row_gen(data): "Content-Disposition" ] = f"attachment; filename=KPI-{kpi_id}-DeepDrills-multidim.csv" return response - except Exception as e: + except Exception as e: # noqa: B902 status = "failure" message = f"Error downloading multidim analysis data: {e}" return jsonify({"status": status, "message": message}) From 042553c6aa217a777421974de1a732b2ab8f4dc8 Mon Sep 17 00:00:00 2001 From: Samyak S Sarnayak Date: Fri, 15 Jul 2022 09:23:19 +0530 Subject: [PATCH 4/4] fix(download): sanitize file name --- chaos_genius/views/download_view.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) 
diff --git a/chaos_genius/views/download_view.py b/chaos_genius/views/download_view.py index 4c1f39741..868d126ef 100644 --- a/chaos_genius/views/download_view.py +++ b/chaos_genius/views/download_view.py @@ -15,7 +15,7 @@ ) from chaos_genius.databases.models.anomaly_data_model import AnomalyDataOutput from chaos_genius.databases.models.kpi_model import Kpi -from chaos_genius.utils.utils import iter_csv +from chaos_genius.utils.utils import iter_csv, make_path_safe from chaos_genius.views.anomaly_data_view import get_anomaly_output_end_date blueprint = Blueprint("downloads", __name__) @@ -122,7 +122,10 @@ def row_gen(data_points: List[AnomalyDataOutput]): suffix = "" if dimension and value: suffix = f"_{dimension}_{value}" - filename = f"chaosgenius_{kpi.name}_anomaly_data_{end_date_str}{suffix}.csv" + filename = ( + f"chaosgenius_{make_path_safe(kpi.name)}_anomaly_data_{end_date_str}" + f"{make_path_safe(suffix)}.csv" + ) response = Response(iter_csv(row_gen(data_points)), mimetype="text/csv") response.headers["Content-Disposition"] = f'attachment; filename="{filename}"'