diff --git a/estela-api/api/serializers/stats.py b/estela-api/api/serializers/stats.py
index 3c197393..11dd762d 100644
--- a/estela-api/api/serializers/stats.py
+++ b/estela-api/api/serializers/stats.py
@@ -1,5 +1,7 @@
+from datetime import timedelta
 from rest_framework import serializers
-
+from api.serializers.spider import SpiderSerializer
+from api.serializers.job import SpiderJobSerializer
 from core.models import SpiderJob
 
@@ -55,30 +57,65 @@ class StatsSerializer(serializers.Serializer):
     jobs = JobsStatsSerializer(required=False)
     pages = PagesStatsSerializer()
     items_count = serializers.IntegerField(default=0)
-    runtime = serializers.FloatField(default=0.0)
+    runtime = serializers.DurationField(default=timedelta(hours=0, minutes=0))
     status_codes = StatusCodesStatsSerializer()
-    success_rate = serializers.FloatField(default=0.0)
+    success_rate = serializers.FloatField(default=0.0, required=False)
     logs = LogsStatsSerializer()
-    coverage = CoverageStatsSerializer()
+    coverage = CoverageStatsSerializer(required=False)
 
 
-class JobsMetadataSerializer(serializers.ModelSerializer):
-    class Meta:
-        model = SpiderJob
-        fields = ("jid", "spider", "job_status")
-
-
-class GetJobsStatsSerializer(serializers.Serializer):
-    jid = serializers.IntegerField(default=0)
-    spider = serializers.IntegerField(default=0)
-    stats = StatsSerializer(required=False)
+class SpiderJobStatsSerializer(SpiderJobSerializer):
+    stats = StatsSerializer()
 
-
-class GlobalStatsSerializer(serializers.Serializer):
+    class Meta:
+        model = SpiderJob
+        fields = (
+            "jid",
+            "spider",
+            "created",
+            "name",
+            "lifespan",
+            "total_response_bytes",
+            "item_count",
+            "request_count",
+            "args",
+            "env_vars",
+            "tags",
+            "job_status",
+            "cronjob",
+            "data_expiry_days",
+            "data_status",
+            "stats",
+        )
+
+
+class ProjectStatsSerializer(serializers.Serializer):
     date = serializers.DateField()
     stats = StatsSerializer()
-    jobs_metadata = JobsMetadataSerializer(many=True)
 
 
-class SpidersJobsStatsSerializer(GlobalStatsSerializer):
+class SpidersStatsSerializer(ProjectStatsSerializer):
     pass
+
+
+class SpidersPaginationSerializer(serializers.Serializer):
+    count = serializers.IntegerField()
+    next = serializers.HyperlinkedIdentityField(
+        view_name="project-stats", allow_null=True
+    )
+    previous = serializers.HyperlinkedIdentityField(
+        view_name="project-stats", allow_null=True
+    )
+    results = SpiderSerializer(many=True)
+
+
+class JobsPaginationSerializer(serializers.Serializer):
+    count = serializers.IntegerField()
+    next = serializers.HyperlinkedIdentityField(
+        view_name="project-stats", allow_null=True
+    )
+    previous = serializers.HyperlinkedIdentityField(
+        view_name="project-stats", allow_null=True
+    )
+    results = SpiderJobStatsSerializer(many=True)
diff --git a/estela-api/api/urls.py b/estela-api/api/urls.py
index 5b929c43..ad4601a2 100644
--- a/estela-api/api/urls.py
+++ b/estela-api/api/urls.py
@@ -50,8 +50,8 @@
 )
 router.register(
     prefix=r"stats/(?P<pid>[0-9a-z-]+)",
-    viewset=stats_views.GlobalStatsViewSet,
-    basename="global-stats",
+    viewset=stats_views.ProjectStatsViewSet,
+    basename="project-stats",
 )
 router.register(
     prefix=r"stats/(?P<pid>[0-9a-z-]+)/spider/(?P<sid>\d+)",
diff --git a/estela-api/api/views/notification.py b/estela-api/api/views/notification.py
index a9da889e..e4831cc3 100644
--- a/estela-api/api/views/notification.py
+++ b/estela-api/api/views/notification.py
@@ -18,7 +18,9 @@ class NotificationViewSet(BaseViewSet, viewsets.ModelViewSet):
     def get_queryset(self):
         if self.request is None:
             return Notification.objects.none()
-        return Notification.objects.filter(user=self.request.user).order_by("-activity__created")
+        return Notification.objects.filter(user=self.request.user).order_by(
+            "-activity__created"
+        )
 
     @swagger_auto_schema(
         request_body=NotificationUpdateSerializer,
diff --git a/estela-api/api/views/stats.py b/estela-api/api/views/stats.py
index 0609d20f..b94c820f 100644
--- a/estela-api/api/views/stats.py
+++ b/estela-api/api/views/stats.py
@@ -1,7 +1,7 @@
 from collections import defaultdict
-from datetime import datetime, time
+from datetime import datetime, timedelta
 from re import findall
-from typing import List, Tuple, Union
+from typing import List, Tuple
 
 from django.db.models.query import QuerySet
 from django.utils import timezone
@@ -9,7 +10,7 @@
 from drf_yasg.utils import swagger_auto_schema
 from rest_framework import mixins, status
 from rest_framework.decorators import action
-from rest_framework.exceptions import ValidationError
+from rest_framework.pagination import PageNumberPagination
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer
@@ -17,19 +18,70 @@
 from api import errors
 from api.exceptions import DataBaseError, InvalidDateFormatException
 from api.mixins import BaseViewSet
+from api.serializers.spider import SpiderSerializer
+from api.serializers.job import SpiderJobSerializer
 from api.serializers.stats import (
-    GetJobsStatsSerializer,
-    GlobalStatsSerializer,
-    JobsMetadataSerializer,
-    SpidersJobsStatsSerializer,
+    ProjectStatsSerializer,
+    SpidersStatsSerializer,
     StatsSerializer,
+    SpidersPaginationSerializer,
+    JobsPaginationSerializer,
 )
 from config.job_manager import spiderdata_db_client
 from core.models import Project, Spider, SpiderJob
 
 
-class StatsForDashboardMixin:
+class StatsMixin:
+    numerical_stats: dict = {
+        "items_count": 0,
+        "runtime": timedelta(seconds=0),
+        "success_rate": 0.0,
+    }
+    pages_stats: dict = {
+        "total_pages": 0,
+        "scraped_pages": 0,
+        "missed_pages": 0,
+    }
+    jobs_stats: dict = {
+        "total_jobs": 0,
+        "waiting_jobs": 0,
+        "running_jobs": 0,
+        "stopped_jobs": 0,
+        "completed_jobs": 0,
+        "in_queue_jobs": 0,
+        "error_jobs": 0,
+    }
+    status_codes_stats: dict = {
+        "status_200": 0,
+        "status_301": 0,
+        "status_302": 0,
+        "status_401": 0,
+        "status_403": 0,
+        "status_404": 0,
+        "status_429": 0,
+        "status_500": 0,
+    }
+    logs_stats: dict = {
+        "total_logs": 0,
+        "debug_logs": 0,
+        "info_logs": 0,
+        "warning_logs": 0,
+        "error_logs": 0,
+        "critical_logs": 0,
+    }
+    coverage_stats: dict = {
+        "total_items": 0,
+        "total_items_coverage": 0.0,
+    }
     stats_mapping: dict = {
+        "jobs": {
+            "waiting_jobs": SpiderJob.WAITING_STATUS,
+            "running_jobs": SpiderJob.RUNNING_STATUS,
+            "stopped_jobs": SpiderJob.STOPPED_STATUS,
+            "completed_jobs": SpiderJob.COMPLETED_STATUS,
+            "in_queue_jobs": SpiderJob.IN_QUEUE_STATUS,
+            "error_jobs": SpiderJob.ERROR_STATUS,
+        },
         "items_count": "item_scraped_count",
         "runtime": "elapsed_time_seconds",
         "scraped_pages": "downloader/response_status_count/200",
@@ -67,99 +119,37 @@ def get_parameters(self, request: Request) -> Tuple[datetime, datetime]:
         return start_date, end_date
 
     def summarize_stats_results(
-        self, stats_set: List[dict], jobs_set: QuerySet[SpiderJob]
+        self, stats_set: List[dict], jobs_set: QuerySet[SpiderJob], offset: int
     ) -> dict:
         stats_results = defaultdict(lambda: defaultdict(int))
         stats_results.default_factory = lambda: {
-            "jobs": {
-                "total_jobs": 0,
-                "waiting_jobs": 0,
-                "running_jobs": 0,
-                "stopped_jobs": 0,
-                "completed_jobs": 0,
-                "in_queue_jobs": 0,
-                "error_jobs": 0,
-            },
-            "pages": {
-                "total_pages": 0,
-                "scraped_pages": 0,
-                "missed_pages": 0,
-            },
-            "items_count": 0,
-            "runtime": 0.0,
-            "status_codes": {
-                "status_200": 0,
-                "status_301": 0,
-                "status_302": 0,
-                "status_401": 0,
-                "status_403": 0,
-                "status_404": 0,
-                "status_429": 0,
-                "status_500": 0,
-            },
-            "success_rate": 0.0,
-            "logs": {
-                "total_logs": 0,
-                "debug_logs": 0,
-                "info_logs": 0,
-                "warning_logs": 0,
-                "error_logs": 0,
-                "critical_logs": 0,
-            },
-            "coverage": {
-                "total_items": 0,
-                "total_items_coverage": 0.0,
-            },
-            "jobs_metadata": [],
+            **self.numerical_stats,
+            "jobs": {**self.jobs_stats},
+            "pages": {**self.pages_stats},
+            "status_codes": {**self.status_codes_stats},
+            "logs": {**self.logs_stats},
+            "coverage": {**self.coverage_stats},
         }
-        jobs_ids = {job.jid: job.created.strftime("%Y-%m-%d") for job in jobs_set}
-
-        min_jobs_date: dict = {}
-        min_jobs_date = {
-            job.created.strftime("%Y-%m-%d"): job.created
-            if min_date is None or job.created < min_date
-            else min_date
+        jobs_offset = {
+            job.jid: (job.created - timedelta(minutes=offset)).strftime("%Y-%m-%d")
             for job in jobs_set
-            for min_date in [min_jobs_date.get(job.created.strftime("%Y-%m-%d"), None)]
         }
-        min_jobs_date = {key: value.isoformat() for key, value in min_jobs_date.items()}
 
         for job in jobs_set:
-            date_str = jobs_ids[job.jid]
-            stats_results[date_str]["min_date"] = min_jobs_date[date_str]
+            date_str = jobs_offset[job.jid]
             stats_results[date_str]["jobs"]["total_jobs"] += 1
-            stats_results[date_str]["jobs"]["waiting_jobs"] += int(
-                job.status == SpiderJob.WAITING_STATUS
-            )
-            stats_results[date_str]["jobs"]["running_jobs"] += int(
-                job.status == SpiderJob.RUNNING_STATUS
-            )
-            stats_results[date_str]["jobs"]["stopped_jobs"] += int(
-                job.status == SpiderJob.STOPPED_STATUS
-            )
-            stats_results[date_str]["jobs"]["completed_jobs"] += int(
-                job.status == SpiderJob.COMPLETED_STATUS
-            )
-            stats_results[date_str]["jobs"]["in_queue_jobs"] += int(
-                job.status == SpiderJob.IN_QUEUE_STATUS
-            )
-            stats_results[date_str]["jobs"]["error_jobs"] += int(
-                job.status == SpiderJob.ERROR_STATUS
-            )
-            job_metadata_serializer = JobsMetadataSerializer(job)
-            stats_results[date_str]["jobs_metadata"].append(
-                job_metadata_serializer.data
-            )
+            for key, value in self.stats_mapping["jobs"].items():
+                stats_results[date_str]["jobs"][key] += int(job.status == value)
 
         for stats in stats_set:
             job_id = int(findall(r"\d+", stats["_id"])[1])
-            date_str = jobs_ids[job_id]
+            date_str = jobs_offset[job_id]
             stats_results[date_str]["items_count"] += stats.get(
                 self.stats_mapping["items_count"], 0
             )
-            stats_results[date_str]["runtime"] += stats.get(
-                self.stats_mapping["runtime"], 0.0
+            stats_results[date_str]["runtime"] += timedelta(
+                seconds=stats.get(self.stats_mapping["runtime"], 0.0)
             )
 
             stats_results[date_str]["pages"]["scraped_pages"] += stats.get(
@@ -191,6 +181,7 @@ def summarize_stats_results(
             )
 
         for stat in stats_results.values():
+            stat["runtime"] = str(stat["runtime"])
             if stat["jobs"]["completed_jobs"] != 0:
                 stat["coverage"]["total_items_coverage"] /= stat["jobs"][
                     "completed_jobs"
@@ -201,110 +192,54 @@ def summarize_stats_results(
             )
         return stats_results
 
-    def parse_jobs_stats(
-        self, stats_ids: List[str], stats_set: List[dict]
-    ) -> GetJobsStatsSerializer:
-        reformatted_stats_set: dict = {stat["_id"]: stat for stat in stats_set}
-        jobs_stats_results: List[dict] = []
-
-        for stat_id in stats_ids:
-            ids = findall(r"\d+", stat_id)
-            spider_id, job_id = int(ids[0]), int(ids[1])
-            job_stat_result: dict = {"jid": job_id, "spider": spider_id}
-            stats: Union[dict, None] = reformatted_stats_set.get(stat_id)
-            if isinstance(stats, dict):
-                job_stat_result["stats"] = {}
-                job_stat_result["stats"]["items_count"] = stats.get(
-                    self.stats_mapping["items_count"], 0
-                )
+    def parse_jobs_stats(self, stats_set: List[dict]) -> dict:
+        stats_results = defaultdict(lambda: defaultdict(int))
+        stats_results.default_factory = lambda: {
+            **self.numerical_stats,
+            "pages": {**self.pages_stats},
+            "status_codes": {**self.status_codes_stats},
+            "logs": {**self.logs_stats},
+        }
 
-                job_stat_result["stats"]["runtime"] = stats.get(
-                    self.stats_mapping["runtime"], 0.0
-                )
+        for stats in stats_set:
+            job_id = int(findall(r"\d+", stats["_id"])[1])
+            stats_results[job_id]["pages"]["scraped_pages"] = stats.get(
+                self.stats_mapping["scraped_pages"], 0
+            )
+            stats_results[job_id]["pages"]["missed_pages"] = stats.get(
+                self.stats_mapping["total_pages"], 0
+            ) - stats.get(self.stats_mapping["scraped_pages"], 0)
+            stats_results[job_id]["pages"]["total_pages"] = stats.get(
+                self.stats_mapping["total_pages"], 0
+            )
 
-                job_stat_result["stats"]["pages"]: dict = {}
-                job_stat_result["stats"]["pages"]["scraped_pages"] = stats.get(
-                    self.stats_mapping["scraped_pages"], 0
-                )
-                job_stat_result["stats"]["pages"]["missed_pages"] = stats.get(
-                    self.stats_mapping["total_pages"], 0
-                ) - stats.get(self.stats_mapping["scraped_pages"], 0)
-                job_stat_result["stats"]["pages"]["total_pages"] = stats.get(
-                    self.stats_mapping["total_pages"], 0
-                )
+            stats_results[job_id]["items_count"] = stats.get(
+                self.stats_mapping["items_count"], 0
+            )
 
-                job_stat_result["stats"]["status_codes"]: dict = {}
-                for status_code in self.stats_mapping["status_codes"]:
-                    job_stat_result["stats"]["status_codes"][status_code] = stats.get(
-                        self.stats_mapping["status_codes"][status_code], 0
-                    )
-
-                job_stat_result["stats"]["logs"]: dict = {}
-                for log in self.stats_mapping["logs"]:
-                    log_count = stats.get(self.stats_mapping["logs"][log], 0)
-                    job_stat_result["stats"]["logs"][log] = log_count
-                    job_stat_result["stats"]["logs"]["total_logs"] = log_count
-
-                job_stat_result["stats"]["coverage"]: dict = {}
-                coverage: Union[dict, None] = stats.get(
-                    self.stats_mapping["coverage"], None
-                )
-                if isinstance(coverage, dict):
-                    for coverage_field, coverage_value in coverage.items():
-                        job_stat_result["stats"]["coverage"][
-                            coverage_field
-                        ] = coverage_value
-            jobs_stats_results.append(job_stat_result)
-        return GetJobsStatsSerializer(data=jobs_stats_results, many=True)
+            stats_results[job_id]["runtime"] = str(
+                timedelta(seconds=stats.get(self.stats_mapping["runtime"], 0.0))
+            )
 
-    @swagger_auto_schema(
-        operation_description="Retrieve stats of all jobs metadata.",
-        request_body=ListSerializer(child=GetJobsStatsSerializer()),
-        request_body_description="The list of jobs metadata to retrieve its stats.",
-        responses={
-            status.HTTP_200_OK: openapi.Response(
-                description="Array with stats summary for each job",
-                schema=ListSerializer(child=GetJobsStatsSerializer()),
-            ),
-        },
-    )
-    @action(methods=["POST"], detail=False)
-    def jobs_stats(self, request: Request, *args, **kwargs):
-        jobs_stats_ids: List[str] = []
-        try:
-            if not isinstance(request.data, list):
-                raise ValidationError(
-                    "Please provide a valid body schema [{jid:number, spider:number}]"
+            for status_code in self.stats_mapping["status_codes"]:
+                stats_results[job_id]["status_codes"][status_code] = stats.get(
+                    self.stats_mapping["status_codes"][status_code], 0
                 )
-            for job_metadata in request.data:
-                serializer = GetJobsStatsSerializer(data=job_metadata)
-                if serializer.is_valid():
-                    jobs_stats_ids.append(
-                        f"{serializer.validated_data.get('spider')}-{serializer.validated_data.get('jid')}-job_stats"
-                    )
-                else:
-                    raise ValidationError(serializer.error_messages)
-        except ValidationError as e:
-            return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
 
-        if not spiderdata_db_client.get_connection():
-            raise DataBaseError({"error": errors.UNABLE_CONNECT_DB})
-        stats_set: List[dict] = spiderdata_db_client.get_jobs_set_stats(
-            kwargs["pid"], jobs_stats_ids
-        )
+            for log in self.stats_mapping["logs"]:
+                log_count = stats.get(self.stats_mapping["logs"][log], 0)
+                stats_results[job_id]["logs"][log] = log_count
+                stats_results[job_id]["logs"]["total_logs"] += log_count
 
-        serializer = self.parse_jobs_stats(jobs_stats_ids, stats_set)
-        if not serializer.is_valid():
-            return Response(
-                {"error": serializer.errors},
-                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            )
-        return Response(data=serializer.data, status=status.HTTP_200_OK)
+        return stats_results
 
 
-class GlobalStatsViewSet(BaseViewSet, StatsForDashboardMixin, mixins.ListModelMixin):
+class ProjectStatsViewSet(BaseViewSet, StatsMixin, mixins.ListModelMixin):
     model_class = Project
     lookup_field = "pid"
+    MAX_PAGINATION_SIZE = 100
+    MIN_PAGINATION_SIZE = 1
+    DEFAULT_PAGINATION_SIZE = 10
 
     @swagger_auto_schema(
         operation_description="Retrieve stats of all jobs in a range of time, dates must have the format YYYY-mm-dd.",
@@ -323,17 +258,30 @@ class GlobalStatsViewSet(BaseViewSet, StatsForDashboardMixin, mixins.ListModelMi
                 required=True,
                 description="End of date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).",
             ),
+            openapi.Parameter(
+                name="offset",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_INTEGER,
+                required=False,
+                description="Offset from UTC time in minutes.",
+            ),
         ],
         responses={
             status.HTTP_200_OK: openapi.Response(
                 description="Global stats array with stats summary for each date",
-                schema=ListSerializer(child=GlobalStatsSerializer()),
+                schema=ListSerializer(child=ProjectStatsSerializer()),
            ),
        },
    )
    def list(self, request: Request, *args, **kwargs):
        try:
            start_date, end_date = self.get_parameters(request)
+            offset = int(request.query_params.get("offset", 0))
+        except (ValueError, TypeError):
+            return Response(
+                {"error": "Invalid 'offset' parameter. Must be an integer."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
        except InvalidDateFormatException as e:
            return Response({"error": str(e.detail)}, status=e.status_code)
 
@@ -356,18 +304,17 @@ def list(self, request: Request, *args, **kwargs):
             kwargs["pid"], job_stats_ids
         )
 
-        global_stats_results = self.summarize_stats_results(stats_set, jobs_set)
+        global_stats_results = self.summarize_stats_results(stats_set, jobs_set, offset)
         response_schema = []
-        for stat_result in global_stats_results.values():
-            date = stat_result.pop("min_date", None)
+        for date_stat, stat_result in global_stats_results.items():
             stat_serializer = StatsSerializer(data=stat_result)
-            if stat_serializer.is_valid():
+            if stat_serializer.is_valid(raise_exception=True):
                 response_schema.append(
                     {
-                        "date": date,
+                        "date": datetime.strptime(date_stat, "%Y-%m-%d")
+                        + timedelta(minutes=offset),
                         "stats": stat_serializer.data,
-                        "jobs_metadata": stat_result["jobs_metadata"],
                     }
                 )
 
@@ -376,12 +323,137 @@ def list(self, request: Request, *args, **kwargs):
             status=status.HTTP_200_OK,
         )
 
+    @swagger_auto_schema(
+        methods=["GET"],
+        operation_description="Retrieve all the spiders executed in a range of dates.",
+        manual_parameters=[
+            openapi.Parameter(
+                name="start_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).",
+            ),
+            openapi.Parameter(
+                name="end_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).",
+            ),
+        ],
+        responses={
+            status.HTTP_200_OK: openapi.Response(
+                description="Paginated spiders launched in a range of time",
+                schema=SpidersPaginationSerializer(),
+            ),
+        },
+    )
+    @action(methods=["GET"], detail=False)
+    def spiders(self, request: Request, *args, **kwargs):
+        try:
+            start_date, end_date = self.get_parameters(request)
+        except InvalidDateFormatException as e:
+            return Response({"error": str(e.detail)}, status=e.status_code)
+
+        paginator = PageNumberPagination()
+        paginator.page = request.query_params.get("page", 1)
+        paginator.page_size = int(
+            request.query_params.get("page_size", self.DEFAULT_PAGINATION_SIZE)
+        )
+        paginator.max_page_size = self.MAX_PAGINATION_SIZE
+
+        spiders_set = Spider.objects.filter(
+            jobs__created__range=[start_date, end_date]
+        ).distinct()
+        paginated_spiders_set = paginator.paginate_queryset(spiders_set, request)
+
+        serializer = SpiderSerializer(paginated_spiders_set, many=True)
+        return paginator.get_paginated_response(serializer.data)
 
-class SpidersJobsStatsViewSet(
-    BaseViewSet, StatsForDashboardMixin, mixins.ListModelMixin
-):
+    @swagger_auto_schema(
+        operation_description="Retrieve all the jobs of a spider executed in a range of dates.",
+        manual_parameters=[
+            openapi.Parameter(
+                name="start_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).",
+            ),
+            openapi.Parameter(
+                name="end_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).",
+            ),
+            openapi.Parameter(
+                name="spider",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_INTEGER,
+                required=True,
+                description="The spider ID related to the jobs.",
+            ),
+        ],
+        responses={
+            status.HTTP_200_OK: openapi.Response(
+                description="Paginated jobs belonging to a spider in a range of time",
+                schema=JobsPaginationSerializer(),
+            ),
+        },
+    )
+    @action(methods=["GET"], detail=False)
+    def jobs(self, request: Request, *args, **kwargs):
+        try:
+            start_date, end_date = self.get_parameters(request)
+            spider = int(request.query_params.get("spider"))
+        except (ValueError, TypeError):
+            return Response(
+                {"error": "Invalid 'spider' parameter. Must be an integer."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        except InvalidDateFormatException as e:
+            return Response({"error": str(e.detail)}, status=e.status_code)
+
+        if not spiderdata_db_client.get_connection():
+            raise DataBaseError({"error": errors.UNABLE_CONNECT_DB})
+
+        paginator = PageNumberPagination()
+        paginator.page = request.query_params.get("page", 1)
+        paginator.page_size = int(
+            request.query_params.get("page_size", self.DEFAULT_PAGINATION_SIZE)
+        )
+        paginator.max_page_size = self.MAX_PAGINATION_SIZE
+
+        jobs_set = SpiderJob.objects.filter(
+            spider=spider, created__range=[start_date, end_date]
+        )
+
+        paginated_jobs_set = paginator.paginate_queryset(jobs_set, request)
+
+        jobs_stats_ids: List[str] = [
+            "{}-{}-job_stats".format(job.spider.sid, job.jid)
+            for job in paginated_jobs_set
+        ]
+        stats_set: List[dict] = spiderdata_db_client.get_jobs_set_stats(
+            kwargs["pid"], jobs_stats_ids
+        )
+        stats_results: dict = self.parse_jobs_stats(stats_set=stats_set)
+        serializer = SpiderJobSerializer(paginated_jobs_set, many=True)
+        response_schema = []
+        for job in serializer.data:
+            job_id = job.get("jid", None)
+            response_schema.append({**job, "stats": stats_results[job_id]})
+        return paginator.get_paginated_response(response_schema)
+
+
+class SpidersJobsStatsViewSet(BaseViewSet, StatsMixin, mixins.ListModelMixin):
     model_class = Spider
     lookup_field = "sid"
+    MAX_PAGINATION_SIZE = 100
+    MIN_PAGINATION_SIZE = 1
+    DEFAULT_PAGINATION_SIZE = 10
 
     @swagger_auto_schema(
         operation_description="Retrieve stats of all jobs of a spider in a range of time, dates must have the format YYYY-mm-dd.",
@@ -400,24 +472,37 @@ class SpidersJobsStatsViewSet(
                 required=True,
                 description="End of date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).",
             ),
+            openapi.Parameter(
+                name="offset",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_INTEGER,
+                required=False,
+                description="Offset from UTC time in minutes.",
+            ),
        ],
        responses={
            status.HTTP_200_OK: openapi.Response(
                description="Spiders/Jobs stats array with stats summary for each date",
-                schema=ListSerializer(child=SpidersJobsStatsSerializer()),
+                schema=ListSerializer(child=SpidersStatsSerializer()),
            ),
        },
    )
-    def list(self, request, *args, **kwargs):
+    def list(self, request: Request, *args, **kwargs):
        try:
            start_date, end_date = self.get_parameters(request)
+            offset = int(request.query_params.get("offset", 0))
+        except (ValueError, TypeError):
+            return Response(
+                {"error": "Invalid 'offset' parameter. Must be an integer."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
        except InvalidDateFormatException as e:
            return Response({"error": str(e.detail)}, status=e.status_code)
 
        if not spiderdata_db_client.get_connection():
            raise ConnectionError({"error": errors.UNABLE_CONNECT_DB})
 
-        spider: Spider = Spider.objects.get(sid=kwargs["sid"])
+        spider = Spider.objects.get(sid=kwargs["sid"])
        jobs_set: QuerySet[SpiderJob] = spider.jobs.filter(
            created__range=[start_date, end_date]
        )
@@ -431,19 +516,18 @@ def list(self, request: Request, *args, **kwargs):
        )
 
        spider_jobs_stats_results: dict = self.summarize_stats_results(
-            stats_set, jobs_set
+            stats_set, jobs_set, offset
        )
        response_schema = []
-        for stat_result in spider_jobs_stats_results.values():
-            date = stat_result.pop("min_date", None)
+        for date_stat, stat_result in spider_jobs_stats_results.items():
            stat_serializer = StatsSerializer(data=stat_result)
            if stat_serializer.is_valid():
                response_schema.append(
                    {
-                        "date": date,
+                        "date": datetime.strptime(date_stat, "%Y-%m-%d")
+                        + timedelta(minutes=offset),
                        "stats": stat_serializer.data,
-                        "jobs_metadata": stat_result["jobs_metadata"],
                    }
                )
 
@@ -451,3 +535,74 @@ def list(self, request: Request, *args, **kwargs):
            data=response_schema,
            status=status.HTTP_200_OK,
        )
+
+    @swagger_auto_schema(
+        operation_description="Retrieve all the jobs of a spider executed in a range of dates.",
+        manual_parameters=[
+            openapi.Parameter(
+                name="start_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).",
+            ),
+            openapi.Parameter(
+                name="end_date",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_STRING,
+                required=True,
+                description="End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).",
+            ),
+            openapi.Parameter(
+                name="spider",
+                in_=openapi.IN_QUERY,
+                type=openapi.TYPE_INTEGER,
+                required=True,
+                description="The spider ID related to the jobs.",
+            ),
+        ],
+        responses={
+            status.HTTP_200_OK: openapi.Response(
+                description="Paginated jobs belonging to a spider in a range of time",
+                schema=JobsPaginationSerializer(),
+            ),
+        },
+    )
+    @action(methods=["GET"], detail=False)
+    def jobs(self, request: Request, *args, **kwargs):
+        try:
+            start_date, end_date = self.get_parameters(request)
+        except InvalidDateFormatException as e:
+            return Response({"error": str(e.detail)}, status=e.status_code)
+
+        if not spiderdata_db_client.get_connection():
+            raise DataBaseError({"error": errors.UNABLE_CONNECT_DB})
+
+        paginator = PageNumberPagination()
+        paginator.page = request.query_params.get("page", 1)
+        paginator.page_size = int(
+            request.query_params.get("page_size", self.DEFAULT_PAGINATION_SIZE)
+        )
+        paginator.max_page_size = self.MAX_PAGINATION_SIZE
+
+        jobs_set = SpiderJob.objects.filter(
+            spider=kwargs["sid"], created__range=[start_date, end_date]
+        )
+
+        paginated_jobs_set = paginator.paginate_queryset(jobs_set, request)
+
+        jobs_stats_ids: List[str] = [
+            "{}-{}-job_stats".format(job.spider.sid, job.jid)
+            for job in paginated_jobs_set
+        ]
+        stats_set: List[dict] = spiderdata_db_client.get_jobs_set_stats(
+            kwargs["pid"], jobs_stats_ids
+        )
+
+        stats_results: dict = self.parse_jobs_stats(stats_set=stats_set)
+        serializer = SpiderJobSerializer(paginated_jobs_set, many=True)
+        response_schema = []
+        for job in serializer.data:
+            job_id = job.get("jid", None)
+            response_schema.append({**job, "stats": stats_results[job_id]})
+        return paginator.get_paginated_response(response_schema)
diff --git a/estela-api/core/views.py b/estela-api/core/views.py
index 38536b27..a5be4b58 100644
--- a/estela-api/core/views.py
+++ b/estela-api/core/views.py
@@ -10,6 +10,7 @@
 from api.tokens import account_reset_token
 from config.job_manager import job_manager
 
+
 def launch_deploy_job(pid, did, container_image):
     deploy_user = User.objects.get(username="deploy_manager")
     deploy_user_token, _ = Token.objects.get_or_create(user=deploy_user)
diff --git a/estela-api/docs/api.yaml b/estela-api/docs/api.yaml
index 18fe6a86..7788371b 100644
--- a/estela-api/docs/api.yaml
+++ b/estela-api/docs/api.yaml
@@ -1328,13 +1328,18 @@ paths:
         description: End of date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).
         required: true
         type: string
+      - name: offset
+        in: query
+        description: Offset from UTC time in minutes.
+        required: false
+        type: integer
       responses:
         '200':
           description: Global stats array with stats summary for each date
           schema:
             type: array
             items:
-              $ref: '#/definitions/GlobalStats'
+              $ref: '#/definitions/ProjectStats'
       tags:
       - api
     parameters:
@@ -1342,25 +1347,41 @@ paths:
       in: path
       required: true
      type: string
-  /api/stats/{pid}/jobs_stats:
-    post:
-      operationId: api_stats_jobs_stats
-      description: Retrieve stats of all jobs metadata.
+  /api/stats/{pid}/jobs:
+    get:
+      operationId: api_stats_jobs
+      description: Retrieve all the jobs of a spider executed in a range of dates.
       parameters:
-      - name: data
-        in: body
+      - name: page
+        in: query
+        description: A page number within the paginated result set.
+        required: false
+        type: integer
+      - name: page_size
+        in: query
+        description: Number of results to return per page.
+        required: false
+        type: integer
+      - name: start_date
+        in: query
+        description: Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).
         required: true
-        schema:
-          type: array
-          items:
-            $ref: '#/definitions/GetJobsStats'
+        type: string
+      - name: end_date
+        in: query
+        description: End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).
+        required: true
+        type: string
+      - name: spider
+        in: query
+        description: The spider ID related to the jobs.
+        required: true
+        type: integer
       responses:
         '200':
-          description: Array with stats summary for each job
+          description: Paginated jobs belonging to a spider in a range of time
           schema:
-            type: array
-            items:
-              $ref: '#/definitions/GetJobsStats'
+            $ref: '#/definitions/JobsPagination'
       tags:
       - api
     parameters:
@@ -1394,13 +1415,18 @@ paths:
         description: End of date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).
         required: true
        type: string
+      - name: offset
+        in: query
+        description: Offset from UTC time in minutes.
+        required: false
+        type: integer
       responses:
         '200':
           description: Spiders/Jobs stats array with stats summary for each date
           schema:
             type: array
             items:
-              $ref: '#/definitions/SpidersJobsStats'
+              $ref: '#/definitions/SpidersStats'
       tags:
       - api
     parameters:
@@ -1412,25 +1438,41 @@ paths:
       in: path
       required: true
      type: string
-  /api/stats/{pid}/spider/{sid}/jobs_stats:
-    post:
-      operationId: api_stats_spider_jobs_stats
-      description: Retrieve stats of all jobs metadata.
+  /api/stats/{pid}/spider/{sid}/jobs:
+    get:
+      operationId: api_stats_spider_jobs
+      description: Retrieve all the jobs of a spider executed in a range of dates.
       parameters:
-      - name: data
-        in: body
+      - name: page
+        in: query
+        description: A page number within the paginated result set.
+        required: false
+        type: integer
+      - name: page_size
+        in: query
+        description: Number of results to return per page.
+        required: false
+        type: integer
+      - name: start_date
+        in: query
+        description: Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).
         required: true
-        schema:
-          type: array
-          items:
-            $ref: '#/definitions/GetJobsStats'
+        type: string
+      - name: end_date
+        in: query
+        description: End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).
+        required: true
+        type: string
+      - name: spider
+        in: query
+        description: The spider ID related to the jobs.
+        required: true
+        type: integer
       responses:
         '200':
-          description: Array with stats summary for each job
+          description: Paginated jobs belonging to a spider in a range of time
           schema:
-            type: array
-            items:
-              $ref: '#/definitions/GetJobsStats'
+            $ref: '#/definitions/JobsPagination'
       tags:
       - api
     parameters:
@@ -1442,6 +1484,43 @@ paths:
       in: path
       required: true
       type: string
+  /api/stats/{pid}/spiders:
+    get:
+      operationId: api_stats_spiders
+      description: Retrieve all the spiders executed in a range of dates.
+      parameters:
+      - name: page
+        in: query
+        description: A page number within the paginated result set.
+        required: false
+        type: integer
+      - name: page_size
+        in: query
+        description: Number of results to return per page.
+        required: false
+        type: integer
+      - name: start_date
+        in: query
+        description: Start date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-04-01T05%3A00%3A00.000Z).
+        required: true
+        type: string
+      - name: end_date
+        in: query
+        description: End date in UTC format [%Y-%m-%dT%H:%M:%S.%fZ] (e.g. 2023-06-02T04%3A59%3A59.999Z).
+        required: true
+        type: string
+      responses:
+        '200':
+          description: Paginated spiders launched in a range of time
+          schema:
+            $ref: '#/definitions/SpidersPagination'
+      tags:
+      - api
+    parameters:
+    - name: pid
+      in: path
+      required: true
+      type: string
 definitions:
   ChangePassword:
     required:
@@ -2656,7 +2735,6 @@ definitions:
     - pages
     - status_codes
     - logs
-    - coverage
     type: object
     properties:
       jobs:
@@ -2670,7 +2748,7 @@ definitions:
        $ref: '#/definitions/JobsStats'
      pages:
        $ref: '#/definitions/PagesStats'
      items_count:
        title: Items count
        type: integer
        default: 0
      runtime:
        title: Runtime
-        type: number
-        default: 0.0
+        type: string
+        default: '00:00:00'
      status_codes:
        $ref: '#/definitions/StatusCodesStats'
      success_rate:
        title: Success rate
        type: number
        default: 0.0
      logs:
        $ref: '#/definitions/LogsStats'
      coverage:
        $ref: '#/definitions/CoverageStats'
-  JobsMetadata:
-    required:
-    - spider
-    type: object
-    properties:
-      jid:
-        title: Jid
-        description: A unique integer value identifying this job.
-        type: integer
-        readOnly: true
-      spider:
-        title: Spider
-        description: Spider sid.
-        type: integer
-      job_status:
-        title: Job status
-        type: string
-        readOnly: true
-  GlobalStats:
+  ProjectStats:
     required:
     - date
     - stats
-    - jobs_metadata
     type: object
     properties:
       date:
         title: Date
         type: string
         format: date
       stats:
         $ref: '#/definitions/Stats'
-      jobs_metadata:
-        type: array
-        items:
-          $ref: '#/definitions/JobsMetadata'
@@ -2712,28 +2771,126 @@ definitions:
-  GetJobsStats:
+  SpiderJobStats:
+    required:
+    - stats
     type: object
     properties:
       jid:
         title: Jid
+        description: A unique integer value identifying this job.
         type: integer
-        default: 0
+        readOnly: true
       spider:
         title: Spider
-        type: integer
-        default: 0
+        type: string
+        readOnly: true
+      created:
+        title: Created
+        description: Job creation date.
+        type: string
+        format: date-time
+        readOnly: true
+      name:
+        title: Name
+        description: Unique job name.
+        type: string
+        readOnly: true
+        minLength: 1
+      lifespan:
+        title: Lifespan
+        description: The elapsed seconds the spider job was running.
+        type: number
+      total_response_bytes:
+        title: Total response bytes
+        description: The total bytes received in responses.
+        type: integer
+        maximum: 18446744073709551615
+        minimum: 0
+      item_count:
+        title: Item count
+        description: The number of items extracted in the job.
+        type: integer
+        maximum: 18446744073709551615
+        minimum: 0
+      request_count:
+        title: Request count
+        description: The number of requests made by the spider job.
+        type: integer
+        maximum: 18446744073709551615
+        minimum: 0
+      args:
+        description: Job arguments.
+        type: array
+        items:
+          $ref: '#/definitions/SpiderJobArg'
+      env_vars:
+        description: Job env variables.
+        type: array
+        items:
+          $ref: '#/definitions/SpiderJobEnvVar'
+      tags:
+        description: Job tags.
+        type: array
+        items:
+          $ref: '#/definitions/SpiderJobTag'
+      job_status:
+        title: Job status
+        description: Current job status.
+        type: string
+        readOnly: true
+        minLength: 1
+      cronjob:
+        title: Cronjob
+        description: Related cron job.
+        type: integer
+        x-nullable: true
+      data_expiry_days:
+        title: Data expiry days
+        description: Days before data is deleted.
+        type: integer
+        maximum: 65535
+        minimum: 0
+        x-nullable: true
+      data_status:
+        title: Data status
+        description: Data status.
+        type: string
+        enum:
+        - PERSISTENT
+        - PENDING
+        - DELETED
       stats:
         $ref: '#/definitions/Stats'
+  JobsPagination:
+    required:
+    - count
+    - results
+    type: object
+    properties:
+      count:
+        title: Count
+        type: integer
+      next:
+        title: Next
+        type: string
+        format: uri
+        readOnly: true
+        x-nullable: true
+      previous:
+        title: Previous
+        type: string
+        format: uri
+        readOnly: true
+        x-nullable: true
+      results:
+        type: array
+        items:
+          $ref: '#/definitions/SpiderJobStats'
@@ -2742,7 +2899,28 @@ definitions:
-  SpidersJobsStats:
+  SpidersStats:
     required:
     - date
     - stats
-    - jobs_metadata
     type: object
     properties:
       date:
         title: Date
         type: string
         format: date
       stats:
         $ref: '#/definitions/Stats'
-      jobs_metadata:
+  SpidersPagination:
+    required:
+    - count
+    - results
+    type: object
+    properties:
+      count:
+        title: Count
+        type: integer
+      next:
+        title: Next
+        type: string
+        format: uri
+        readOnly: true
+        x-nullable: true
+      previous:
+        title: Previous
+        type: string
+        format: uri
+        readOnly: true
+        x-nullable: true
+      results:
         type: array
         items:
-          $ref: '#/definitions/JobsMetadata'
+          $ref: '#/definitions/Spider'
diff --git a/estela-api/engines/kubernetes.py b/estela-api/engines/kubernetes.py
index af13ae23..71a0e448 100644
--- a/estela-api/engines/kubernetes.py
+++ b/estela-api/engines/kubernetes.py
@@ -86,14 +86,17 @@ def create_job_object(
         pod_spec = client.V1PodSpec(
             containers=[container],
             restart_policy=self.POD_RESTART_POLICY,
-            image_pull_secrets=[client.V1LocalObjectReference(self.IMAGE_PULL_SECRET_NAME)],
+            image_pull_secrets=[
+                client.V1LocalObjectReference(self.IMAGE_PULL_SECRET_NAME)
+            ],
             volumes=[volume] if volume else None,
-            node_selector={"role": self.SPIDER_NODE_ROLE} if settings.MULTI_NODE_MODE else None,
+            node_selector={"role": self.SPIDER_NODE_ROLE}
+            if settings.MULTI_NODE_MODE
+            else None,
         )
 
         if not isbuild:
             pod_spec.security_context = client.V1PodSecurityContext(
-                run_as_non_root=True,
-                run_as_user=1000
+                run_as_non_root=True, run_as_user=1000
             )
 
         template.template.spec = pod_spec
diff --git a/estela-web/src/assets/icons/arrowLeft.svg b/estela-web/src/assets/icons/arrowLeft.svg
new file mode 100644
index 00000000..0474f519
--- /dev/null
+++ b/estela-web/src/assets/icons/arrowLeft.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/estela-web/src/assets/icons/arrowRight.svg b/estela-web/src/assets/icons/arrowRight.svg
new file mode 100644
index 00000000..646904ac
--- /dev/null
+++ b/estela-web/src/assets/icons/arrowRight.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/estela-web/src/assets/icons/cross.svg b/estela-web/src/assets/icons/cross.svg
new file mode 100644
index 00000000..09d60ae2
--- /dev/null
+++ b/estela-web/src/assets/icons/cross.svg
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/estela-web/src/assets/icons/doubleLeft.svg b/estela-web/src/assets/icons/doubleLeft.svg
index 81b5f673..f6ef72b4 100644
--- a/estela-web/src/assets/icons/doubleLeft.svg
+++ b/estela-web/src/assets/icons/doubleLeft.svg
@@ -1,4 +1,4 @@
-
-
+
+
diff --git a/estela-web/src/assets/icons/doubleRight.svg b/estela-web/src/assets/icons/doubleRight.svg
index b4d3a4b0..b0fcb872 100644
--- a/estela-web/src/assets/icons/doubleRight.svg
+++ b/estela-web/src/assets/icons/doubleRight.svg
@@ -1,4 +1,4 @@
-
-
+
+
diff --git a/estela-web/src/assets/icons/expand.svg b/estela-web/src/assets/icons/expand.svg
new file mode 100644
index 00000000..c6509ffd
--- /dev/null
+++ b/estela-web/src/assets/icons/expand.svg
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/estela-web/src/components/Stats/ChartsModalSection.tsx b/estela-web/src/components/Stats/ChartsModalSection.tsx
mode 100644 index 00000000..bbde306c --- /dev/null +++ b/estela-web/src/components/Stats/ChartsModalSection.tsx @@ -0,0 +1,915 @@ +import React, { Component } from "react"; +import { + Chart as ChartJS, + CategoryScale, + LinearScale, + BarElement, + Title, + Tooltip, + Legend, + ChartDataset, + ChartData, +} from "chart.js"; +import { Bar } from "react-chartjs-2"; +import { StatType } from "../../shared"; +import { SpiderJobStats } from "../../services"; +import { Tabs } from "antd"; +import "./ChartsSection.scss"; +import { parseDurationToSeconds, setValArr, sumArr } from "../../utils"; +import moment from "moment"; +import { MinMaxStatCard } from "./MinMaxStatCard"; + +ChartJS.register(CategoryScale, LinearScale, BarElement, Title, Tooltip, Legend); + +const datasetsGenerator = (statOption: StatType, stats: SpiderJobStats | SpiderJobStats[]) => { + if (Array.isArray(stats)) { + if (statOption === StatType.PAGES) + return [ + { + label: "scraped", + data: [sumArr(stats.map((jobsStats) => jobsStats.stats?.pages.scrapedPages ?? 0)), 0], + backgroundColor: "#32C3A4", + }, + { + label: "missed", + data: [0, sumArr(stats.map((jobsStats) => jobsStats.stats?.pages.missedPages ?? 0))], + backgroundColor: "#A13764", + }, + ]; + if (statOption === StatType.ITEMS) + return [ + { + label: "items", + data: [sumArr(stats.map((jobsStats) => jobsStats.stats?.itemsCount ?? 0))], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.RUNTIME) + return [ + { + label: "runtime", + data: [ + sumArr(stats.map((jobsStats) => parseDurationToSeconds(jobsStats.stats?.runtime?.toString()))), + ], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.SUCCESS_RATE) + return [ + { + label: "Job success rate", + data: [sumArr(stats.map((jobsStats) => jobsStats.stats?.successRate ?? 0)) / stats.length], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.STATUS_CODE) + return [ + { + label: "200", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status200 ?? 0)), + index: 0, + }), + backgroundColor: ["#32C3A4"], + }, + { + label: "301", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status301 ?? 0)), + index: 1, + }), + backgroundColor: "#D1A34F", + }, + { + label: "302", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status302 ?? 0)), + index: 2, + }), + backgroundColor: "#A13764", + }, + { + label: "401", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status401 ?? 0)), + index: 3, + }), + backgroundColor: "#3C7BC6", + }, + { + label: "403", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status403 ?? 0)), + index: 4, + }), + backgroundColor: "#7DC932", + }, + { + label: "404", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status404 ?? 0)), + index: 5, + }), + backgroundColor: "#FE9F99", + }, + { + label: "429", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status429 ?? 0)), + index: 6, + }), + backgroundColor: "#E7E255", + }, + { + label: "500", + data: setValArr({ + arr: new Array(8).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.statusCodes.status500 ?? 
0)), + index: 7, + }), + backgroundColor: "#6C757D", + }, + ]; + if (statOption === StatType.LOGS) + return [ + { + label: "INFO", + data: setValArr({ + arr: new Array(5).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.logs.infoLogs ?? 0)), + index: 0, + }), + backgroundColor: "#32C3A4", + }, + { + label: "DEBUG", + data: setValArr({ + arr: new Array(5).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.logs.debugLogs ?? 0)), + index: 1, + }), + backgroundColor: "#D1A34F", + }, + { + label: "ERROR", + data: setValArr({ + arr: new Array(5).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.logs.errorLogs ?? 0)), + index: 2, + }), + backgroundColor: "#A13764", + }, + { + label: "WARNING", + data: setValArr({ + arr: new Array(5).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.logs.warningLogs ?? 0)), + index: 3, + }), + backgroundColor: "#E7E255", + }, + { + label: "CRITICAL", + data: setValArr({ + arr: new Array(5).fill(0), + val: sumArr(stats.map((jobsStats) => jobsStats.stats?.logs.criticalLogs ?? 0)), + index: 4, + }), + backgroundColor: "#6C757D", + }, + ]; + } else { + if (statOption === StatType.PAGES) + return [ + { + label: "scraped", + data: [stats.stats.pages.scrapedPages ?? 0, 0], + backgroundColor: "#32C3A4", + }, + { + label: "missed", + data: [0, stats.stats.pages.missedPages ?? 0], + backgroundColor: "#A13764", + }, + ]; + if (statOption === StatType.ITEMS) + return [ + { + label: "items", + data: [stats.stats.itemsCount ?? 0], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.RUNTIME) + return [ + { + label: "runtime", + data: [stats.stats.runtime ?? 0], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.SUCCESS_RATE) + return [ + { + label: "Job success rate", + data: [stats.stats.successRate ?? 0], + backgroundColor: "#32C3A4", + }, + ]; + if (statOption === StatType.STATUS_CODE) + return [ + { + label: "200", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status200 ?? 0, + index: 0, + }), + backgroundColor: ["#32C3A4"], + }, + { + label: "301", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status301 ?? 0, + index: 1, + }), + backgroundColor: "#D1A34F", + }, + { + label: "302", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status302 ?? 0, + index: 2, + }), + backgroundColor: "#A13764", + }, + { + label: "401", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status401 ?? 0, + index: 3, + }), + backgroundColor: "#3C7BC6", + }, + { + label: "403", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status403 ?? 0, + index: 4, + }), + backgroundColor: "#7DC932", + }, + { + label: "404", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status404 ?? 0, + index: 5, + }), + backgroundColor: "#FE9F99", + }, + { + label: "429", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status429 ?? 0, + index: 6, + }), + backgroundColor: "#E7E255", + }, + { + label: "500", + data: setValArr({ + arr: new Array(8).fill(0), + val: stats.stats.statusCodes.status500 ?? 0, + index: 7, + }), + backgroundColor: "#6C757D", + }, + ]; + return [ + { + label: "INFO", + data: setValArr({ + arr: new Array(5).fill(0), + val: stats.stats.logs.infoLogs ?? 
0, + index: 0, + }), + backgroundColor: "#32C3A4", + }, + { + label: "DEBUG", + data: setValArr({ + arr: new Array(5).fill(0), + val: stats.stats.logs.debugLogs ?? 0, + index: 1, + }), + backgroundColor: "#D1A34F", + }, + { + label: "ERROR", + data: setValArr({ + arr: new Array(5).fill(0), + val: stats.stats.logs.errorLogs ?? 0, + index: 2, + }), + backgroundColor: "#A13764", + }, + { + label: "WARNING", + data: setValArr({ + arr: new Array(5).fill(0), + val: stats.stats.logs.warningLogs ?? 0, + index: 3, + }), + backgroundColor: "#E7E255", + }, + { + label: "CRITICAL", + data: setValArr({ + arr: new Array(5).fill(0), + val: stats.stats.logs.criticalLogs ?? 0, + index: 4, + }), + backgroundColor: "#6C757D", + }, + ]; + } + return [ + { + label: "", + data: [], + backgroundColor: "", + }, + ]; +}; + +const findMinMaxPages = (stats: SpiderJobStats[]) => { + if (stats.length === 0) return { scraped: { max: null, min: null }, missed: { max: null, min: null } }; + return stats.reduce( + (acc, curr) => { + if ((curr.stats.pages.scrapedPages ?? 0) > (acc.scraped.max.stats.pages.scrapedPages ?? 0)) + acc.scraped.max = curr; + if ((curr.stats.pages.scrapedPages ?? 0) < (acc.scraped.min.stats.pages.scrapedPages ?? 0)) + acc.scraped.min = curr; + if ((curr.stats.pages.missedPages ?? 0) > (acc.scraped.max.stats.pages.missedPages ?? 0)) + acc.missed.max = curr; + if ((curr.stats.pages.missedPages ?? 0) < (acc.scraped.min.stats.pages.missedPages ?? 0)) + acc.missed.min = curr; + return acc; + }, + { + scraped: { max: stats[0], min: stats[0] }, + missed: { max: stats[0], min: stats[0] }, + }, + ); +}; +const findMinMaxStatusCodes = (stats: SpiderJobStats[]) => { + if (stats.length === 0) + return { + vals200: { max: null, min: null }, + vals301: { max: null, min: null }, + vals302: { max: null, min: null }, + vals401: { max: null, min: null }, + vals403: { max: null, min: null }, + vals404: { max: null, min: null }, + vals429: { max: null, min: null }, + vals500: { max: null, min: null }, + }; + + return stats.reduce( + (acc, curr) => { + if ((curr.stats.statusCodes.status200 ?? 0) > (acc.vals200.max.stats.statusCodes.status200 ?? 0)) + acc.vals200.max = curr; + if ((curr.stats.statusCodes.status200 ?? 0) < (acc.vals200.min.stats.statusCodes.status200 ?? 0)) + acc.vals200.min = curr; + if ((curr.stats.statusCodes.status301 ?? 0) > (acc.vals301.max.stats.statusCodes.status301 ?? 0)) + acc.vals301.max = curr; + if ((curr.stats.statusCodes.status301 ?? 0) < (acc.vals301.min.stats.statusCodes.status301 ?? 0)) + acc.vals301.min = curr; + if ((curr.stats.statusCodes.status302 ?? 0) > (acc.vals302.max.stats.statusCodes.status302 ?? 0)) + acc.vals302.max = curr; + if ((curr.stats.statusCodes.status302 ?? 0) < (acc.vals302.min.stats.statusCodes.status302 ?? 0)) + acc.vals302.min = curr; + if ((curr.stats.statusCodes.status401 ?? 0) > (acc.vals401.max.stats.statusCodes.status401 ?? 0)) + acc.vals401.max = curr; + if ((curr.stats.statusCodes.status401 ?? 0) < (acc.vals401.min.stats.statusCodes.status401 ?? 0)) + acc.vals401.min = curr; + if ((curr.stats.statusCodes.status403 ?? 0) > (acc.vals403.max.stats.statusCodes.status403 ?? 0)) + acc.vals403.max = curr; + if ((curr.stats.statusCodes.status403 ?? 0) < (acc.vals403.min.stats.statusCodes.status403 ?? 0)) + acc.vals403.min = curr; + if ((curr.stats.statusCodes.status404 ?? 0) > (acc.vals404.max.stats.statusCodes.status404 ?? 0)) + acc.vals404.max = curr; + if ((curr.stats.statusCodes.status404 ?? 0) < (acc.vals404.min.stats.statusCodes.status404 ?? 
0)) + acc.vals404.min = curr; + if ((curr.stats.statusCodes.status429 ?? 0) > (acc.vals429.max.stats.statusCodes.status429 ?? 0)) + acc.vals429.max = curr; + if ((curr.stats.statusCodes.status429 ?? 0) < (acc.vals429.min.stats.statusCodes.status429 ?? 0)) + acc.vals429.min = curr; + if ((curr.stats.statusCodes.status500 ?? 0) > (acc.vals500.max.stats.statusCodes.status500 ?? 0)) + acc.vals500.max = curr; + if ((curr.stats.statusCodes.status500 ?? 0) < (acc.vals500.min.stats.statusCodes.status500 ?? 0)) + acc.vals500.min = curr; + return acc; + }, + { + vals200: { max: stats[0], min: stats[0] }, + vals301: { max: stats[0], min: stats[0] }, + vals302: { max: stats[0], min: stats[0] }, + vals401: { max: stats[0], min: stats[0] }, + vals403: { max: stats[0], min: stats[0] }, + vals404: { max: stats[0], min: stats[0] }, + vals429: { max: stats[0], min: stats[0] }, + vals500: { max: stats[0], min: stats[0] }, + }, + ); +}; +const findMinMaxLogs = (stats: SpiderJobStats[]) => { + if (stats.length === 0) + return { + info: { max: null, min: null }, + debug: { max: null, min: null }, + error: { max: null, min: null }, + warning: { max: null, min: null }, + critical: { max: null, min: null }, + }; + + return stats.reduce( + (acc, curr) => { + if ((curr.stats.logs.infoLogs ?? 0) > (acc.info.max.stats.logs.infoLogs ?? 0)) acc.info.max = curr; + if ((curr.stats.logs.infoLogs ?? 0) < (acc.info.min.stats.logs.infoLogs ?? 0)) acc.info.min = curr; + if ((curr.stats.logs.debugLogs ?? 0) > (acc.debug.max.stats.logs.debugLogs ?? 0)) acc.debug.max = curr; + if ((curr.stats.logs.debugLogs ?? 0) < (acc.debug.min.stats.logs.debugLogs ?? 0)) acc.debug.min = curr; + if ((curr.stats.logs.errorLogs ?? 0) > (acc.error.max.stats.logs.errorLogs ?? 0)) acc.error.max = curr; + if ((curr.stats.logs.errorLogs ?? 0) < (acc.error.min.stats.logs.errorLogs ?? 0)) acc.error.min = curr; + if ((curr.stats.logs.warningLogs ?? 0) > (acc.warning.max.stats.logs.warningLogs ?? 0)) + acc.warning.max = curr; + if ((curr.stats.logs.warningLogs ?? 0) < (acc.warning.min.stats.logs.warningLogs ?? 0)) + acc.warning.min = curr; + if ((curr.stats.logs.criticalLogs ?? 0) > (acc.critical.max.stats.logs.criticalLogs ?? 0)) + acc.critical.max = curr; + if ((curr.stats.logs.criticalLogs ?? 0) < (acc.critical.min.stats.logs.criticalLogs ?? 
0)) + acc.critical.min = curr; + return acc; + }, + { + info: { max: stats[0], min: stats[0] }, + debug: { max: stats[0], min: stats[0] }, + error: { max: stats[0], min: stats[0] }, + warning: { max: stats[0], min: stats[0] }, + critical: { max: stats[0], min: stats[0] }, + }, + ); +}; + +interface ChartsModalSectionProps { + pid: string; + stats: SpiderJobStats | SpiderJobStats[]; + pages?: boolean; + items?: boolean; + runtime?: boolean; + successRate?: boolean; + statusCodes?: boolean; + coverage?: boolean; + logs?: boolean; +} +export class ChartsModalSection extends Component { + labelsGenerator = (statOption: StatType) => { + if (statOption === StatType.PAGES) return ["scraped", "missed"]; + else if (statOption === StatType.ITEMS) return ["items"]; + else if (statOption === StatType.RUNTIME) return ["runtime"]; + else if (statOption === StatType.SUCCESS_RATE) return ["job success rate"]; + else if (statOption === StatType.STATUS_CODE) return ["200", "301", "302", "401", "403", "404", "429", "500"]; + else if (statOption === StatType.COVERAGE) return ["coverage"]; + else if (statOption === StatType.LOGS) return ["info", "debug", "error", "warning", "critical"]; + return []; + }; + + appendixCharts = (statOption: StatType): JSX.Element => { + const { stats, pid } = this.props; + if (!Array.isArray(stats)) return <>; + let items: { label: React.ReactNode; key: string; children: React.ReactNode }[] | null = null; + if (statOption === StatType.PAGES) { + const { scraped, missed } = findMinMaxPages(stats); + items = [ + { + label: "Scraped", + key: "scraped", + children: ( + + ), + }, + { + label: "Missed", + key: "missed", + children: ( + + ), + }, + ]; + } + if (statOption === StatType.STATUS_CODE) { + const { vals200, vals301, vals302, vals401, vals403, vals404, vals429, vals500 } = + findMinMaxStatusCodes(stats); + items = [ + { + label: "200", + key: "200", + children: ( + + ), + }, + { + label: "301", + key: "301", + children: ( + + ), + }, + { + label: "302", + key: "302", + children: ( + + ), + }, + { + label: "401", + key: "401", + children: ( + + ), + }, + { + label: "403", + key: "403", + children: ( + + ), + }, + { + label: "404", + key: "404", + children: ( + + ), + }, + { + label: "429", + key: "429", + children: ( + + ), + }, + { + label: "500", + key: "500", + children: ( + + ), + }, + ]; + } + if (statOption === StatType.LOGS) { + const { info, debug, error, warning, critical } = findMinMaxLogs(stats); + items = [ + { + label: "INFO", + key: "info", + children: ( + + ), + }, + { + label: "DEBUG", + key: "debug", + children: ( + + ), + }, + { + label: "ERROR", + key: "error", + children: ( + + ), + }, + { + label: "WARNING", + key: "warning", + children: ( + + ), + }, + { + label: "CRITICAL", + key: "critical", + children: ( + + ), + }, + ]; + } + + if (items) { + return ( +
+ +
+ ); + } + return <>; + }; + + charts = (statOption: StatType): JSX.Element => { + const { stats } = this.props; + const labels: string[] = this.labelsGenerator(statOption); + + const datasets: ChartDataset<"bar", number[]>[] = datasetsGenerator(statOption, stats); + + const data: ChartData<"bar", number[], string> = { + labels: labels, + datasets: datasets, + }; + + return ( + <> +
+ +
+ {this.appendixCharts(statOption)} + + ); + }; + + render() { + const { pages, items, runtime, successRate, statusCodes, coverage, logs } = this.props; + const tabsItems = []; + if (pages) + tabsItems.push({ + label: "Pages", + key: StatType.PAGES, + children: this.charts(StatType.PAGES), + }); + if (items) + tabsItems.push({ + label: "Items", + key: StatType.ITEMS, + children: this.charts(StatType.ITEMS), + }); + if (runtime) + tabsItems.push({ + label: "Runtime", + key: StatType.RUNTIME, + children: this.charts(StatType.RUNTIME), + }); + if (successRate) + tabsItems.push({ + label: "Job success rate", + key: StatType.SUCCESS_RATE, + children: this.charts(StatType.SUCCESS_RATE), + }); + if (statusCodes) + tabsItems.push({ + label: "Status code", + key: StatType.STATUS_CODE, + children: this.charts(StatType.STATUS_CODE), + }); + if (coverage) + tabsItems.push({ + label: "Coverage", + key: StatType.COVERAGE, + children: this.charts(StatType.COVERAGE), + }); + if (logs) + tabsItems.push({ + label: "Logs", + key: StatType.LOGS, + children: this.charts(StatType.LOGS), + }); + + return ; + } +} diff --git a/estela-web/src/components/Stats/ChartsSection.tsx b/estela-web/src/components/Stats/ChartsSection.tsx index 417fabef..a8907147 100644 --- a/estela-web/src/components/Stats/ChartsSection.tsx +++ b/estela-web/src/components/Stats/ChartsSection.tsx @@ -12,14 +12,14 @@ import { } from "chart.js"; import { Bar } from "react-chartjs-2"; import { StatType, Spin as Spinner } from "../../shared"; -import { GlobalStats, SpidersJobsStats } from "../../services"; +import { ProjectStats, SpidersStats } from "../../services"; import { Empty, Tabs } from "antd"; import moment from "moment"; import "./ChartsSection.scss"; ChartJS.register(CategoryScale, LinearScale, BarElement, Title, Tooltip, Legend); -const getJobsDataset = (statsData: GlobalStats[]) => { +const getJobsDataset = (statsData: ProjectStats[]) => { return [ { label: "Completed", @@ -54,7 +54,7 @@ const getJobsDataset = (statsData: GlobalStats[]) => { ]; }; -const getPagesDataset = (statsData: GlobalStats[]) => { +const getPagesDataset = (statsData: ProjectStats[]) => { return [ { label: "Scraped", @@ -69,7 +69,7 @@ const getPagesDataset = (statsData: GlobalStats[]) => { ]; }; -const getItemsDataset = (statsData: GlobalStats[]) => { +const getItemsDataset = (statsData: ProjectStats[]) => { const datasets = [ { label: "Scraped", @@ -80,7 +80,7 @@ const getItemsDataset = (statsData: GlobalStats[]) => { return datasets; }; -const getRuntimeDataset = (statsData: GlobalStats[]) => { +const getRuntimeDataset = (statsData: ProjectStats[]) => { return [ { label: "Runtime (seconds)", @@ -90,17 +90,17 @@ const getRuntimeDataset = (statsData: GlobalStats[]) => { ]; }; -const getCoverageDataset = (statsData: GlobalStats[]) => { +const getCoverageDataset = (statsData: ProjectStats[]) => { return [ { label: "Item coverage (percentage)", - data: statsData.map((statsData) => statsData.stats.coverage.totalItemsCoverage ?? 0), + data: statsData.map((statsData) => statsData.stats.coverage?.totalItemsCoverage ?? 
0), backgroundColor: "#32C3A4", }, ]; }; -const getSuccessRateDataset = (statsData: GlobalStats[]) => { +const getSuccessRateDataset = (statsData: ProjectStats[]) => { return [ { label: "Success rate (percentage)", @@ -110,7 +110,7 @@ const getSuccessRateDataset = (statsData: GlobalStats[]) => { ]; }; -const getStatusCodeDataset = (statsData: GlobalStats[]) => { +const getStatusCodeDataset = (statsData: ProjectStats[]) => { return [ { label: "200", @@ -155,7 +155,7 @@ const getStatusCodeDataset = (statsData: GlobalStats[]) => { ]; }; -const getLogsDataset = (statData: GlobalStats[]) => { +const getLogsDataset = (statData: ProjectStats[]) => { return [ { label: "INFO", @@ -187,11 +187,11 @@ const getLogsDataset = (statData: GlobalStats[]) => { interface ChartsSectionProps { loadedStats: boolean; - stats: GlobalStats[] | SpidersJobsStats[]; + stats: ProjectStats[] | SpidersStats[]; } export class ChartsSection extends Component { - datasetsGenerators: { [key in StatType]: (statsData: GlobalStats[]) => ChartDataset<"bar", number[]>[] } = { + datasetsGenerators: { [key in StatType]: (statsData: ProjectStats[]) => ChartDataset<"bar", number[]>[] } = { [StatType.JOBS]: getJobsDataset, [StatType.PAGES]: getPagesDataset, [StatType.ITEMS]: getItemsDataset, @@ -221,7 +221,7 @@ export class ChartsSection extends Component { {stats.length === 0 ? ( ) : ( -
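All of the get*Dataset helpers in this file share one shape: project each ProjectStats entry onto a number, then pair the series with a label and a color. Condensed, the pattern is just (the field choice here is illustrative):

import { ChartDataset } from "chart.js";
import { ProjectStats } from "../../services";

const getScrapedPagesDataset = (statsData: ProjectStats[]): ChartDataset<"bar", number[]>[] => [
    {
        label: "Scraped",
        // One bar per day of stats; missing values fall back to 0.
        data: statsData.map((stat) => stat.stats.pages.scrapedPages ?? 0),
        backgroundColor: "#32C3A4",
    },
];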
+
| undefined; + onRefreshEventHandler: () => Promise; onChangeDateRangeHandler: ((values: RangeValue, formatString: [string, string]) => void) | undefined; } @@ -65,7 +65,7 @@ export class HeaderSection extends Component} className="flex float-left items-center py-3 px-4 rounded-3xl text-sm font-medium stroke-estela border-none bg-estela-blue-low hover:bg-estela-blue-low text-estela hover:stroke-estela hover:text-estela focus:bg-estela-blue-low focus:stroke-estela focus:text-estela" - onClick={onRefreshEventHandler} + onClick={() => onRefreshEventHandler()} > Refresh diff --git a/estela-web/src/components/Stats/MinMaxStatCard.tsx b/estela-web/src/components/Stats/MinMaxStatCard.tsx new file mode 100644 index 00000000..1d2f4a75 --- /dev/null +++ b/estela-web/src/components/Stats/MinMaxStatCard.tsx @@ -0,0 +1,49 @@ +import React, { Component } from "react"; +import { Link } from "react-router-dom"; +import ArrowRight from "../../assets/icons/arrowRight.svg"; + +interface MinMaxStatCardProps { + maxJobURL: string; + minJobURL: string; + maxHeadText: string; + minHeadText: string; + maxJobText: string; + minJobText: string; + maxValText: string; + minValText: string; +} + +export class MinMaxStatCard extends Component { + render() { + const { maxJobURL, minJobURL, maxHeadText, minHeadText, maxJobText, minJobText, maxValText, minValText } = + this.props; + return ( + <> +

{maxHeadText}

+ +
+

{maxJobText}

+

{maxValText}

+
+ + +

{minHeadText}

+ +
+

{minJobText}

+

{minValText}

+
+ + + + ); + } +} diff --git a/estela-web/src/components/Stats/ProjectHealth.tsx b/estela-web/src/components/Stats/ProjectHealth.tsx index 21463e06..0288fdf8 100644 --- a/estela-web/src/components/Stats/ProjectHealth.tsx +++ b/estela-web/src/components/Stats/ProjectHealth.tsx @@ -5,7 +5,7 @@ import { Spin } from "../../shared"; import { BytesMetric, formatSecondsToHHMMSS } from "../../utils"; import { Chart as ChartJS, CategoryScale, Title, Tooltip, Legend, ArcElement } from "chart.js"; import Help from "../../assets/icons/help.svg"; -import { GlobalStats } from "../../services"; +import { ProjectStats } from "../../services"; ChartJS.register(CategoryScale, ArcElement, Title, Tooltip, Legend); const { Text } = Typography; @@ -15,7 +15,7 @@ const influencePages = 0.3; interface ProjectHealthProps { loadedStats: boolean; - stats: GlobalStats[]; + stats: ProjectStats[]; formattedNetwork: BytesMetric; processingTime: number; formattedStorage: BytesMetric; diff --git a/estela-web/src/components/Stats/StatsDateModalContent.scss b/estela-web/src/components/Stats/StatsDateModalContent.scss new file mode 100644 index 00000000..e69de29b diff --git a/estela-web/src/components/Stats/StatsDateModalContent.tsx b/estela-web/src/components/Stats/StatsDateModalContent.tsx new file mode 100644 index 00000000..35cd736b --- /dev/null +++ b/estela-web/src/components/Stats/StatsDateModalContent.tsx @@ -0,0 +1,420 @@ +import React, { Component } from "react"; +import { Spin as Spinner } from "../../shared"; +import { ApiApi, JobsPagination, SpidersPagination, SpiderJobStats, Spider } from "../../services"; +import { Button, Row, Tabs } from "antd"; +import ArrowLeft from "../../assets/icons/arrowLeft.svg"; +import ArrowRight from "../../assets/icons/arrowRight.svg"; +import DoubleLeft from "../../assets/icons/doubleLeft.svg"; +import DoubleRight from "../../assets/icons/doubleRight.svg"; +import "./StatsDateModalContent.scss"; +import moment from "moment"; +import { Link } from "react-router-dom"; +import { ChartsModalSection } from "./ChartsModalSection"; +import { formatBytes, parseDurationToSeconds } from "../../utils"; + +interface StatsDateModalContentState { + activeSpider: Spider; + spiders: SpidersPagination; + loadedSpiders: boolean; + currSpidersPage: number; + activeJob: SpiderJobStats; + jobs: JobsPagination; + loadedJobs: boolean; + currJobsPage: number; + overviewTabSelected: boolean; +} + +interface StatsDateModalContentProps { + pid: string; + apiService: ApiApi; + startDate: string; + endDate: string; + nextDate: () => void; + prevDate: () => void; +} + +export class StatsDateModalContent extends Component { + abortController = new AbortController(); + jobPageSize = 10; + spiderPageSize = 10; + + state: StatsDateModalContentState = { + activeSpider: {} as Spider, + spiders: {} as SpidersPagination, + loadedSpiders: false, + currSpidersPage: 1, + activeJob: {} as SpiderJobStats, + jobs: {} as JobsPagination, + loadedJobs: false, + currJobsPage: 1, + overviewTabSelected: true, + }; + + async componentDidMount(): Promise { + const activeSpider = await this.retrieveSpiders(1); + if (activeSpider) this.retrieveJobsSpider(activeSpider.sid ?? 0, 1); + } + + async componentDidUpdate(prevProps: Readonly) { + const { startDate, endDate } = this.props; + if (prevProps.startDate !== startDate && prevProps.endDate !== endDate) { + const activeSpider = await this.retrieveSpiders(1); + if (activeSpider) this.retrieveJobsSpider(activeSpider.sid ?? 
0, 1); + } + } + + componentWillUnmount(): void { + this.abortController.abort(); + } + + retrieveSpiders = async (page?: number) => { + this.setState({ loadedSpiders: false, overviewTabSelected: true }); + try { + const { currSpidersPage } = this.state; + const { pid, startDate, endDate, apiService } = this.props; + const spiders: SpidersPagination = await apiService.apiStatsSpiders({ + pid: pid, + startDate: startDate, + endDate: endDate, + pageSize: this.spiderPageSize, + page: page || currSpidersPage, + }); + if (spiders.results.length === 0 && !this.abortController.signal.aborted) { + this.setState({ loadedSpiders: true, spiders: spiders }); + return null; + } + if (!this.abortController.signal.aborted) { + this.setState({ + loadedSpiders: true, + spiders: spiders, + activeSpider: spiders.results[0], + currSpidersPage: page || currSpidersPage, + }); + return spiders.results[0]; + } + } catch (error) { + console.error(error); + } + return null; + }; + + retrieveJobsSpider = async (spider: number, page?: number) => { + this.setState({ loadedJobs: false, overviewTabSelected: true }); + try { + const { pid, startDate, endDate, apiService } = this.props; + const { activeSpider, currJobsPage } = this.state; + + if (!activeSpider.sid) throw new Error("No active spider found"); + const jobs = apiService.apiStatsJobs({ + pid: pid, + spider: spider, + startDate: startDate, + endDate: endDate, + pageSize: this.jobPageSize, + page: page || currJobsPage, + }); + jobs.then((jobs) => { + if (jobs.results.length === 0 && !this.abortController.signal.aborted) { + this.setState({ loadedJobs: true, jobs: jobs }); + return; + } + if (!this.abortController.signal.aborted) { + this.setState({ loadedJobs: true, jobs: jobs, currJobsPage: page || currJobsPage }); + return; + } + }).catch((error) => { + if (error.name === "AbortError") { + return; + } + console.error(error); + }); + } catch (error) { + console.error(error); + } + }; + + generateTabsItems = () => { + const { pid } = this.props; + const { jobs } = this.state; + const items = [ + { + label:
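One nuance in componentDidUpdate above: the refetch fires only when both startDate and endDate changed. The modal's prev/next arrows always move both bounds together, so this works today, but if a caller ever moves a single bound the panel would go stale. A more defensive guard, as a sketch against the same props shape:

async componentDidUpdate(prevProps: Readonly<StatsDateModalContentProps>) {
    const { startDate, endDate } = this.props;
    // Refetch when either bound of the date window moves, not only when both do.
    if (prevProps.startDate !== startDate || prevProps.endDate !== endDate) {
        const activeSpider = await this.retrieveSpiders(1);
        if (activeSpider) this.retrieveJobsSpider(activeSpider.sid ?? 0, 1);
    }
}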

Overview

, + key: "overview", + children: , + }, + ]; + const jobsItems = jobs.results.map((job) => { + return { + label:

Job {job.jid}

, + key: `${job.jid}`, + children: , + }; + }); + return items.concat(jobsItems); + }; + + rightSidedStatsSection(): JSX.Element { + const { pid } = this.props; + const { activeSpider, overviewTabSelected, activeJob, jobs } = this.state; + + const spiderBandwidth = formatBytes( + jobs.results.reduce((acc, curr) => acc + (curr.totalResponseBytes ?? 0), 0), + ); + const spiderProcessingTime = Math.round( + jobs.results.reduce((acc, curr) => acc + parseDurationToSeconds(curr.stats.runtime?.toString()), 0), + ); + + return ( + <> + {overviewTabSelected && activeSpider.sid ? ( + + See all spider {activeSpider.name} information + + + ) : ( + + See all job {activeJob.jid} information + + + )} + + {!overviewTabSelected && activeJob.jid && ( + <> +

Job execution stats

+
+
+
+

+ {isNaN(Math.round(parseDurationToSeconds(activeJob.stats.runtime?.toString()))) + ? "none" + : Math.round(parseDurationToSeconds(activeJob.stats.runtime?.toString()))} +

+

Sec

+
+
+

Runtime

+

+ How long your spider ran (seconds). +

+
+
+
+
+

+ {activeJob.itemCount || "none"} +

+

Items

+
+
+

Scraped items

+

+ Number of items retrieved by the job. +

+
+
+
+ + )} + {!overviewTabSelected && activeJob.jid && ( + <> +
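The NaN guard above exists because stats.runtime now arrives as a duration string (the API's DurationField) rather than a float. The conversion it leans on, parseDurationToSeconds from utils.ts (added later in this diff), reduces to:

import { parseDurationToSeconds } from "../../utils";

// "01:02:03" -> 3723 seconds; undefined -> 0. Note the helper assumes an
// HH:MM:SS shape; day-carrying durations would need extra handling.
const runtimeSeconds = Math.round(parseDurationToSeconds("01:02:03"));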

Job usage stats

+
+
+
+

+ {formatBytes(activeJob.totalResponseBytes ?? 0).quantity} +

+

+ {formatBytes(activeJob.totalResponseBytes ?? 0).type} +

+
+
+

Bandwidth

+

+ Amount of network data your job used. +

+
+
+
+ + )} + + {overviewTabSelected && ( + <> +

Spider usage stats

+
+
+
+

+ {spiderBandwidth.quantity} +

+

{spiderBandwidth.type}

+
+
+

Bandwidth

+

+ Amount of network data your spider used (for the current page). +

+
+
+
+
+

+ {spiderProcessingTime} +

+

Sec

+
+
+

Processing time

+

+ Processing time taken by all the jobs of this spider (for the current page) +

+
+
+
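The spider-level cards above fold bandwidth and processing time over the current page of jobs only, which is why both captions say "(for the current page)". Stripped of JSX, the aggregation is:

import { formatBytes, parseDurationToSeconds } from "../../utils";
import { JobsPagination } from "../../services";

// Totals over the jobs on the current page, not over all of the spider's jobs.
const pageTotals = (jobs: JobsPagination) => ({
    bandwidth: formatBytes(jobs.results.reduce((acc, job) => acc + (job.totalResponseBytes ?? 0), 0)),
    processingTime: Math.round(
        jobs.results.reduce((acc, job) => acc + parseDurationToSeconds(job.stats.runtime?.toString()), 0),
    ),
});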
+ + )} + + ); + } + + render() { + const { nextDate, prevDate, startDate } = this.props; + const { activeSpider, loadedSpiders, spiders, loadedJobs, jobs, currSpidersPage } = this.state; + return ( +
+
+ {loadedSpiders && activeSpider && ( + <> + +
prevDate()} + > + +
+
+

{moment.utc(startDate).local().format("dddd")}

+

+ {moment.utc(startDate).local().format("DD MMMM, YYYY")} +

+
+
nextDate()} + > + +
+
+ +

Spiders

+ + {spiders.previous && ( + + )} + {spiders.results.map((spider, index) => { + const style = + spider.sid === activeSpider.sid + ? "rounded-t-lg border-0 bg-estela-white-full text-estela-blue-full hover:bg-estela-white-full hover:text-estela-blue-full focus:bg-estela-white-full focus:text-estela-blue-full" + : "rounded-t-lg border-0 bg-estela-blue-medium text-estela-white-full hover:bg-estela-white-full hover:text-estela-blue-full focus:bg-estela-white-full focus:text-estela-blue-full"; + return ( + + ); + })} + {spiders.next && ( + + )} +
+ + )} +
+
+ {loadedSpiders && activeSpider && loadedJobs ? ( +
+
+ { + this.setState({ overviewTabSelected: key === "overview" }); + if (key !== "overview") { + const jobResult = jobs.results.find((job) => `${job.jid}` === key); + if (jobResult) this.setState({ activeJob: jobResult }); + } + }} + items={this.generateTabsItems()} + className="w-full" + /> + {jobs.previous && ( + + )} + {jobs.next && ( + + )} +
+
{this.rightSidedStatsSection()}
+
+ ) : ( + + )} +
+
+ ); + } +} diff --git a/estela-web/src/components/Stats/StatsTableSection.scss b/estela-web/src/components/Stats/StatsTableSection.scss new file mode 100644 index 00000000..6100bbed --- /dev/null +++ b/estela-web/src/components/Stats/StatsTableSection.scss @@ -0,0 +1,3 @@ +.stats-date-modal .ant-modal-body { + padding: 0% !important; +} diff --git a/estela-web/src/components/Stats/StatsTableSection.tsx b/estela-web/src/components/Stats/StatsTableSection.tsx index e31da7de..e56394be 100644 --- a/estela-web/src/components/Stats/StatsTableSection.tsx +++ b/estela-web/src/components/Stats/StatsTableSection.tsx @@ -1,81 +1,69 @@ import React, { Component } from "react"; -import { Row, Table } from "antd"; +import { Modal, Row, Table } from "antd"; import moment from "moment"; import type { ColumnsType } from "antd/es/table"; import { formatSecondsToHHMMSS } from "../../utils"; -import { - ApiApi, - ApiStatsJobsStatsRequest, - GetJobsStats, - GlobalStats, - JobsMetadata, - SpidersJobsStats, -} from "../../services"; -import { Link } from "react-router-dom"; -import { Spin } from "../../shared"; +import { ApiApi, ProjectStats, SpidersStats } from "../../services"; +import Cross from "../../assets/icons/cross.svg"; +import Expand from "../../assets/icons/expand.svg"; +import "./StatsTableSection.scss"; +import { StatsDateModalContent } from "./StatsDateModalContent"; interface StatsTableDataType { key: string; - statsDate: GlobalStats; + statsDate: ProjectStats; } -interface StatsTraceabilityDataType { - key: string; - statsDate: GetJobsStats; - jobStatus: string; -} - -interface DataListSectionProps { +interface StatsTableSectionProps { loadedStats: boolean; - stats: GlobalStats[] | SpidersJobsStats[]; + stats: ProjectStats[] | SpidersStats[]; pid: string; apiService: ApiApi; } -interface DataListSectionState { - loadedDatesStats: boolean[]; - jobsDateStats: GetJobsStats[][]; - focusedStatIndex: number; +interface StatsTableSectionState { + focusStatsDateIndex: number; + openDateModal: boolean; + startDateModal: string; + endDateModal: string; } -export class StatsTableSection extends Component { - state: DataListSectionState = { - loadedDatesStats: [], - jobsDateStats: [], - focusedStatIndex: 0, - }; - - componentDidUpdate() { - const { stats } = this.props; - const { loadedDatesStats, jobsDateStats } = this.state; - if (loadedDatesStats.length === 0 && jobsDateStats.length === 0 && stats.length !== 0) { - const newLoadedDatesStats = Array(stats.length).fill(false); - const newJobsDateStats = Array(stats.length); - this.setState({ loadedDatesStats: [...newLoadedDatesStats], jobsDateStats: [...newJobsDateStats] }); - } - } - - retrieveDateJobsStats = async (index: number, jobsMetadata: JobsMetadata[]): Promise => { - const { pid, apiService } = this.props; - - const params: ApiStatsJobsStatsRequest = { - pid: pid, - data: jobsMetadata, - }; - await apiService.apiStatsJobsStats(params).then((response: GetJobsStats[]) => { - const { loadedDatesStats, jobsDateStats } = this.state; - const newLoadedDatesStats = [...loadedDatesStats]; - newLoadedDatesStats[index] = true; - const newJobsDateStats = [...jobsDateStats]; - newJobsDateStats[index] = response; - this.setState({ - jobsDateStats: [...newJobsDateStats], - loadedDatesStats: [...newLoadedDatesStats], - }); - }); +export class StatsTableSection extends Component { + state: StatsTableSectionState = { + focusStatsDateIndex: 0, + openDateModal: false, + startDateModal: "", + endDateModal: "", }; colsStatsTable: ColumnsType = [ + { + title: "", + 
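Both entry points into the date modal (the details icon in the column below and the row onClick further down) derive the same one-day window: take the row's date, expand it to local start/end of day, then convert to UTC ISO strings. Factored out, that is:

import moment from "moment";

// A local calendar day expressed as [start, end] UTC ISO bounds.
const dayWindow = (date: Date): [string, string] => [
    moment(date).startOf("day").utc().toISOString(),
    moment(date).endOf("day").utc().toISOString(),
];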
dataIndex: "details", + key: "details", + align: "center", + render: (_, { statsDate }, index) => { + return ( +
{ + const [startDate, endDate] = [ + moment(statsDate.date).startOf("day").utc().toISOString(), + moment(statsDate.date).endOf("day").utc().toISOString(), + ]; + this.setState({ + openDateModal: true, + focusStatsDateIndex: index, + startDateModal: startDate, + endDateModal: endDate, + }); + }} + > + +
+ ); + }, + }, { title:

DAY

, dataIndex: "day", @@ -184,130 +172,56 @@ export class StatsTableSection extends Component = [ { - title:

JOB

, - dataIndex: "job_id", - key: "job_id", + title:

ERROR

, + dataIndex: "errorLogs", + key: "errorLogs", align: "center", render: (_, { statsDate }) => { - const { pid } = this.props; - const jobId = statsDate.jid; - const spiderId = statsDate.spider; - if (!jobId || !spiderId) { - return

no-data

; - } - return ( - - Job-{jobId} - - ); + const errorLogs = statsDate.stats.logs.errorLogs ?? 0; + return

{errorLogs}

; }, - }, - { - title:

SPIDER

, - dataIndex: "spider_id", - key: "spider_id", - align: "center", - render: (_, { statsDate }) => { - const { pid } = this.props; - const spiderId = statsDate.spider; - if (!spiderId) { - return

no-data

; - } - return ( - - Spider-{spiderId} - - ); - }, - }, - { - title:

STATUS

, - dataIndex: "status", - key: "status", - filters: [ - { - text: "COMPLETED", - value: "COMPLETED", - }, - { - text: "ERROR", - value: "ERROR", - }, - { - text: "RUNNING", - value: "RUNNING", - }, - { - text: "WAITING", - value: "WAITING", - }, - { - text: "STOPPED", - value: "STOPPED", - }, - { - text: "IN_QUEUE", - value: "IN_QUEUE", - }, - ], - render: (_, { jobStatus }) => { - return

{jobStatus}

; + sorter: (statA, statB) => { + const errorLogsA = statA.statsDate.stats.logs.errorLogs ?? 0; + const errorLogsB = statB.statsDate.stats.logs.errorLogs ?? 0; + return errorLogsA - errorLogsB; }, - onFilter: (status, record) => String(status) === record.jobStatus, }, { - title:

ITEMS

, - dataIndex: "items", - key: "items", + title:

WARNING

, + dataIndex: "warningLogs", + key: "warningLogs", + align: "center", render: (_, { statsDate }) => { - const itemsCount = statsDate.stats?.itemsCount || "no-data"; - return

{itemsCount}

; + const warningLogs = statsDate.stats.logs.warningLogs ?? 0; + return

{warningLogs}

; }, - }, - { - title:

RUN TIME

, - dataIndex: "runtime", - key: "runtime", - render: (_, { statsDate }) => { - let runtime = "no-data"; - if (statsDate.stats) runtime = formatSecondsToHHMMSS(statsDate.stats.runtime ?? 0); - return

{runtime}

; + sorter: (statA, statB) => { + const warningLogsA = statA.statsDate.stats.logs.warningLogs ?? 0; + const warningLogsB = statB.statsDate.stats.logs.warningLogs ?? 0; + return warningLogsA - warningLogsB; }, }, { - title:

SCRAPED PAGES

, - dataIndex: "scraped_pages", - key: "scraped_pages", + title:

CRITICAL

, + dataIndex: "criticalLogs", + key: "criticalLogs", + align: "center", render: (_, { statsDate }) => { - const scrapedPages = statsDate.stats?.pages.scrapedPages ?? "no-data"; - return

{scrapedPages}

; + const criticalLogs = statsDate.stats.logs.criticalLogs ?? 0; + return

{criticalLogs}

; }, - }, - { - title:

MISSED PAGES

, - dataIndex: "missed_pages", - key: "missed_pages", - render: (_, { statsDate }) => { - const missedPages = statsDate.stats?.pages.missedPages ?? "no-data"; - return

{missedPages}

; + sorter: (statA, statB) => { + const criticalLogsA = statA.statsDate.stats.logs.criticalLogs ?? 0; + const criticalLogsB = statB.statsDate.stats.logs.criticalLogs ?? 0; + return criticalLogsA - criticalLogsB; }, }, ]; render() { - const { loadedStats, stats } = this.props; + const { openDateModal, focusStatsDateIndex, startDateModal, endDateModal } = this.state; + const { loadedStats, stats, apiService, pid } = this.props; if (!loadedStats) { return ; @@ -325,41 +239,87 @@ export class StatsTableSection extends Component { - const { loadedDatesStats, jobsDateStats } = this.state; - if (!loadedDatesStats[index]) { - this.retrieveDateJobsStats(index, record.statsDate.jobsMetadata); - return ; - } - const dataDateTraceStats: StatsTraceabilityDataType[] = jobsDateStats[index].map( - (jobStat: GetJobsStats, jobIndex: number) => { - const status = - record.statsDate.jobsMetadata.find((jobMeta) => jobMeta.jid === jobStat.jid) - ?.jobStatus ?? "UNKNOWN"; - return { - key: `${jobIndex}`, - statsDate: jobStat, - jobStatus: status, - }; + <> + { + return { + onClick: () => { + const [startDate, endDate] = [ + moment(record.statsDate.date).startOf("day").utc().toISOString(), + moment(record.statsDate.date).endOf("day").utc().toISOString(), + ]; + this.setState({ + openDateModal: true, + focusStatsDateIndex: rowIndex ?? 0, + startDateModal: startDate, + endDateModal: endDate, + }); }, - ); - return ( -
- ); - }, - }} - pagination={false} - /> + }; + }} + /> + + this.setState({ openDateModal: false })} + title={null} + className="stats-date-modal" + width="90%" + closeIcon={} + footer={null} + destroyOnClose + > + { + if (focusStatsDateIndex < stats.length - 1) { + const [startDate, endDate] = [ + moment(stats[focusStatsDateIndex + 1].date) + .startOf("day") + .utc() + .toISOString(), + moment(stats[focusStatsDateIndex + 1].date) + .endOf("day") + .utc() + .toISOString(), + ]; + this.setState({ + focusStatsDateIndex: focusStatsDateIndex + 1, + startDateModal: startDate, + endDateModal: endDate, + }); + } + }} + prevDate={() => { + if (focusStatsDateIndex > 0) { + const [startDate, endDate] = [ + moment(stats[focusStatsDateIndex - 1].date) + .startOf("day") + .utc() + .toISOString(), + moment(stats[focusStatsDateIndex - 1].date) + .endOf("day") + .utc() + .toISOString(), + ]; + this.setState({ + focusStatsDateIndex: focusStatsDateIndex - 1, + startDateModal: startDate, + endDateModal: endDate, + }); + } + }} + /> + + ); } } diff --git a/estela-web/src/pages/ProjectDashboardPage/index.tsx b/estela-web/src/pages/ProjectDashboardPage/index.tsx index 06d40548..18b4c892 100644 --- a/estela-web/src/pages/ProjectDashboardPage/index.tsx +++ b/estela-web/src/pages/ProjectDashboardPage/index.tsx @@ -4,8 +4,8 @@ import { RouteComponentProps } from "react-router-dom"; import "./styles.scss"; import { ApiService, AuthService } from "../../services"; import Copy from "../../assets/icons/copy.svg"; -import { ApiProjectsReadRequest, Project, ProjectUsage, GlobalStats, ApiStatsListRequest } from "../../services/api"; -import { BytesMetric, formatBytes } from "../../utils"; +import { ApiProjectsReadRequest, Project, ProjectUsage, ProjectStats } from "../../services/api"; +import { BytesMetric, formatBytes, parseDurationToSeconds } from "../../utils"; import { resourceNotAllowedNotification, Spin } from "../../shared"; import { UserContext, UserContextProps } from "../../context"; import moment from "moment"; @@ -24,7 +24,7 @@ interface ProjectDashboardPageState { count: number; current: number; loadedStats: boolean; - globalStats: GlobalStats[]; + projectStats: ProjectStats[]; statsStartDate: moment.Moment; statsEndDate: moment.Moment; } @@ -50,7 +50,7 @@ export class ProjectDashboardPage extends Component => { + this.setState({ loadedStats: false }); const { statsStartDate, statsEndDate } = this.state; - const params: ApiStatsListRequest = { - pid: this.projectId, - startDate: !startDate ? statsStartDate.toISOString() : startDate, - endDate: !endDate ? statsEndDate.toISOString() : endDate, - }; - if (startDate && endDate) { this.setState({ statsStartDate: moment.utc(startDate), statsEndDate: moment.utc(endDate), }); } - - await this.apiService.apiStatsList(params).then( - (response: GlobalStats[]) => { - this.setState({ - globalStats: response, - loadedStats: true, - }); - }, - (error: Error) => { - notification.error({ - message: "No data", - description: error.message, - }); - this.setState({ loadedStats: true }); - }, - ); + await this.apiService + .apiStatsList({ + pid: this.projectId, + startDate: !startDate ? statsStartDate.toISOString() : startDate, + endDate: !endDate ? 
statsEndDate.toISOString() : endDate, + offset: new Date().getTimezoneOffset(), + }) + .then( + (response: ProjectStats[]) => { + response.forEach((stat) => { + if (stat.stats.runtime) + stat.stats.runtime = parseDurationToSeconds(stat.stats.runtime.toString()); + }); + this.setState({ + projectStats: [...response], + loadedStats: true, + }); + }, + (error: Error) => { + notification.error({ + message: "No data", + description: error.message, + }); + this.setState({ loadedStats: true }); + }, + ); }; calcAverageSuccessRate = (): number => { - const { globalStats } = this.state; - if (globalStats.length === 0) return 0; - const successRates = globalStats.map((stat) => (stat.stats.successRate ?? 0) / 100); + const { projectStats } = this.state; + if (projectStats.length === 0) return 0; + const successRates = projectStats.map((stat) => (stat.stats.successRate ?? 0) / 100); const sumSuccessRates = successRates.reduce((acc, cur) => acc + cur, 0); return sumSuccessRates / successRates.length; }; @@ -151,16 +156,15 @@ export class ProjectDashboardPage extends Component | undefined = () => { - this.setState({ loadedStats: false }); - this.getProjectStatsAndUpdateDates(); + onRefreshEventHandler = async () => { + await this.getProjectStatsAndUpdateDates(); }; render(): JSX.Element { const { name, loaded, - globalStats, + projectStats, loadedStats, formattedNetwork, formattedStorage, @@ -199,16 +203,16 @@ export class ProjectDashboardPage extends Component - + diff --git a/estela-web/src/services/api/generated-api/.openapi-generator/FILES b/estela-web/src/services/api/generated-api/.openapi-generator/FILES index a9b6c5e3..e6949712 100644 --- a/estela-web/src/services/api/generated-api/.openapi-generator/FILES +++ b/estela-web/src/services/api/generated-api/.openapi-generator/FILES @@ -11,8 +11,6 @@ models/Deploy.ts models/DeployCreate.ts models/DeployUpdate.ts models/FieldCoverageStats.ts -models/GetJobsStats.ts -models/GlobalStats.ts models/InlineResponse200.ts models/InlineResponse2001.ts models/InlineResponse2002.ts @@ -23,7 +21,7 @@ models/InlineResponse2006.ts models/InlineResponse2007.ts models/InlineResponse2008.ts models/InlineResponse401.ts -models/JobsMetadata.ts +models/JobsPagination.ts models/JobsStats.ts models/LogsStats.ts models/Notification.ts @@ -35,6 +33,7 @@ models/ProjectActivity.ts models/ProjectCronJob.ts models/ProjectDetail.ts models/ProjectJob.ts +models/ProjectStats.ts models/ProjectUpdate.ts models/ProjectUsage.ts models/ResetPasswordConfirm.ts @@ -47,10 +46,12 @@ models/SpiderJob.ts models/SpiderJobArg.ts models/SpiderJobCreate.ts models/SpiderJobEnvVar.ts +models/SpiderJobStats.ts models/SpiderJobTag.ts models/SpiderJobUpdate.ts models/SpiderUpdate.ts -models/SpidersJobsStats.ts +models/SpidersPagination.ts +models/SpidersStats.ts models/Stats.ts models/StatusCodesStats.ts models/Token.ts diff --git a/estela-web/src/services/api/generated-api/apis/ApiApi.ts b/estela-web/src/services/api/generated-api/apis/ApiApi.ts index a0df6bfb..faef8ff8 100644 --- a/estela-web/src/services/api/generated-api/apis/ApiApi.ts +++ b/estela-web/src/services/api/generated-api/apis/ApiApi.ts @@ -33,12 +33,6 @@ import { DeployUpdate, DeployUpdateFromJSON, DeployUpdateToJSON, - GetJobsStats, - GetJobsStatsFromJSON, - GetJobsStatsToJSON, - GlobalStats, - GlobalStatsFromJSON, - GlobalStatsToJSON, InlineResponse200, InlineResponse200FromJSON, InlineResponse200ToJSON, @@ -69,6 +63,9 @@ import { InlineResponse401, InlineResponse401FromJSON, InlineResponse401ToJSON, + JobsPagination, + 
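Two details worth flagging in the request above: the new offset query parameter forwards the browser's timezone offset in minutes (Date.prototype.getTimezoneOffset) so the backend can bucket stats by the user's local day, and each runtime is normalized from a duration string to seconds before the charts consume it. In isolation, mirroring the handler above:

import { ProjectStats } from "../../services";
import { parseDurationToSeconds } from "../../utils";

// Minutes behind UTC (e.g. 300 for UTC-5); lets the API bucket stats by local day.
const offset = new Date().getTimezoneOffset();

// Normalize "HH:MM:SS" runtime strings to seconds so chart math stays numeric.
const normalizeRuntimes = (response: ProjectStats[]) =>
    response.forEach((stat) => {
        if (stat.stats.runtime) stat.stats.runtime = parseDurationToSeconds(stat.stats.runtime.toString());
    });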
JobsPaginationFromJSON, + JobsPaginationToJSON, Notification, NotificationFromJSON, NotificationToJSON, @@ -87,6 +84,9 @@ import { ProjectJob, ProjectJobFromJSON, ProjectJobToJSON, + ProjectStats, + ProjectStatsFromJSON, + ProjectStatsToJSON, ProjectUpdate, ProjectUpdateFromJSON, ProjectUpdateToJSON, @@ -123,9 +123,12 @@ import { SpiderUpdate, SpiderUpdateFromJSON, SpiderUpdateToJSON, - SpidersJobsStats, - SpidersJobsStatsFromJSON, - SpidersJobsStatsToJSON, + SpidersPagination, + SpidersPaginationFromJSON, + SpidersPaginationToJSON, + SpidersStats, + SpidersStatsFromJSON, + SpidersStatsToJSON, Token, TokenFromJSON, TokenToJSON, @@ -435,9 +438,13 @@ export interface ApiProjectsUsageRequest { endDate?: string; } -export interface ApiStatsJobsStatsRequest { +export interface ApiStatsJobsRequest { pid: string; - data: Array; + startDate: string; + endDate: string; + spider: number; + page?: number; + pageSize?: number; } export interface ApiStatsListRequest { @@ -446,12 +453,17 @@ export interface ApiStatsListRequest { endDate: string; page?: number; pageSize?: number; + offset?: number; } -export interface ApiStatsSpiderJobsStatsRequest { +export interface ApiStatsSpiderJobsRequest { pid: string; sid: string; - data: Array; + startDate: string; + endDate: string; + spider: number; + page?: number; + pageSize?: number; } export interface ApiStatsSpiderListRequest { @@ -461,6 +473,15 @@ export interface ApiStatsSpiderListRequest { endDate: string; page?: number; pageSize?: number; + offset?: number; +} + +export interface ApiStatsSpidersRequest { + pid: string; + startDate: string; + endDate: string; + page?: number; + pageSize?: number; } /** @@ -2588,49 +2609,74 @@ export class ApiApi extends runtime.BaseAPI { } /** - * Retrieve stats of all jobs metadata. + * Retrieve all the jobs of a spider executed in a range of dates. 
*/ - async apiStatsJobsStatsRaw(requestParameters: ApiStatsJobsStatsRequest): Promise>> { + async apiStatsJobsRaw(requestParameters: ApiStatsJobsRequest): Promise> { if (requestParameters.pid === null || requestParameters.pid === undefined) { - throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsJobsStats.'); + throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsJobs.'); } - if (requestParameters.data === null || requestParameters.data === undefined) { - throw new runtime.RequiredError('data','Required parameter requestParameters.data was null or undefined when calling apiStatsJobsStats.'); + if (requestParameters.startDate === null || requestParameters.startDate === undefined) { + throw new runtime.RequiredError('startDate','Required parameter requestParameters.startDate was null or undefined when calling apiStatsJobs.'); + } + + if (requestParameters.endDate === null || requestParameters.endDate === undefined) { + throw new runtime.RequiredError('endDate','Required parameter requestParameters.endDate was null or undefined when calling apiStatsJobs.'); + } + + if (requestParameters.spider === null || requestParameters.spider === undefined) { + throw new runtime.RequiredError('spider','Required parameter requestParameters.spider was null or undefined when calling apiStatsJobs.'); } const queryParameters: any = {}; - const headerParameters: runtime.HTTPHeaders = {}; + if (requestParameters.page !== undefined) { + queryParameters['page'] = requestParameters.page; + } - headerParameters['Content-Type'] = 'application/json'; + if (requestParameters.pageSize !== undefined) { + queryParameters['page_size'] = requestParameters.pageSize; + } + + if (requestParameters.startDate !== undefined) { + queryParameters['start_date'] = requestParameters.startDate; + } + + if (requestParameters.endDate !== undefined) { + queryParameters['end_date'] = requestParameters.endDate; + } + + if (requestParameters.spider !== undefined) { + queryParameters['spider'] = requestParameters.spider; + } + + const headerParameters: runtime.HTTPHeaders = {}; if (this.configuration && (this.configuration.username !== undefined || this.configuration.password !== undefined)) { headerParameters["Authorization"] = "Basic " + btoa(this.configuration.username + ":" + this.configuration.password); } const response = await this.request({ - path: `/api/stats/{pid}/jobs_stats`.replace(`{${"pid"}}`, encodeURIComponent(String(requestParameters.pid))), - method: 'POST', + path: `/api/stats/{pid}/jobs`.replace(`{${"pid"}}`, encodeURIComponent(String(requestParameters.pid))), + method: 'GET', headers: headerParameters, query: queryParameters, - body: requestParameters.data.map(GetJobsStatsToJSON), }); - return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(GetJobsStatsFromJSON)); + return new runtime.JSONApiResponse(response, (jsonValue) => JobsPaginationFromJSON(jsonValue)); } /** - * Retrieve stats of all jobs metadata. + * Retrieve all the jobs of a spider executed in a range of dates. 
*/ - async apiStatsJobsStats(requestParameters: ApiStatsJobsStatsRequest): Promise> { - const response = await this.apiStatsJobsStatsRaw(requestParameters); + async apiStatsJobs(requestParameters: ApiStatsJobsRequest): Promise { + const response = await this.apiStatsJobsRaw(requestParameters); return await response.value(); } /** * Retrieve stats of all jobs in a range of time, dates must have the format YYYY-mm-dd. */ - async apiStatsListRaw(requestParameters: ApiStatsListRequest): Promise>> { + async apiStatsListRaw(requestParameters: ApiStatsListRequest): Promise>> { if (requestParameters.pid === null || requestParameters.pid === undefined) { throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsList.'); } @@ -2661,6 +2707,10 @@ export class ApiApi extends runtime.BaseAPI { queryParameters['end_date'] = requestParameters.endDate; } + if (requestParameters.offset !== undefined) { + queryParameters['offset'] = requestParameters.offset; + } + const headerParameters: runtime.HTTPHeaders = {}; if (this.configuration && (this.configuration.username !== undefined || this.configuration.password !== undefined)) { @@ -2673,65 +2723,90 @@ export class ApiApi extends runtime.BaseAPI { query: queryParameters, }); - return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(GlobalStatsFromJSON)); + return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(ProjectStatsFromJSON)); } /** * Retrieve stats of all jobs in a range of time, dates must have the format YYYY-mm-dd. */ - async apiStatsList(requestParameters: ApiStatsListRequest): Promise> { + async apiStatsList(requestParameters: ApiStatsListRequest): Promise> { const response = await this.apiStatsListRaw(requestParameters); return await response.value(); } /** - * Retrieve stats of all jobs metadata. + * Retrieve all the jobs of a spider executed in a range of dates. 
*/ - async apiStatsSpiderJobsStatsRaw(requestParameters: ApiStatsSpiderJobsStatsRequest): Promise>> { + async apiStatsSpiderJobsRaw(requestParameters: ApiStatsSpiderJobsRequest): Promise> { if (requestParameters.pid === null || requestParameters.pid === undefined) { - throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsSpiderJobsStats.'); + throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsSpiderJobs.'); } if (requestParameters.sid === null || requestParameters.sid === undefined) { - throw new runtime.RequiredError('sid','Required parameter requestParameters.sid was null or undefined when calling apiStatsSpiderJobsStats.'); + throw new runtime.RequiredError('sid','Required parameter requestParameters.sid was null or undefined when calling apiStatsSpiderJobs.'); } - if (requestParameters.data === null || requestParameters.data === undefined) { - throw new runtime.RequiredError('data','Required parameter requestParameters.data was null or undefined when calling apiStatsSpiderJobsStats.'); + if (requestParameters.startDate === null || requestParameters.startDate === undefined) { + throw new runtime.RequiredError('startDate','Required parameter requestParameters.startDate was null or undefined when calling apiStatsSpiderJobs.'); + } + + if (requestParameters.endDate === null || requestParameters.endDate === undefined) { + throw new runtime.RequiredError('endDate','Required parameter requestParameters.endDate was null or undefined when calling apiStatsSpiderJobs.'); + } + + if (requestParameters.spider === null || requestParameters.spider === undefined) { + throw new runtime.RequiredError('spider','Required parameter requestParameters.spider was null or undefined when calling apiStatsSpiderJobs.'); } const queryParameters: any = {}; - const headerParameters: runtime.HTTPHeaders = {}; + if (requestParameters.page !== undefined) { + queryParameters['page'] = requestParameters.page; + } - headerParameters['Content-Type'] = 'application/json'; + if (requestParameters.pageSize !== undefined) { + queryParameters['page_size'] = requestParameters.pageSize; + } + + if (requestParameters.startDate !== undefined) { + queryParameters['start_date'] = requestParameters.startDate; + } + + if (requestParameters.endDate !== undefined) { + queryParameters['end_date'] = requestParameters.endDate; + } + + if (requestParameters.spider !== undefined) { + queryParameters['spider'] = requestParameters.spider; + } + + const headerParameters: runtime.HTTPHeaders = {}; if (this.configuration && (this.configuration.username !== undefined || this.configuration.password !== undefined)) { headerParameters["Authorization"] = "Basic " + btoa(this.configuration.username + ":" + this.configuration.password); } const response = await this.request({ - path: `/api/stats/{pid}/spider/{sid}/jobs_stats`.replace(`{${"pid"}}`, encodeURIComponent(String(requestParameters.pid))).replace(`{${"sid"}}`, encodeURIComponent(String(requestParameters.sid))), - method: 'POST', + path: `/api/stats/{pid}/spider/{sid}/jobs`.replace(`{${"pid"}}`, encodeURIComponent(String(requestParameters.pid))).replace(`{${"sid"}}`, encodeURIComponent(String(requestParameters.sid))), + method: 'GET', headers: headerParameters, query: queryParameters, - body: requestParameters.data.map(GetJobsStatsToJSON), }); - return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(GetJobsStatsFromJSON)); + return new 
runtime.JSONApiResponse(response, (jsonValue) => JobsPaginationFromJSON(jsonValue)); } /** - * Retrieve stats of all jobs metadata. + * Retrieve all the jobs of a spider executed in a range of dates. */ - async apiStatsSpiderJobsStats(requestParameters: ApiStatsSpiderJobsStatsRequest): Promise> { - const response = await this.apiStatsSpiderJobsStatsRaw(requestParameters); + async apiStatsSpiderJobs(requestParameters: ApiStatsSpiderJobsRequest): Promise { + const response = await this.apiStatsSpiderJobsRaw(requestParameters); return await response.value(); } /** * Retrieve stats of all jobs of a spider in a range of time, dates must have the format YYYY-mm-dd. */ - async apiStatsSpiderListRaw(requestParameters: ApiStatsSpiderListRequest): Promise>> { + async apiStatsSpiderListRaw(requestParameters: ApiStatsSpiderListRequest): Promise>> { if (requestParameters.pid === null || requestParameters.pid === undefined) { throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsSpiderList.'); } @@ -2766,6 +2841,10 @@ export class ApiApi extends runtime.BaseAPI { queryParameters['end_date'] = requestParameters.endDate; } + if (requestParameters.offset !== undefined) { + queryParameters['offset'] = requestParameters.offset; + } + const headerParameters: runtime.HTTPHeaders = {}; if (this.configuration && (this.configuration.username !== undefined || this.configuration.password !== undefined)) { @@ -2778,15 +2857,72 @@ export class ApiApi extends runtime.BaseAPI { query: queryParameters, }); - return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(SpidersJobsStatsFromJSON)); + return new runtime.JSONApiResponse(response, (jsonValue) => jsonValue.map(SpidersStatsFromJSON)); } /** * Retrieve stats of all jobs of a spider in a range of time, dates must have the format YYYY-mm-dd. */ - async apiStatsSpiderList(requestParameters: ApiStatsSpiderListRequest): Promise> { + async apiStatsSpiderList(requestParameters: ApiStatsSpiderListRequest): Promise> { const response = await this.apiStatsSpiderListRaw(requestParameters); return await response.value(); } + /** + * Retrieve all the spiders executed in a range of dates. 
+ */ + async apiStatsSpidersRaw(requestParameters: ApiStatsSpidersRequest): Promise> { + if (requestParameters.pid === null || requestParameters.pid === undefined) { + throw new runtime.RequiredError('pid','Required parameter requestParameters.pid was null or undefined when calling apiStatsSpiders.'); + } + + if (requestParameters.startDate === null || requestParameters.startDate === undefined) { + throw new runtime.RequiredError('startDate','Required parameter requestParameters.startDate was null or undefined when calling apiStatsSpiders.'); + } + + if (requestParameters.endDate === null || requestParameters.endDate === undefined) { + throw new runtime.RequiredError('endDate','Required parameter requestParameters.endDate was null or undefined when calling apiStatsSpiders.'); + } + + const queryParameters: any = {}; + + if (requestParameters.page !== undefined) { + queryParameters['page'] = requestParameters.page; + } + + if (requestParameters.pageSize !== undefined) { + queryParameters['page_size'] = requestParameters.pageSize; + } + + if (requestParameters.startDate !== undefined) { + queryParameters['start_date'] = requestParameters.startDate; + } + + if (requestParameters.endDate !== undefined) { + queryParameters['end_date'] = requestParameters.endDate; + } + + const headerParameters: runtime.HTTPHeaders = {}; + + if (this.configuration && (this.configuration.username !== undefined || this.configuration.password !== undefined)) { + headerParameters["Authorization"] = "Basic " + btoa(this.configuration.username + ":" + this.configuration.password); + } + const response = await this.request({ + path: `/api/stats/{pid}/spiders`.replace(`{${"pid"}}`, encodeURIComponent(String(requestParameters.pid))), + method: 'GET', + headers: headerParameters, + query: queryParameters, + }); + + return new runtime.JSONApiResponse(response, (jsonValue) => SpidersPaginationFromJSON(jsonValue)); + } + + /** + * Retrieve all the spiders executed in a range of dates. + */ + async apiStatsSpiders(requestParameters: ApiStatsSpidersRequest): Promise { + const response = await this.apiStatsSpidersRaw(requestParameters); + return await response.value(); + } + } diff --git a/estela-web/src/services/api/generated-api/models/GetJobsStats.ts b/estela-web/src/services/api/generated-api/models/GetJobsStats.ts deleted file mode 100644 index 23245b41..00000000 --- a/estela-web/src/services/api/generated-api/models/GetJobsStats.ts +++ /dev/null @@ -1,80 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * estela API v1.0 Documentation - * estela API Swagger Specification - * - * The version of the OpenAPI document: v1 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. 
- */ - -import { exists, mapValues } from '../runtime'; -import { - Stats, - StatsFromJSON, - StatsFromJSONTyped, - StatsToJSON, -} from './'; - -/** - * - * @export - * @interface GetJobsStats - */ -export interface GetJobsStats { - /** - * - * @type {number} - * @memberof GetJobsStats - */ - jid?: number; - /** - * - * @type {number} - * @memberof GetJobsStats - */ - spider?: number; - /** - * - * @type {Stats} - * @memberof GetJobsStats - */ - stats?: Stats; -} - -export function GetJobsStatsFromJSON(json: any): GetJobsStats { - return GetJobsStatsFromJSONTyped(json, false); -} - -export function GetJobsStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): GetJobsStats { - if ((json === undefined) || (json === null)) { - return json; - } - return { - - 'jid': !exists(json, 'jid') ? undefined : json['jid'], - 'spider': !exists(json, 'spider') ? undefined : json['spider'], - 'stats': !exists(json, 'stats') ? undefined : StatsFromJSON(json['stats']), - }; -} - -export function GetJobsStatsToJSON(value?: GetJobsStats | null): any { - if (value === undefined) { - return undefined; - } - if (value === null) { - return null; - } - return { - - 'jid': value.jid, - 'spider': value.spider, - 'stats': StatsToJSON(value.stats), - }; -} - - diff --git a/estela-web/src/services/api/generated-api/models/JobsMetadata.ts b/estela-web/src/services/api/generated-api/models/JobsMetadata.ts deleted file mode 100644 index 04af4de0..00000000 --- a/estela-web/src/services/api/generated-api/models/JobsMetadata.ts +++ /dev/null @@ -1,71 +0,0 @@ -/* tslint:disable */ -/* eslint-disable */ -/** - * estela API v1.0 Documentation - * estela API Swagger Specification - * - * The version of the OpenAPI document: v1 - * - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). - * https://openapi-generator.tech - * Do not edit the class manually. - */ - -import { exists, mapValues } from '../runtime'; -/** - * - * @export - * @interface JobsMetadata - */ -export interface JobsMetadata { - /** - * A unique integer value identifying this job. - * @type {number} - * @memberof JobsMetadata - */ - readonly jid?: number; - /** - * Spider sid. - * @type {number} - * @memberof JobsMetadata - */ - spider: number; - /** - * - * @type {string} - * @memberof JobsMetadata - */ - readonly jobStatus?: string; -} - -export function JobsMetadataFromJSON(json: any): JobsMetadata { - return JobsMetadataFromJSONTyped(json, false); -} - -export function JobsMetadataFromJSONTyped(json: any, ignoreDiscriminator: boolean): JobsMetadata { - if ((json === undefined) || (json === null)) { - return json; - } - return { - - 'jid': !exists(json, 'jid') ? undefined : json['jid'], - 'spider': json['spider'], - 'jobStatus': !exists(json, 'job_status') ? 
undefined : json['job_status'], - }; -} - -export function JobsMetadataToJSON(value?: JobsMetadata | null): any { - if (value === undefined) { - return undefined; - } - if (value === null) { - return null; - } - return { - - 'spider': value.spider, - }; -} - - diff --git a/estela-web/src/services/api/generated-api/models/JobsPagination.ts b/estela-web/src/services/api/generated-api/models/JobsPagination.ts new file mode 100644 index 00000000..18e50581 --- /dev/null +++ b/estela-web/src/services/api/generated-api/models/JobsPagination.ts @@ -0,0 +1,86 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * estela API v1.0 Documentation + * estela API Swagger Specification + * + * The version of the OpenAPI document: v1 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. + */ + +import { exists, mapValues } from '../runtime'; +import { + SpiderJobStats, + SpiderJobStatsFromJSON, + SpiderJobStatsFromJSONTyped, + SpiderJobStatsToJSON, +} from './'; + +/** + * + * @export + * @interface JobsPagination + */ +export interface JobsPagination { + /** + * + * @type {number} + * @memberof JobsPagination + */ + count: number; + /** + * + * @type {string} + * @memberof JobsPagination + */ + readonly next?: string | null; + /** + * + * @type {string} + * @memberof JobsPagination + */ + readonly previous?: string | null; + /** + * + * @type {Array} + * @memberof JobsPagination + */ + results: Array; +} + +export function JobsPaginationFromJSON(json: any): JobsPagination { + return JobsPaginationFromJSONTyped(json, false); +} + +export function JobsPaginationFromJSONTyped(json: any, ignoreDiscriminator: boolean): JobsPagination { + if ((json === undefined) || (json === null)) { + return json; + } + return { + + 'count': json['count'], + 'next': !exists(json, 'next') ? undefined : json['next'], + 'previous': !exists(json, 'previous') ? 
undefined : json['previous'], + 'results': ((json['results'] as Array).map(SpiderJobStatsFromJSON)), + }; +} + +export function JobsPaginationToJSON(value?: JobsPagination | null): any { + if (value === undefined) { + return undefined; + } + if (value === null) { + return null; + } + return { + + 'count': value.count, + 'results': ((value.results as Array).map(SpiderJobStatsToJSON)), + }; +} + + diff --git a/estela-web/src/services/api/generated-api/models/GlobalStats.ts b/estela-web/src/services/api/generated-api/models/ProjectStats.ts similarity index 57% rename from estela-web/src/services/api/generated-api/models/GlobalStats.ts rename to estela-web/src/services/api/generated-api/models/ProjectStats.ts index 713489db..11c7faf9 100644 --- a/estela-web/src/services/api/generated-api/models/GlobalStats.ts +++ b/estela-web/src/services/api/generated-api/models/ProjectStats.ts @@ -14,10 +14,6 @@ import { exists, mapValues } from '../runtime'; import { - JobsMetadata, - JobsMetadataFromJSON, - JobsMetadataFromJSONTyped, - JobsMetadataToJSON, Stats, StatsFromJSON, StatsFromJSONTyped, @@ -27,34 +23,28 @@ import { /** * * @export - * @interface GlobalStats + * @interface ProjectStats */ -export interface GlobalStats { +export interface ProjectStats { /** * * @type {Date} - * @memberof GlobalStats + * @memberof ProjectStats */ date: Date; /** * * @type {Stats} - * @memberof GlobalStats + * @memberof ProjectStats */ stats: Stats; - /** - * - * @type {Array} - * @memberof GlobalStats - */ - jobsMetadata: Array; } -export function GlobalStatsFromJSON(json: any): GlobalStats { - return GlobalStatsFromJSONTyped(json, false); +export function ProjectStatsFromJSON(json: any): ProjectStats { + return ProjectStatsFromJSONTyped(json, false); } -export function GlobalStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): GlobalStats { +export function ProjectStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): ProjectStats { if ((json === undefined) || (json === null)) { return json; } @@ -62,11 +52,10 @@ export function GlobalStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean 'date': (new Date(json['date'])), 'stats': StatsFromJSON(json['stats']), - 'jobsMetadata': ((json['jobs_metadata'] as Array).map(JobsMetadataFromJSON)), }; } -export function GlobalStatsToJSON(value?: GlobalStats | null): any { +export function ProjectStatsToJSON(value?: ProjectStats | null): any { if (value === undefined) { return undefined; } @@ -77,7 +66,6 @@ export function GlobalStatsToJSON(value?: GlobalStats | null): any { 'date': (value.date.toISOString().substr(0,10)), 'stats': StatsToJSON(value.stats), - 'jobs_metadata': ((value.jobsMetadata as Array).map(JobsMetadataToJSON)), }; } diff --git a/estela-web/src/services/api/generated-api/models/SpiderJobStats.ts b/estela-web/src/services/api/generated-api/models/SpiderJobStats.ts new file mode 100644 index 00000000..1bbe288e --- /dev/null +++ b/estela-web/src/services/api/generated-api/models/SpiderJobStats.ts @@ -0,0 +1,201 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * estela API v1.0 Documentation + * estela API Swagger Specification + * + * The version of the OpenAPI document: v1 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
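With the old jobs_stats POST endpoints gone, callers walk jobs through this pagination envelope instead. A minimal consumption sketch, run inside an async method holding an ApiApi instance (the ids and dates are illustrative):

const page: JobsPagination = await apiService.apiStatsJobs({
    pid: "project-uuid",            // illustrative project id
    spider: 1,                      // illustrative spider id
    startDate: "2023-05-01T00:00:00Z",
    endDate: "2023-05-02T00:00:00Z",
    page: 1,
    pageSize: 10,
});
// count is the total across all pages; next/previous are nullable cursors.
console.log(page.count, page.results.length, page.next ?? "last page");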
+ */
+
+import { exists, mapValues } from '../runtime';
+import {
+    SpiderJobArg,
+    SpiderJobArgFromJSON,
+    SpiderJobArgFromJSONTyped,
+    SpiderJobArgToJSON,
+    SpiderJobEnvVar,
+    SpiderJobEnvVarFromJSON,
+    SpiderJobEnvVarFromJSONTyped,
+    SpiderJobEnvVarToJSON,
+    SpiderJobTag,
+    SpiderJobTagFromJSON,
+    SpiderJobTagFromJSONTyped,
+    SpiderJobTagToJSON,
+    Stats,
+    StatsFromJSON,
+    StatsFromJSONTyped,
+    StatsToJSON,
+} from './';
+
+/**
+ *
+ * @export
+ * @interface SpiderJobStats
+ */
+export interface SpiderJobStats {
+    /**
+     * A unique integer value identifying this job.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    readonly jid?: number;
+    /**
+     *
+     * @type {string}
+     * @memberof SpiderJobStats
+     */
+    readonly spider?: string;
+    /**
+     * Job creation date.
+     * @type {Date}
+     * @memberof SpiderJobStats
+     */
+    readonly created?: Date;
+    /**
+     * Unique job name.
+     * @type {string}
+     * @memberof SpiderJobStats
+     */
+    readonly name?: string;
+    /**
+     * The elapsed seconds the spider job was running.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    lifespan?: number;
+    /**
+     * The total bytes received in responses.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    totalResponseBytes?: number;
+    /**
+     * The number of items extracted in the job.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    itemCount?: number;
+    /**
+     * The number of requests made by the spider job.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    requestCount?: number;
+    /**
+     * Job arguments.
+     * @type {Array<SpiderJobArg>}
+     * @memberof SpiderJobStats
+     */
+    args?: Array<SpiderJobArg>;
+    /**
+     * Job env variables.
+     * @type {Array<SpiderJobEnvVar>}
+     * @memberof SpiderJobStats
+     */
+    envVars?: Array<SpiderJobEnvVar>;
+    /**
+     * Job tags.
+     * @type {Array<SpiderJobTag>}
+     * @memberof SpiderJobStats
+     */
+    tags?: Array<SpiderJobTag>;
+    /**
+     * Current job status.
+     * @type {string}
+     * @memberof SpiderJobStats
+     */
+    readonly jobStatus?: string;
+    /**
+     * Related cron job.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    cronjob?: number | null;
+    /**
+     * Days before data is deleted.
+     * @type {number}
+     * @memberof SpiderJobStats
+     */
+    dataExpiryDays?: number | null;
+    /**
+     * Data status.
+     * @type {string}
+     * @memberof SpiderJobStats
+     */
+    dataStatus?: SpiderJobStatsDataStatusEnum;
+    /**
+     *
+     * @type {Stats}
+     * @memberof SpiderJobStats
+     */
+    stats: Stats;
+}
+
+/**
+* @export
+* @enum {string}
+*/
+export enum SpiderJobStatsDataStatusEnum {
+    Persistent = 'PERSISTENT',
+    Pending = 'PENDING',
+    Deleted = 'DELETED'
+}
+
+export function SpiderJobStatsFromJSON(json: any): SpiderJobStats {
+    return SpiderJobStatsFromJSONTyped(json, false);
+}
+
+export function SpiderJobStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): SpiderJobStats {
+    if ((json === undefined) || (json === null)) {
+        return json;
+    }
+    return {
+
+        'jid': !exists(json, 'jid') ? undefined : json['jid'],
+        'spider': !exists(json, 'spider') ? undefined : json['spider'],
+        'created': !exists(json, 'created') ? undefined : (new Date(json['created'])),
+        'name': !exists(json, 'name') ? undefined : json['name'],
+        'lifespan': !exists(json, 'lifespan') ? undefined : json['lifespan'],
+        'totalResponseBytes': !exists(json, 'total_response_bytes') ? undefined : json['total_response_bytes'],
+        'itemCount': !exists(json, 'item_count') ? undefined : json['item_count'],
+        'requestCount': !exists(json, 'request_count') ? undefined : json['request_count'],
+        'args': !exists(json, 'args') ? undefined : ((json['args'] as Array<any>).map(SpiderJobArgFromJSON)),
+        'envVars': !exists(json, 'env_vars') ? undefined : ((json['env_vars'] as Array<any>).map(SpiderJobEnvVarFromJSON)),
+        'tags': !exists(json, 'tags') ? undefined : ((json['tags'] as Array<any>).map(SpiderJobTagFromJSON)),
+        'jobStatus': !exists(json, 'job_status') ? undefined : json['job_status'],
+        'cronjob': !exists(json, 'cronjob') ? undefined : json['cronjob'],
+        'dataExpiryDays': !exists(json, 'data_expiry_days') ? undefined : json['data_expiry_days'],
+        'dataStatus': !exists(json, 'data_status') ? undefined : json['data_status'],
+        'stats': StatsFromJSON(json['stats']),
+    };
+}
+
+export function SpiderJobStatsToJSON(value?: SpiderJobStats | null): any {
+    if (value === undefined) {
+        return undefined;
+    }
+    if (value === null) {
+        return null;
+    }
+    return {
+
+        'lifespan': value.lifespan,
+        'total_response_bytes': value.totalResponseBytes,
+        'item_count': value.itemCount,
+        'request_count': value.requestCount,
+        'args': value.args === undefined ? undefined : ((value.args as Array<any>).map(SpiderJobArgToJSON)),
+        'env_vars': value.envVars === undefined ? undefined : ((value.envVars as Array<any>).map(SpiderJobEnvVarToJSON)),
+        'tags': value.tags === undefined ? undefined : ((value.tags as Array<any>).map(SpiderJobTagToJSON)),
+        'cronjob': value.cronjob,
+        'data_expiry_days': value.dataExpiryDays,
+        'data_status': value.dataStatus,
+        'stats': StatsToJSON(value.stats),
+    };
+}
+
+
diff --git a/estela-web/src/services/api/generated-api/models/SpidersPagination.ts b/estela-web/src/services/api/generated-api/models/SpidersPagination.ts
new file mode 100644
index 00000000..e75afa3c
--- /dev/null
+++ b/estela-web/src/services/api/generated-api/models/SpidersPagination.ts
@@ -0,0 +1,86 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * estela API v1.0 Documentation
+ * estela API Swagger Specification
+ *
+ * The version of the OpenAPI document: v1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+import { exists, mapValues } from '../runtime';
+import {
+    Spider,
+    SpiderFromJSON,
+    SpiderFromJSONTyped,
+    SpiderToJSON,
+} from './';
+
+/**
+ *
+ * @export
+ * @interface SpidersPagination
+ */
+export interface SpidersPagination {
+    /**
+     *
+     * @type {number}
+     * @memberof SpidersPagination
+     */
+    count: number;
+    /**
+     *
+     * @type {string}
+     * @memberof SpidersPagination
+     */
+    readonly next?: string | null;
+    /**
+     *
+     * @type {string}
+     * @memberof SpidersPagination
+     */
+    readonly previous?: string | null;
+    /**
+     *
+     * @type {Array<Spider>}
+     * @memberof SpidersPagination
+     */
+    results: Array<Spider>;
+}
+
+export function SpidersPaginationFromJSON(json: any): SpidersPagination {
+    return SpidersPaginationFromJSONTyped(json, false);
+}
+
+export function SpidersPaginationFromJSONTyped(json: any, ignoreDiscriminator: boolean): SpidersPagination {
+    if ((json === undefined) || (json === null)) {
+        return json;
+    }
+    return {
+
+        'count': json['count'],
+        'next': !exists(json, 'next') ? undefined : json['next'],
+        'previous': !exists(json, 'previous') ? undefined : json['previous'],
+        'results': ((json['results'] as Array<any>).map(SpiderFromJSON)),
+    };
+}
+
+export function SpidersPaginationToJSON(value?: SpidersPagination | null): any {
+    if (value === undefined) {
+        return undefined;
+    }
+    if (value === null) {
+        return null;
+    }
+    return {
+
+        'count': value.count,
+        'results': ((value.results as Array<any>).map(SpiderToJSON)),
+    };
+}
+
+
diff --git a/estela-web/src/services/api/generated-api/models/SpidersJobsStats.ts b/estela-web/src/services/api/generated-api/models/SpidersStats.ts
similarity index 55%
rename from estela-web/src/services/api/generated-api/models/SpidersJobsStats.ts
rename to estela-web/src/services/api/generated-api/models/SpidersStats.ts
index 638b08f0..db4864ab 100644
--- a/estela-web/src/services/api/generated-api/models/SpidersJobsStats.ts
+++ b/estela-web/src/services/api/generated-api/models/SpidersStats.ts
@@ -14,10 +14,6 @@
 
 import { exists, mapValues } from '../runtime';
 import {
-    JobsMetadata,
-    JobsMetadataFromJSON,
-    JobsMetadataFromJSONTyped,
-    JobsMetadataToJSON,
     Stats,
     StatsFromJSON,
     StatsFromJSONTyped,
@@ -27,34 +23,28 @@ import {
 /**
  *
  * @export
- * @interface SpidersJobsStats
+ * @interface SpidersStats
 */
-export interface SpidersJobsStats {
+export interface SpidersStats {
     /**
      *
      * @type {Date}
-     * @memberof SpidersJobsStats
+     * @memberof SpidersStats
      */
     date: Date;
     /**
      *
     * @type {Stats}
-     * @memberof SpidersJobsStats
+     * @memberof SpidersStats
      */
     stats: Stats;
-    /**
-     *
-     * @type {Array<JobsMetadata>}
-     * @memberof SpidersJobsStats
-     */
-    jobsMetadata: Array<JobsMetadata>;
 }
 
-export function SpidersJobsStatsFromJSON(json: any): SpidersJobsStats {
-    return SpidersJobsStatsFromJSONTyped(json, false);
+export function SpidersStatsFromJSON(json: any): SpidersStats {
+    return SpidersStatsFromJSONTyped(json, false);
 }
 
-export function SpidersJobsStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): SpidersJobsStats {
+export function SpidersStatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): SpidersStats {
     if ((json === undefined) || (json === null)) {
         return json;
     }
@@ -62,11 +52,10 @@ export function SpidersJobsStatsFromJSONTyped(json: any, ignoreDiscriminator: bo
 
         'date': (new Date(json['date'])),
         'stats': StatsFromJSON(json['stats']),
-        'jobsMetadata': ((json['jobs_metadata'] as Array<any>).map(JobsMetadataFromJSON)),
     };
 }
 
-export function SpidersJobsStatsToJSON(value?: SpidersJobsStats | null): any {
+export function SpidersStatsToJSON(value?: SpidersStats | null): any {
     if (value === undefined) {
         return undefined;
     }
@@ -77,7 +66,6 @@ export function SpidersJobsStatsToJSON(value?: SpidersJobsStats | null): any {
 
         'date': (value.date.toISOString().substr(0,10)),
         'stats': StatsToJSON(value.stats),
-        'jobs_metadata': ((value.jobsMetadata as Array<any>).map(JobsMetadataToJSON)),
     };
 }
 
diff --git a/estela-web/src/services/api/generated-api/models/Stats.ts b/estela-web/src/services/api/generated-api/models/Stats.ts
index 344d6325..5a6ca40a 100644
--- a/estela-web/src/services/api/generated-api/models/Stats.ts
+++ b/estela-web/src/services/api/generated-api/models/Stats.ts
@@ -89,7 +89,7 @@ export interface Stats {
      * @type {CoverageStats}
      * @memberof Stats
      */
-    coverage: CoverageStats;
+    coverage?: CoverageStats;
 }
 
 export function StatsFromJSON(json: any): Stats {
@@ -109,7 +109,7 @@ export function StatsFromJSONTyped(json: any, ignoreDiscriminator: boolean): Sta
         'statusCodes': StatusCodesStatsFromJSON(json['status_codes']),
         'successRate': !exists(json, 'success_rate') ? undefined : json['success_rate'],
         'logs': LogsStatsFromJSON(json['logs']),
-        'coverage': CoverageStatsFromJSON(json['coverage']),
+        'coverage': !exists(json, 'coverage') ? undefined : CoverageStatsFromJSON(json['coverage']),
     };
 }
 
diff --git a/estela-web/src/services/api/generated-api/models/index.ts b/estela-web/src/services/api/generated-api/models/index.ts
index cc5e13c9..2723746d 100644
--- a/estela-web/src/services/api/generated-api/models/index.ts
+++ b/estela-web/src/services/api/generated-api/models/index.ts
@@ -7,8 +7,6 @@ export * from './Deploy';
 export * from './DeployCreate';
 export * from './DeployUpdate';
 export * from './FieldCoverageStats';
-export * from './GetJobsStats';
-export * from './GlobalStats';
 export * from './InlineResponse200';
 export * from './InlineResponse2001';
 export * from './InlineResponse2002';
@@ -19,7 +17,7 @@ export * from './InlineResponse2006';
 export * from './InlineResponse2007';
 export * from './InlineResponse2008';
 export * from './InlineResponse401';
-export * from './JobsMetadata';
+export * from './JobsPagination';
 export * from './JobsStats';
 export * from './LogsStats';
 export * from './Notification';
@@ -31,6 +29,7 @@ export * from './ProjectActivity';
 export * from './ProjectCronJob';
 export * from './ProjectDetail';
 export * from './ProjectJob';
+export * from './ProjectStats';
 export * from './ProjectUpdate';
 export * from './ProjectUsage';
 export * from './ResetPasswordConfirm';
@@ -43,10 +42,12 @@ export * from './SpiderJob';
 export * from './SpiderJobArg';
 export * from './SpiderJobCreate';
 export * from './SpiderJobEnvVar';
+export * from './SpiderJobStats';
 export * from './SpiderJobTag';
 export * from './SpiderJobUpdate';
 export * from './SpiderUpdate';
-export * from './SpidersJobsStats';
+export * from './SpidersPagination';
+export * from './SpidersStats';
 export * from './Stats';
 export * from './StatusCodesStats';
 export * from './Token';
diff --git a/estela-web/src/utils.ts b/estela-web/src/utils.ts
index fd0c250f..58e7da36 100644
--- a/estela-web/src/utils.ts
+++ b/estela-web/src/utils.ts
@@ -37,6 +37,24 @@ export function formatSecondsToHHMMSS(seconds: number): string {
     return formattedTime;
 }
 
+export function setValArr({ arr, val, index }: { arr: number[]; val: number; index: number }): number[] {
+    arr.fill(val, index, index + 1);
+    return arr;
+}
+
+export function parseDurationToSeconds(durationString: string | undefined): number {
+    if (durationString) {
+        const [hours, minutes, seconds] = durationString.split(":").map(Number);
+        const totalSeconds = hours * 3600 + minutes * 60 + seconds;
+        return totalSeconds;
+    }
+    return 0;
+}
+
+export function sumArr(arr: number[]): number {
+    return arr.reduce((acc, curr) => acc + curr, 0);
+}
+
 export function formatBytes(bytes: number): BytesMetric {
     if (!+bytes) {
         return {
diff --git a/estela-web/tailwind.config.js b/estela-web/tailwind.config.js
index c8aaf959..6d80159b 100644
--- a/estela-web/tailwind.config.js
+++ b/estela-web/tailwind.config.js
@@ -37,6 +37,7 @@ module.exports = {
                 "estela-red-low": "#FFF5F2",
                 "estela-background": "#FBFCFD",
                 "estela-complementary-purple": "#A13764",
+                "bitmaker-primary": "#FF5733",
             },
         },
     },
diff --git a/estela-web/tsconfig.json b/estela-web/tsconfig.json
index a273b0cf..293da700 100644
--- a/estela-web/tsconfig.json
+++ b/estela-web/tsconfig.json
@@ -1,6 +1,6 @@
 {
     "compilerOptions": {
-        "target": "es5",
+        "target": "ES2015",
         "lib": [
             "dom",
             "dom.iterable",
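Editor's note on the new utils.ts helpers: the patch adds parseDurationToSeconds, setValArr, and sumArr without doc comments, so the sketch below illustrates how they appear intended to compose when building runtime series for the stats views. It is a hedged usage sketch, not code from this PR: the runtime string and the three-slot series array are invented for illustration, and the import path assumes a caller inside estela-web/src. parseDurationToSeconds expects a zero-padded "HH:MM:SS" string, which is how the backend's DurationField-backed runtime serializes for spans under a day; a DRF duration of a day or more (e.g. "1 02:03:04") would make Number() produce NaN here, which may be worth flagging in review.

import { parseDurationToSeconds, setValArr, sumArr } from "./utils";

// Parse an "HH:MM:SS" runtime into seconds (illustrative value, not from the PR).
const runtime = "01:30:15";
const seconds = parseDurationToSeconds(runtime); // 1*3600 + 30*60 + 15 = 5415

// Write that value into one slot of a fixed-length per-day series via setValArr
// (fill(val, index, index + 1) sets exactly the element at `index`)...
let series: number[] = [0, 0, 0]; // hypothetical three-day window
series = setValArr({ arr: series, val: seconds, index: 1 }); // [0, 5415, 0]

// ...then total the series with sumArr for display.
const totalSeconds = sumArr(series); // 5415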