From 3e58ffb5045ea928d260e7c2a3bc4b3965820c0e Mon Sep 17 00:00:00 2001 From: "vrishab.srivatsa]" Date: Mon, 22 Apr 2024 20:10:33 +0530 Subject: [PATCH] feat:authentication analytics - separated from sdk events --- crates/analytics/src/auth_events.rs | 7 ++ .../analytics/src/auth_events/accumulator.rs | 96 +++++++++++++++ crates/analytics/src/auth_events/core.rs | 115 ++++++++++++++++++ crates/analytics/src/auth_events/metrics.rs | 114 +++++++++++++++++ .../metrics/authentication_attempt_count.rs | 101 +++++++++++++++ .../metrics/authentication_success_count.rs | 101 +++++++++++++++ .../metrics/challenge_attempt_count.rs | 90 ++++++++++++++ .../metrics/challenge_flow_count.rs | 95 +++++++++++++++ .../metrics/challenge_success_count.rs | 94 ++++++++++++++ .../metrics/frictionless_flow_count.rs | 97 +++++++++++++++ .../auth_events/metrics/three_ds_sdk_count.rs | 93 ++++++++++++++ crates/analytics/src/clickhouse.rs | 23 +++- crates/analytics/src/core.rs | 5 + crates/analytics/src/lib.rs | 34 ++++++ crates/analytics/src/query.rs | 11 ++ crates/analytics/src/sqlx.rs | 2 + crates/analytics/src/types.rs | 3 + crates/analytics/src/utils.rs | 5 + crates/api_models/src/analytics.rs | 13 ++ .../api_models/src/analytics/auth_events.rs | 111 +++++++++++++++++ crates/api_models/src/analytics/sdk_events.rs | 1 + crates/api_models/src/events.rs | 3 +- crates/router/src/analytics.rs | 47 ++++++- 23 files changed, 1252 insertions(+), 9 deletions(-) create mode 100644 crates/analytics/src/auth_events.rs create mode 100644 crates/analytics/src/auth_events/accumulator.rs create mode 100644 crates/analytics/src/auth_events/core.rs create mode 100644 crates/analytics/src/auth_events/metrics.rs create mode 100644 crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/authentication_success_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/challenge_flow_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/challenge_success_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs create mode 100644 crates/analytics/src/auth_events/metrics/three_ds_sdk_count.rs create mode 100644 crates/api_models/src/analytics/auth_events.rs diff --git a/crates/analytics/src/auth_events.rs b/crates/analytics/src/auth_events.rs new file mode 100644 index 000000000000..a9670c50076e --- /dev/null +++ b/crates/analytics/src/auth_events.rs @@ -0,0 +1,7 @@ +pub mod accumulator; +mod core; +pub mod metrics; +pub use accumulator::{AuthEventMetricAccumulator, AuthEventMetricsAccumulator}; +pub trait AuthEventAnalytics: metrics::AuthEventMetricAnalytics {} + +pub use self::core::get_metrics; diff --git a/crates/analytics/src/auth_events/accumulator.rs b/crates/analytics/src/auth_events/accumulator.rs new file mode 100644 index 000000000000..08a56e952094 --- /dev/null +++ b/crates/analytics/src/auth_events/accumulator.rs @@ -0,0 +1,96 @@ +use api_models::analytics::auth_events::AuthEventMetricsBucketValue; +use router_env::logger; + +use super::metrics::AuthEventMetricRow; + +#[derive(Debug, Default)] +pub struct AuthEventMetricsAccumulator { + pub three_ds_sdk_count: CountAccumulator, + pub authentication_attempt_count: CountAccumulator, + pub authentication_success_count: CountAccumulator, + pub challenge_flow_count: CountAccumulator, + pub challenge_attempt_count: CountAccumulator, + pub 
challenge_success_count: CountAccumulator,
+    pub frictionless_flow_count: CountAccumulator,
+}
+
+#[derive(Debug, Default)]
+#[repr(transparent)]
+pub struct CountAccumulator {
+    pub count: Option<i64>,
+}
+
+#[derive(Debug, Default)]
+pub struct AverageAccumulator {
+    pub total: u32,
+    pub count: u32,
+}
+
+pub trait AuthEventMetricAccumulator {
+    type MetricOutput;
+
+    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow);
+
+    fn collect(self) -> Self::MetricOutput;
+}
+
+impl AuthEventMetricAccumulator for CountAccumulator {
+    type MetricOutput = Option<u64>;
+    #[inline]
+    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow) {
+        self.count = match (self.count, metrics.count) {
+            (None, None) => None,
+            (None, i @ Some(_)) | (i @ Some(_), None) => i,
+            (Some(a), Some(b)) => Some(a + b),
+        }
+    }
+    #[inline]
+    fn collect(self) -> Self::MetricOutput {
+        self.count.and_then(|i| u64::try_from(i).ok())
+    }
+}
+
+impl AuthEventMetricAccumulator for AverageAccumulator {
+    type MetricOutput = Option<f64>;
+
+    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow) {
+        let total = metrics
+            .total
+            .as_ref()
+            .and_then(bigdecimal::ToPrimitive::to_u32);
+        let count = metrics.count.and_then(|total| u32::try_from(total).ok());
+
+        match (total, count) {
+            (Some(total), Some(count)) => {
+                self.total += total;
+                self.count += count;
+            }
+            _ => {
+                logger::error!(message="Dropping metrics for average accumulator", metric=?metrics);
+            }
+        }
+    }
+
+    fn collect(self) -> Self::MetricOutput {
+        if self.count == 0 {
+            None
+        } else {
+            Some(f64::from(self.total) / f64::from(self.count))
+        }
+    }
+}
+
+impl AuthEventMetricsAccumulator {
+    #[allow(dead_code)]
+    pub fn collect(self) -> AuthEventMetricsBucketValue {
+        AuthEventMetricsBucketValue {
+            three_ds_sdk_count: self.three_ds_sdk_count.collect(),
+            authentication_attempt_count: self.authentication_attempt_count.collect(),
+            authentication_success_count: self.authentication_success_count.collect(),
+            challenge_flow_count: self.challenge_flow_count.collect(),
+            challenge_attempt_count: self.challenge_attempt_count.collect(),
+            challenge_success_count: self.challenge_success_count.collect(),
+            frictionless_flow_count: self.frictionless_flow_count.collect(),
+        }
+    }
+}
diff --git a/crates/analytics/src/auth_events/core.rs b/crates/analytics/src/auth_events/core.rs
new file mode 100644
index 000000000000..933948310574
--- /dev/null
+++ b/crates/analytics/src/auth_events/core.rs
@@ -0,0 +1,115 @@
+use std::collections::HashMap;
+
+use api_models::analytics::{
+    auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier, MetricsBucketResponse},
+    AnalyticsMetadata, GetAuthEventMetricRequest, MetricsResponse,
+};
+use error_stack::ResultExt;
+use router_env::{instrument, logger, tracing};
+
+use super::AuthEventMetricsAccumulator;
+use crate::{
+    auth_events::AuthEventMetricAccumulator,
+    errors::{AnalyticsError, AnalyticsResult},
+    AnalyticsProvider,
+};
+
+#[instrument(skip_all)]
+pub async fn get_metrics(
+    pool: &AnalyticsProvider,
+    merchant_id: &String,
+    publishable_key: Option<&String>,
+    req: GetAuthEventMetricRequest,
+) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
+    let mut metrics_accumulator: HashMap<
+        AuthEventMetricsBucketIdentifier,
+        AuthEventMetricsAccumulator,
+    > = HashMap::new();
+
+    if let Some(publishable_key) = publishable_key {
+        let mut set = tokio::task::JoinSet::new();
+        for metric_type in req.metrics.iter().cloned() {
+            let req = req.clone();
+            let merchant_id_scoped = merchant_id.to_owned();
+            let publishable_key_scoped =
publishable_key.to_owned(); + let pool = pool.clone(); + set.spawn(async move { + let data = pool + .get_auth_event_metrics( + &metric_type, + &merchant_id_scoped, + &publishable_key_scoped, + &req.time_series.map(|t| t.granularity), + &req.time_range, + ) + .await + .change_context(AnalyticsError::UnknownError); + (metric_type, data) + }); + } + + while let Some((metric, data)) = set + .join_next() + .await + .transpose() + .change_context(AnalyticsError::UnknownError)? + { + logger::info!("Logging Result {:?}", data); + for (id, value) in data? { + let metrics_builder = metrics_accumulator.entry(id).or_default(); + match metric { + AuthEventMetrics::ThreeDsSdkCount => metrics_builder + .three_ds_sdk_count + .add_metrics_bucket(&value), + AuthEventMetrics::AuthenticationAttemptCount => metrics_builder + .authentication_attempt_count + .add_metrics_bucket(&value), + AuthEventMetrics::AuthenticationSuccessCount => metrics_builder + .authentication_success_count + .add_metrics_bucket(&value), + AuthEventMetrics::ChallengeFlowCount => metrics_builder + .challenge_flow_count + .add_metrics_bucket(&value), + AuthEventMetrics::ChallengeAttemptCount => metrics_builder + .challenge_attempt_count + .add_metrics_bucket(&value), + AuthEventMetrics::ChallengeSuccessCount => metrics_builder + .challenge_success_count + .add_metrics_bucket(&value), + AuthEventMetrics::FrictionlessFlowCount => metrics_builder + .frictionless_flow_count + .add_metrics_bucket(&value), + } + } + + logger::debug!( + "Analytics Accumulated Results: metric: {}, results: {:#?}", + metric, + metrics_accumulator + ); + } + + let query_data: Vec = metrics_accumulator + .into_iter() + .map(|(id, val)| MetricsBucketResponse { + values: val.collect(), + dimensions: id, + }) + .collect(); + + Ok(MetricsResponse { + query_data, + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) + } else { + logger::error!("Publishable key not present for merchant ID"); + Ok(MetricsResponse { + query_data: vec![], + meta_data: [AnalyticsMetadata { + current_time_range: req.time_range, + }], + }) + } +} diff --git a/crates/analytics/src/auth_events/metrics.rs b/crates/analytics/src/auth_events/metrics.rs new file mode 100644 index 000000000000..b3442022f5ec --- /dev/null +++ b/crates/analytics/src/auth_events/metrics.rs @@ -0,0 +1,114 @@ +use api_models::analytics::{ + auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use time::PrimitiveDateTime; + +use crate::{ + query::{Aggregate, GroupByClause, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult}, +}; + +mod authentication_attempt_count; +mod authentication_success_count; +mod challenge_attempt_count; +mod challenge_flow_count; +mod challenge_success_count; +mod frictionless_flow_count; +mod three_ds_sdk_count; + +use authentication_attempt_count::AuthenticationAttemptCount; +use authentication_success_count::AuthenticationSuccessCount; +use challenge_attempt_count::ChallengeAttemptCount; +use challenge_flow_count::ChallengeFlowCount; +use challenge_success_count::ChallengeSuccessCount; +use frictionless_flow_count::FrictionlessFlowCount; +use three_ds_sdk_count::ThreeDsSdkCount; + +#[derive(Debug, PartialEq, Eq, serde::Deserialize)] +pub struct AuthEventMetricRow { + pub total: Option, + pub count: Option, + pub time_bucket: Option, + pub payment_method: Option, + pub platform: Option, + pub browser_name: Option, + pub source: Option, + pub component: Option, + pub 
payment_experience: Option, +} + +pub trait AuthEventMetricAnalytics: LoadRow {} + +#[async_trait::async_trait] +pub trait AuthEventMetric +where + T: AnalyticsDataSource + AuthEventMetricAnalytics, +{ + async fn load_metrics( + &self, + merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult>; +} + +#[async_trait::async_trait] +impl AuthEventMetric for AuthEventMetrics +where + T: AnalyticsDataSource + AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + match self { + Self::ThreeDsSdkCount => { + ThreeDsSdkCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::AuthenticationAttemptCount => { + AuthenticationAttemptCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::AuthenticationSuccessCount => { + AuthenticationSuccessCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::ChallengeFlowCount => { + ChallengeFlowCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::ChallengeAttemptCount => { + ChallengeAttemptCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::ChallengeSuccessCount => { + ChallengeSuccessCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + Self::FrictionlessFlowCount => { + FrictionlessFlowCount + .load_metrics(merchant_id, publishable_key, granularity, time_range, pool) + .await + } + } + } +} diff --git a/crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs b/crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs new file mode 100644 index 000000000000..dd65458b1e3f --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs @@ -0,0 +1,101 @@ +use api_models::analytics::{ + auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity, + TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct AuthenticationAttemptCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for AuthenticationAttemptCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + 
.switch()?; + } + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::AuthenticationCallInit) + .switch()?; + + query_builder + .add_filter_clause("log_type", "INFO") + .switch()?; + + query_builder + .add_filter_clause("category", "API") + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/authentication_success_count.rs b/crates/analytics/src/auth_events/metrics/authentication_success_count.rs new file mode 100644 index 000000000000..7559abe8e2a1 --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/authentication_success_count.rs @@ -0,0 +1,101 @@ +use api_models::analytics::{ + auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity, + TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct AuthenticationSuccessCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for AuthenticationSuccessCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::AuthenticationCall) + .switch()?; + + query_builder + .add_filter_clause("log_type", "INFO") + .switch()?; + + query_builder + .add_filter_clause("category", "API") + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + 
.change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? + .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs b/crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs new file mode 100644 index 000000000000..19951168ae6c --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs @@ -0,0 +1,90 @@ +use api_models::analytics::{ + auth_events::{AuthEventFlows, AuthEventMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct ChallengeAttemptCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for ChallengeAttemptCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + merchant_id: &str, + _publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::ConnectorEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + query_builder + .add_filter_clause("flow", AuthEventFlows::PostAuthentication) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/challenge_flow_count.rs b/crates/analytics/src/auth_events/metrics/challenge_flow_count.rs new file mode 100644 index 000000000000..c7e3a441dbdb --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/challenge_flow_count.rs @@ -0,0 +1,95 @@ +use api_models::analytics::{ + auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity, + TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct ChallengeFlowCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for ChallengeFlowCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk) + .switch()?; + + query_builder.add_filter_clause("value", "C").switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/challenge_success_count.rs b/crates/analytics/src/auth_events/metrics/challenge_success_count.rs new file mode 100644 index 000000000000..2c5f8a111d14 --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/challenge_success_count.rs @@ -0,0 +1,94 @@ +use api_models::analytics::{ + auth_events::{AuthEventFlows, AuthEventMetricsBucketIdentifier}, + Granularity, TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct ChallengeSuccessCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for ChallengeSuccessCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + merchant_id: &str, + _publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = + QueryBuilder::new(AnalyticsCollection::ConnectorEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", merchant_id) + .switch()?; + + query_builder + .add_filter_clause("flow", AuthEventFlows::PostAuthentication) + .switch()?; + + query_builder + .add_filter_clause("visitParamExtractRaw(response, 'transStatus')", "\"Y\"") + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs b/crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs new file mode 100644 index 000000000000..8bcaab0f467b --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs @@ -0,0 +1,97 @@ +use api_models::analytics::{ + auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity, + TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct FrictionlessFlowCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for FrictionlessFlowCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::DisplayThreeDsSdk) + .switch()?; + + query_builder + .add_negative_filter_clause("value", "C") + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/auth_events/metrics/three_ds_sdk_count.rs b/crates/analytics/src/auth_events/metrics/three_ds_sdk_count.rs new file mode 100644 index 000000000000..de6923a6130b --- /dev/null +++ b/crates/analytics/src/auth_events/metrics/three_ds_sdk_count.rs @@ -0,0 +1,93 @@ +use api_models::analytics::{ + auth_events::AuthEventMetricsBucketIdentifier, sdk_events::SdkEventNames, Granularity, + TimeRange, +}; +use common_utils::errors::ReportSwitchExt; +use error_stack::ResultExt; +use time::PrimitiveDateTime; + +use super::AuthEventMetricRow; +use crate::{ + query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window}, + types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult}, +}; + +#[derive(Default)] +pub(super) struct ThreeDsSdkCount; + +#[async_trait::async_trait] +impl super::AuthEventMetric for ThreeDsSdkCount +where + T: AnalyticsDataSource + super::AuthEventMetricAnalytics, + PrimitiveDateTime: ToSql, + AnalyticsCollection: ToSql, + Granularity: GroupByClause, + Aggregate<&'static str>: ToSql, + Window<&'static str>: ToSql, +{ + async fn load_metrics( + &self, + _merchant_id: &str, + publishable_key: &str, + granularity: &Option, + time_range: &TimeRange, + pool: &T, + ) -> MetricsResult> { + let mut query_builder: QueryBuilder = QueryBuilder::new(AnalyticsCollection::SdkEvents); + + query_builder + .add_select_column(Aggregate::Count { + field: None, + alias: Some("count"), + }) + .switch()?; + + if let Some(granularity) = granularity.as_ref() { + query_builder + .add_granularity_in_mins(granularity) + .switch()?; + } + + query_builder + .add_filter_clause("merchant_id", publishable_key) + .switch()?; + + query_builder + .add_bool_filter_clause("first_event", 1) + .switch()?; + + query_builder + .add_filter_clause("event_name", SdkEventNames::ThreeDsMethod) + .switch()?; + + time_range + .set_filter_clause(&mut query_builder) + .attach_printable("Error filtering time range") + .switch()?; + + if let Some(_granularity) = granularity.as_ref() { + query_builder + .add_group_by_clause("time_bucket") + .attach_printable("Error adding granularity") + .switch()?; + } + + query_builder + .execute_query::(pool) + .await + .change_context(MetricsError::QueryBuildingError)? + .change_context(MetricsError::QueryExecutionFailure)? 
+ .into_iter() + .map(|i| { + Ok(( + AuthEventMetricsBucketIdentifier::new(i.time_bucket.clone()), + i, + )) + }) + .collect::, + crate::query::PostProcessingError, + >>() + .change_context(MetricsError::PostProcessingFailure) + } +} diff --git a/crates/analytics/src/clickhouse.rs b/crates/analytics/src/clickhouse.rs index c96d2d330be5..3d1451789aa0 100644 --- a/crates/analytics/src/clickhouse.rs +++ b/crates/analytics/src/clickhouse.rs @@ -7,6 +7,7 @@ use router_env::logger; use time::PrimitiveDateTime; use super::{ + auth_events::metrics::AuthEventMetricRow, health_check::HealthCheck, payments::{ distribution::PaymentDistributionRow, filters::FilterRow, metrics::PaymentMetricRow, @@ -132,10 +133,11 @@ impl AnalyticsDataSource for ClickhouseClient { | AnalyticsCollection::Dispute => { TableEngine::CollapsingMergeTree { sign: "sign_flag" } } - AnalyticsCollection::SdkEvents => TableEngine::BasicTree, - AnalyticsCollection::ApiEvents => TableEngine::BasicTree, - AnalyticsCollection::ConnectorEvents => TableEngine::BasicTree, - AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree, + AnalyticsCollection::SdkEvents + | AnalyticsCollection::ApiEvents + | AnalyticsCollection::ConnectorEvents + | AnalyticsCollection::AuthEvents + | AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree, } } } @@ -158,6 +160,7 @@ impl super::refunds::filters::RefundFilterAnalytics for ClickhouseClient {} impl super::sdk_events::filters::SdkEventFilterAnalytics for ClickhouseClient {} impl super::sdk_events::metrics::SdkEventMetricAnalytics for ClickhouseClient {} impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {} +impl super::auth_events::metrics::AuthEventMetricAnalytics for ClickhouseClient {} impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {} impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {} impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {} @@ -320,6 +323,16 @@ impl TryInto for serde_json::Value { } } +impl TryInto for serde_json::Value { + type Error = Report; + + fn try_into(self) -> Result { + serde_json::from_value(self).change_context(ParsingError::StructParseFailure( + "Failed to parse AuthEventMetricRow in clickhouse results", + )) + } +} + impl TryInto for serde_json::Value { type Error = Report; @@ -360,7 +373,7 @@ impl ToSql for AnalyticsCollection { Self::Payment => Ok("payment_attempts".to_string()), Self::Refund => Ok("refunds".to_string()), Self::SdkEvents => Ok("sdk_events_audit".to_string()), - Self::ApiEvents => Ok("api_events_audit".to_string()), + Self::ApiEvents | Self::AuthEvents => Ok("api_events_audit".to_string()), Self::PaymentIntent => Ok("payment_intents".to_string()), Self::ConnectorEvents => Ok("connector_events_audit".to_string()), Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()), diff --git a/crates/analytics/src/core.rs b/crates/analytics/src/core.rs index 6ccf2858e223..f32783497480 100644 --- a/crates/analytics/src/core.rs +++ b/crates/analytics/src/core.rs @@ -21,6 +21,11 @@ pub async fn get_domain_info( download_dimensions: None, dimensions: utils::get_sdk_event_dimensions(), }, + AnalyticsDomain::AuthEvents => GetInfoResponse { + metrics: utils::get_auth_event_metrics_info(), + download_dimensions: None, + dimensions: Vec::new(), + }, AnalyticsDomain::ApiEvents => GetInfoResponse { metrics: utils::get_api_event_metrics_info(), download_dimensions: None, diff --git a/crates/analytics/src/lib.rs 
b/crates/analytics/src/lib.rs
index eb08d8549d10..a7f399c49ff5 100644
--- a/crates/analytics/src/lib.rs
+++ b/crates/analytics/src/lib.rs
@@ -8,6 +8,7 @@ mod query;
 pub mod refunds;
 
 pub mod api_event;
+pub mod auth_events;
 pub mod connector_events;
 pub mod health_check;
 pub mod outgoing_webhook_event;
@@ -33,6 +34,7 @@ use api_models::analytics::{
     api_event::{
         ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
     },
+    auth_events::{AuthEventMetrics, AuthEventMetricsBucketIdentifier},
     disputes::{DisputeDimensions, DisputeFilters, DisputeMetrics, DisputeMetricsBucketIdentifier},
     payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
     refunds::{RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier},
@@ -53,6 +55,7 @@ use storage_impl::config::Database;
 use strum::Display;
 
 use self::{
+    auth_events::metrics::{AuthEventMetric, AuthEventMetricRow},
     payments::{
         distribution::{PaymentDistribution, PaymentDistributionRow},
         metrics::{PaymentMetric, PaymentMetricRow},
@@ -536,6 +539,36 @@ impl AnalyticsProvider {
         }
     }
 
+    pub async fn get_auth_event_metrics(
+        &self,
+        metric: &AuthEventMetrics,
+        merchant_id: &str,
+        publishable_key: &str,
+        granularity: &Option<Granularity>,
+        time_range: &TimeRange,
+    ) -> types::MetricsResult<Vec<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
+        match self {
+            Self::Sqlx(_pool) => Err(report!(MetricsError::NotImplemented)),
+            Self::Clickhouse(pool) => {
+                metric
+                    .load_metrics(merchant_id, publishable_key, granularity, time_range, pool)
+                    .await
+            }
+            Self::CombinedCkh(_sqlx_pool, ckh_pool) | Self::CombinedSqlx(_sqlx_pool, ckh_pool) => {
+                metric
+                    .load_metrics(
+                        merchant_id,
+                        publishable_key,
+                        granularity,
+                        // Since auth events are ckh only, use ckh here
+                        time_range,
+                        ckh_pool,
+                    )
+                    .await
+            }
+        }
+    }
+
     pub async fn get_api_event_metrics(
         &self,
         metric: &ApiEventMetrics,
@@ -719,6 +752,7 @@ pub enum AnalyticsFlow {
     GetPaymentMetrics,
     GetRefundsMetrics,
     GetSdkMetrics,
+    GetAuthMetrics,
     GetPaymentFilters,
     GetRefundFilters,
     GetSdkEventFilters,
diff --git a/crates/analytics/src/query.rs b/crates/analytics/src/query.rs
index b8236c08912a..5414527efcf5 100644
--- a/crates/analytics/src/query.rs
+++ b/crates/analytics/src/query.rs
@@ -4,6 +4,7 @@ use api_models::{
     analytics::{
         self as analytics_api,
         api_event::ApiEventDimensions,
+        auth_events::AuthEventFlows,
         disputes::DisputeDimensions,
         payments::{PaymentDimensions, PaymentDistributions},
         refunds::{RefundDimensions, RefundType},
@@ -386,6 +387,8 @@ impl_to_sql_for_to_string!(
 
 impl_to_sql_for_to_string!(&SdkEventDimensions, SdkEventDimensions, SdkEventNames);
 
+impl_to_sql_for_to_string!(AuthEventFlows);
+
 impl_to_sql_for_to_string!(&ApiEventDimensions, ApiEventDimensions);
 
 impl_to_sql_for_to_string!(&DisputeDimensions, DisputeDimensions, DisputeStage);
@@ -512,6 +515,14 @@ where
         self.add_custom_filter_clause(key, value, FilterTypes::EqualBool)
     }
 
+    pub fn add_negative_filter_clause(
+        &mut self,
+        key: impl ToSql<T>,
+        value: impl ToSql<T>,
+    ) -> QueryResult<()> {
+        self.add_custom_filter_clause(key, value, FilterTypes::NotEqual)
+    }
+
     pub fn add_custom_filter_clause(
         &mut self,
         lhs: impl ToSql<T>,
diff --git a/crates/analytics/src/sqlx.rs b/crates/analytics/src/sqlx.rs
index 68d2bb649f39..f33fc425c2c6 100644
--- a/crates/analytics/src/sqlx.rs
+++ b/crates/analytics/src/sqlx.rs
@@ -512,6 +512,8 @@ impl ToSql<SqlxClient> for AnalyticsCollection {
             Self::Refund => Ok("refund".to_string()),
             Self::SdkEvents => Err(error_stack::report!(ParsingError::UnknownError)
                 .attach_printable("SdkEvents table is not implemented for Sqlx"))?,
+            Self::AuthEvents => Err(error_stack::report!(ParsingError::UnknownError)
+                .attach_printable("AuthEvents table is not implemented for Sqlx"))?,
             Self::ApiEvents => Err(error_stack::report!(ParsingError::UnknownError)
                 .attach_printable("ApiEvents table is not implemented for Sqlx"))?,
             Self::PaymentIntent => Ok("payment_intent".to_string()),
diff --git a/crates/analytics/src/types.rs b/crates/analytics/src/types.rs
index 356d11bb77d4..a957fa6039ab 100644
--- a/crates/analytics/src/types.rs
+++ b/crates/analytics/src/types.rs
@@ -15,6 +15,7 @@ use crate::errors::AnalyticsError;
 pub enum AnalyticsDomain {
     Payments,
     Refunds,
+    AuthEvents,
     SdkEvents,
     ApiEvents,
     Dispute,
@@ -30,6 +31,7 @@ pub enum AnalyticsCollection {
     ConnectorEvents,
     OutgoingWebhookEvent,
     Dispute,
+    AuthEvents,
 }
 
 #[allow(dead_code)]
@@ -67,6 +69,7 @@ where
 
 pub trait RefundAnalytics {}
 pub trait SdkEventAnalytics {}
+pub trait AuthEventAnalytics {}
 
 #[async_trait::async_trait]
 pub trait AnalyticsDataSource
diff --git a/crates/analytics/src/utils.rs b/crates/analytics/src/utils.rs
index 7bff5c87da66..0afe9bd6c5e3 100644
--- a/crates/analytics/src/utils.rs
+++ b/crates/analytics/src/utils.rs
@@ -1,5 +1,6 @@
 use api_models::analytics::{
     api_event::{ApiEventDimensions, ApiEventMetrics},
+    auth_events::AuthEventMetrics,
     disputes::{DisputeDimensions, DisputeMetrics},
     payments::{PaymentDimensions, PaymentMetrics},
     refunds::{RefundDimensions, RefundMetrics},
@@ -36,6 +37,10 @@ pub fn get_sdk_event_metrics_info() -> Vec<NameDescription> {
     SdkEventMetrics::iter().map(Into::into).collect()
 }
 
+pub fn get_auth_event_metrics_info() -> Vec<NameDescription> {
+    AuthEventMetrics::iter().map(Into::into).collect()
+}
+
 pub fn get_api_event_metrics_info() -> Vec<NameDescription> {
     ApiEventMetrics::iter().map(Into::into).collect()
 }
diff --git a/crates/api_models/src/analytics.rs b/crates/api_models/src/analytics.rs
index ad035c707cc2..d697c8ab2712 100644
--- a/crates/api_models/src/analytics.rs
+++ b/crates/api_models/src/analytics.rs
@@ -5,6 +5,7 @@ use masking::Secret;
 
 use self::{
     api_event::{ApiEventDimensions, ApiEventMetrics},
+    auth_events::AuthEventMetrics,
     disputes::{DisputeDimensions, DisputeMetrics},
     payments::{PaymentDimensions, PaymentDistributions, PaymentMetrics},
     refunds::{RefundDimensions, RefundMetrics},
@@ -13,6 +14,7 @@ pub use crate::payments::TimeRange;
 
 pub mod api_event;
+pub mod auth_events;
 pub mod connector_events;
 pub mod disputes;
 pub mod outgoing_webhook_event;
@@ -138,6 +140,17 @@ pub struct GetSdkEventMetricRequest {
     pub delta: bool,
 }
 
+#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct GetAuthEventMetricRequest {
+    pub time_series: Option<TimeSeries>,
+    pub time_range: TimeRange,
+    #[serde(default)]
+    pub metrics: HashSet<AuthEventMetrics>,
+    #[serde(default)]
+    pub delta: bool,
+}
+
 #[derive(Debug, serde::Serialize)]
 pub struct AnalyticsMetadata {
     pub current_time_range: TimeRange,
diff --git a/crates/api_models/src/analytics/auth_events.rs b/crates/api_models/src/analytics/auth_events.rs
new file mode 100644
index 000000000000..bb08f9f59342
--- /dev/null
+++ b/crates/api_models/src/analytics/auth_events.rs
@@ -0,0 +1,111 @@
+use std::{
+    collections::hash_map::DefaultHasher,
+    hash::{Hash, Hasher},
+};
+
+use super::NameDescription;
+
+#[derive(
+    Clone,
+    Debug,
+    Hash,
+    PartialEq,
+    Eq,
+    serde::Serialize,
+    serde::Deserialize,
+    strum::Display,
+    strum::EnumIter,
+    strum::AsRefStr,
+)]
+#[strum(serialize_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
+pub enum AuthEventMetrics {
ThreeDsSdkCount, + AuthenticationAttemptCount, + AuthenticationSuccessCount, + ChallengeFlowCount, + FrictionlessFlowCount, + ChallengeAttemptCount, + ChallengeSuccessCount, +} + +#[derive( + Clone, + Debug, + Hash, + PartialEq, + Eq, + serde::Serialize, + serde::Deserialize, + strum::Display, + strum::EnumIter, + strum::AsRefStr, +)] +pub enum AuthEventFlows { + PostAuthentication, +} + +pub mod metric_behaviour { + pub struct ThreeDsSdkCount; + pub struct AuthenticationAttemptCount; + pub struct AuthenticationSuccessCount; + pub struct ChallengeFlowCount; + pub struct FrictionlessFlowCount; + pub struct ChallengeAttemptCount; + pub struct ChallengeSuccessCount; +} + +impl From for NameDescription { + fn from(value: AuthEventMetrics) -> Self { + Self { + name: value.to_string(), + desc: String::new(), + } + } +} + +#[derive(Debug, serde::Serialize, Eq)] +pub struct AuthEventMetricsBucketIdentifier { + pub time_bucket: Option, +} + +impl AuthEventMetricsBucketIdentifier { + pub fn new(time_bucket: Option) -> Self { + Self { time_bucket } + } +} + +impl Hash for AuthEventMetricsBucketIdentifier { + fn hash(&self, state: &mut H) { + self.time_bucket.hash(state); + } +} + +impl PartialEq for AuthEventMetricsBucketIdentifier { + fn eq(&self, other: &Self) -> bool { + let mut left = DefaultHasher::new(); + self.hash(&mut left); + let mut right = DefaultHasher::new(); + other.hash(&mut right); + left.finish() == right.finish() + } +} + +#[derive(Debug, serde::Serialize)] +pub struct AuthEventMetricsBucketValue { + pub three_ds_sdk_count: Option, + pub authentication_attempt_count: Option, + pub authentication_success_count: Option, + pub challenge_flow_count: Option, + pub challenge_attempt_count: Option, + pub challenge_success_count: Option, + pub frictionless_flow_count: Option, +} + +#[derive(Debug, serde::Serialize)] +pub struct MetricsBucketResponse { + #[serde(flatten)] + pub values: AuthEventMetricsBucketValue, + #[serde(flatten)] + pub dimensions: AuthEventMetricsBucketIdentifier, +} diff --git a/crates/api_models/src/analytics/sdk_events.rs b/crates/api_models/src/analytics/sdk_events.rs index 6905ee555ab3..9e33181ec679 100644 --- a/crates/api_models/src/analytics/sdk_events.rs +++ b/crates/api_models/src/analytics/sdk_events.rs @@ -108,6 +108,7 @@ pub enum SdkEventNames { DisplayBankTransferInfoPage, DisplayQrCodeInfoPage, AuthenticationCall, + AuthenticationCallInit, ThreeDsMethodCall, ThreeDsMethodResult, ThreeDsMethod, diff --git a/crates/api_models/src/events.rs b/crates/api_models/src/events.rs index 46fa8caa6df6..c9ae775fed87 100644 --- a/crates/api_models/src/events.rs +++ b/crates/api_models/src/events.rs @@ -22,7 +22,7 @@ use common_utils::{ use crate::{ admin::*, analytics::{ - api_event::*, connector_events::ConnectorEventsRequest, + api_event::*, auth_events::*, connector_events::ConnectorEventsRequest, outgoing_webhook_event::OutgoingWebhookLogsRequest, sdk_events::*, search::*, *, }, api_keys::*, @@ -84,6 +84,7 @@ impl_misc_api_event_type!( GetPaymentMetricRequest, GetRefundMetricRequest, GetSdkEventMetricRequest, + GetAuthEventMetricRequest, GetPaymentFiltersRequest, PaymentFiltersResponse, GetRefundFilterRequest, diff --git a/crates/router/src/analytics.rs b/crates/router/src/analytics.rs index 0aec94c205c5..531953c0161a 100644 --- a/crates/router/src/analytics.rs +++ b/crates/router/src/analytics.rs @@ -13,9 +13,9 @@ pub mod routes { GetGlobalSearchRequest, GetSearchRequest, GetSearchRequestWithIndex, SearchIndex, }, GenerateReportRequest, GetApiEventFiltersRequest, 
GetApiEventMetricRequest, - GetDisputeMetricRequest, GetPaymentFiltersRequest, GetPaymentMetricRequest, - GetRefundFilterRequest, GetRefundMetricRequest, GetSdkEventFiltersRequest, - GetSdkEventMetricRequest, ReportRequest, + GetAuthEventMetricRequest, GetDisputeMetricRequest, GetPaymentFiltersRequest, + GetPaymentMetricRequest, GetRefundFilterRequest, GetRefundMetricRequest, + GetSdkEventFiltersRequest, GetSdkEventMetricRequest, ReportRequest, }; use error_stack::ResultExt; @@ -74,6 +74,10 @@ pub mod routes { web::resource("filters/sdk_events") .route(web::post().to(get_sdk_event_filters)), ) + .service( + web::resource("metrics/auth_events") + .route(web::post().to(get_auth_event_metrics)), + ) .service(web::resource("api_event_logs").route(web::get().to(get_api_events))) .service(web::resource("sdk_event_logs").route(web::post().to(get_sdk_events))) .service( @@ -241,6 +245,43 @@ pub mod routes { .await } + /// # Panics + /// + /// Panics if `json_payload` array does not contain one `GetAuthEventMetricRequest` element. + pub async fn get_auth_event_metrics( + state: web::Data, + req: actix_web::HttpRequest, + json_payload: web::Json<[GetAuthEventMetricRequest; 1]>, + ) -> impl Responder { + // safety: This shouldn't panic owing to the data type + #[allow(clippy::expect_used)] + let payload = json_payload + .into_inner() + .to_vec() + .pop() + .expect("Couldn't get GetAuthEventMetricRequest"); + let flow = AnalyticsFlow::GetAuthMetrics; + Box::pin(api::server_wrap( + flow, + state, + &req, + payload, + |state, auth: AuthenticationData, req| async move { + analytics::auth_events::get_metrics( + &state.pool, + &auth.merchant_account.merchant_id, + auth.merchant_account.publishable_key.as_ref(), + req, + ) + .await + .map(ApplicationResponse::Json) + }, + &auth::JWTAuth(Permission::Analytics), + api_locking::LockAction::NotApplicable, + )) + .await + } + pub async fn get_payment_filters( state: web::Data, req: actix_web::HttpRequest,
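
The new `metrics/auth_events` route above deserializes its body as `web::Json<[GetAuthEventMetricRequest; 1]>`, i.e. a JSON array holding exactly one request object. A rough, untested sketch of such a payload follows, assuming the `camelCase` field names from `GetAuthEventMetricRequest`, snake_case metric names from `AuthEventMetrics`, and hypothetical ISO 8601 `startTime`/`endTime` fields inside `TimeRange` (verify these against the deployed API before use):

    // Sketch only: builds the kind of payload the metrics/auth_events route
    // appears to expect (an array containing a single GetAuthEventMetricRequest).
    // The timeRange field names/format are assumptions, not confirmed by this patch.
    use serde_json::json;

    fn main() {
        let payload = json!([{
            "timeRange": {
                "startTime": "2024-04-01T00:00:00Z",
                "endTime": "2024-04-22T00:00:00Z"
            },
            // AuthEventMetrics variants are snake_case on the wire
            // (#[serde(rename_all = "snake_case")] on the enum).
            "metrics": ["three_ds_sdk_count", "challenge_flow_count"],
            "delta": false
        }]);
        println!("{}", serde_json::to_string_pretty(&payload).expect("serializable"));
    }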