diff --git a/Cargo.lock b/Cargo.lock index 8c28b54a..97458c60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -653,7 +653,7 @@ dependencies = [ [[package]] name = "auction-server" -version = "0.34.3" +version = "0.35.0" dependencies = [ "anchor-lang", "anchor-lang-idl", diff --git a/auction-server/.sqlx/query-d9c19f7767f197f7fea6dc0ebc34e1a1262b540aa8ecd8296ea03cc672a71602.json b/auction-server/.sqlx/query-d9c19f7767f197f7fea6dc0ebc34e1a1262b540aa8ecd8296ea03cc672a71602.json new file mode 100644 index 00000000..b8a86944 --- /dev/null +++ b/auction-server/.sqlx/query-d9c19f7767f197f7fea6dc0ebc34e1a1262b540aa8ecd8296ea03cc672a71602.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "WITH rows_to_delete AS (\n SELECT id FROM auction WHERE creation_time < $1 LIMIT $2\n ) DELETE FROM auction WHERE id IN (SELECT id FROM rows_to_delete)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamp", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "d9c19f7767f197f7fea6dc0ebc34e1a1262b540aa8ecd8296ea03cc672a71602" +} diff --git a/auction-server/Cargo.toml b/auction-server/Cargo.toml index 0f57e4cc..407c8688 100644 --- a/auction-server/Cargo.toml +++ b/auction-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "auction-server" -version = "0.34.3" +version = "0.35.0" edition = "2021" license-file = "license.txt" diff --git a/auction-server/clickhouse_migrations/0017_alter_bid_swap_tx_hash.sql b/auction-server/clickhouse_migrations/0017_alter_bid_swap_tx_hash.sql new file mode 100644 index 00000000..3a769583 --- /dev/null +++ b/auction-server/clickhouse_migrations/0017_alter_bid_swap_tx_hash.sql @@ -0,0 +1 @@ +ALTER TABLE bid_swap ADD COLUMN tx_hash Nullable(String); diff --git a/auction-server/migrations/20250726182247_auction_creation_time_idx.down.sql b/auction-server/migrations/20250726182247_auction_creation_time_idx.down.sql new file mode 100644 index 00000000..215dc78d --- /dev/null +++ 
b/auction-server/migrations/20250726182247_auction_creation_time_idx.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS auction_creation_time_idx; diff --git a/auction-server/migrations/20250726182247_auction_creation_time_idx.up.sql b/auction-server/migrations/20250726182247_auction_creation_time_idx.up.sql new file mode 100644 index 00000000..717e6903 --- /dev/null +++ b/auction-server/migrations/20250726182247_auction_creation_time_idx.up.sql @@ -0,0 +1,2 @@ +-- no-transaction +CREATE INDEX CONCURRENTLY auction_creation_time_idx ON auction (creation_time); diff --git a/auction-server/src/auction/repository/add_bid_analytics.rs b/auction-server/src/auction/repository/add_bid_analytics.rs index 128482f4..4712ec6d 100644 --- a/auction-server/src/auction/repository/add_bid_analytics.rs +++ b/auction-server/src/auction/repository/add_bid_analytics.rs @@ -37,6 +37,28 @@ impl Repository { decimals: HashMap, ) -> anyhow::Result<()> { let transaction = STANDARD.encode(bincode::serialize(&bid.chain_data.transaction.clone())?); + let tx_hash = match &bid.status { + entities::BidStatusSvm::Pending => None, + entities::BidStatusSvm::AwaitingSignature { auction } => { + Some(auction.tx_hash.to_string()) + } + entities::BidStatusSvm::SentToUserForSubmission { auction } => { + Some(auction.tx_hash.to_string()) + } + entities::BidStatusSvm::Submitted { auction } => Some(auction.tx_hash.to_string()), + entities::BidStatusSvm::Lost { auction } => { + auction.as_ref().map(|a| a.tx_hash.to_string()) + } + entities::BidStatusSvm::Won { auction } => Some(auction.tx_hash.to_string()), + entities::BidStatusSvm::Failed { auction, reason: _ } => { + Some(auction.tx_hash.to_string()) + } + entities::BidStatusSvm::Expired { auction } => Some(auction.tx_hash.to_string()), + entities::BidStatusSvm::Cancelled { auction } => Some(auction.tx_hash.to_string()), + entities::BidStatusSvm::SubmissionFailed { auction, reason: _ } => { + Some(auction.tx_hash.to_string()) + } + }; let bid_analytics = match data {
entities::BidTransactionData::SubmitBid(transaction_data) => { let SubmitBidArgs { @@ -53,6 +75,7 @@ impl Repository { bid_amount: bid.amount, auction_id: bid.status.get_auction_id(), + tx_hash, submission_time: bid.submission_time, conclusion_time: bid.conclusion_time, @@ -113,6 +136,7 @@ impl Repository { bid_amount: bid.amount, auction_id: bid.status.get_auction_id(), + tx_hash, opportunity_id: bid.opportunity_id, conclusion_time: bid.conclusion_time, diff --git a/auction-server/src/auction/repository/models.rs b/auction-server/src/auction/repository/models.rs index 4231f2bd..95934988 100644 --- a/auction-server/src/auction/repository/models.rs +++ b/auction-server/src/auction/repository/models.rs @@ -858,6 +858,7 @@ pub struct BidAnalyticsSwap { #[serde(with = "clickhouse::serde::uuid::option")] pub auction_id: Option, + pub tx_hash: Option, #[serde(with = "clickhouse::serde::time::datetime64::micros::option")] pub submission_time: Option, #[serde(with = "clickhouse::serde::uuid::option")] @@ -905,6 +906,7 @@ pub struct BidAnalyticsLimo { #[serde(with = "clickhouse::serde::uuid::option")] pub auction_id: Option, + pub tx_hash: Option, #[serde(with = "clickhouse::serde::time::datetime64::micros::option")] pub submission_time: Option, #[serde(with = "clickhouse::serde::time::datetime64::micros::option")] diff --git a/auction-server/src/config.rs b/auction-server/src/config.rs index 70475c70..80d63164 100644 --- a/auction-server/src/config.rs +++ b/auction-server/src/config.rs @@ -102,6 +102,12 @@ pub struct DeletePgRowsOptions { #[arg(env = "DELETE_THRESHOLD_SECONDS")] #[arg(default_value = "172800")] // 2 days in seconds pub delete_threshold_secs: u64, + + /// The buffer time to account for bids that may still exist in the db. We cannot delete auctions with ids that are still referenced by bids, so we wait an additional buffer time before deleting auctions. 
+ #[arg(long = "delete-buffer-auction-seconds")] + #[arg(env = "DELETE_BUFFER_AUCTION_SECONDS")] + #[arg(default_value = "3600")] // 1 hour in seconds + pub delete_buffer_auction_secs: u64, } #[derive(Args, Clone, Debug)] diff --git a/auction-server/src/kernel/workers.rs b/auction-server/src/kernel/workers.rs index f964d194..57efbc91 100644 --- a/auction-server/src/kernel/workers.rs +++ b/auction-server/src/kernel/workers.rs @@ -122,6 +122,12 @@ pub async fn run_delete_pg_db_history( } }); futures::future::try_join_all(futures).await?; + + delete_pg_db_auction_history( + db, + delete_threshold_secs + delete_pg_rows_options.delete_buffer_auction_secs, + ) + .await?; } } } @@ -205,3 +211,35 @@ pub async fn delete_pg_db_opportunity_history( Ok(()) } + +#[instrument( + target = "metrics", + name = "db_delete_pg_auction_history", + fields(category = "db_queries", result = "success", name = "delete_pg_auction_history", tracing_enabled), + skip_all +)] +pub async fn delete_pg_db_auction_history( + db: &PgPool, + delete_threshold_secs: u64, +) -> anyhow::Result<()> { + let threshold = OffsetDateTime::now_utc() - Duration::from_secs(delete_threshold_secs); + let n_auctions_deleted = sqlx::query!( + "WITH rows_to_delete AS ( + SELECT id FROM auction WHERE creation_time < $1 LIMIT $2 + ) DELETE FROM auction WHERE id IN (SELECT id FROM rows_to_delete)", + PrimitiveDateTime::new(threshold.date(), threshold.time()), + DELETE_BATCH_SIZE as i64, + ) + .execute(db) + .await + .map_err(|e| { + tracing::Span::current().record("result", "error"); + tracing::error!("Failed to delete PG DB auction history: {}", e); + e + })?
+ .rows_affected(); + + metrics::histogram!("db_delete_pg_auction_count").record(n_auctions_deleted as f64); + + Ok(()) +} diff --git a/integration.py b/integration.py index 821c6897..b6e7a745 100644 --- a/integration.py +++ b/integration.py @@ -16,6 +16,7 @@ def main(): f.write(f'export DELETE_ENABLED=true\n') f.write(f'export DELETE_INTERVAL_SECONDS={1}\n') f.write(f'export DELETE_THRESHOLD_SECONDS={60*60*24*2}\n') + f.write(f'export DELETE_BUFFER_AUCTION_SECONDS={60*60}\n') mint_buy = Keypair.from_json((open('keypairs/mint_buy.json').read())).pubkey() mint_sell = Keypair.from_json((open('keypairs/mint_sell.json').read())).pubkey()