Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion auction-server/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "auction-server"
version = "0.34.3"
version = "0.35.0"
edition = "2021"
license-file = "license.txt"

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- ClickHouse migration: add a nullable transaction-hash column to the
-- bid_swap analytics table (Nullable(String) is ClickHouse type syntax).
-- Existing rows get NULL; the writer populates it for new bid analytics.
ALTER TABLE bid_swap ADD COLUMN tx_hash Nullable(String);
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- Down migration: remove the index on auction(creation_time) added by the
-- corresponding up migration. IF EXISTS makes the rollback idempotent.
DROP INDEX IF EXISTS auction_creation_time_idx;
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-- Up migration: index auction.creation_time to support the periodic
-- history-deletion query (`WHERE creation_time < $1`).
-- NOTE(review): CREATE INDEX CONCURRENTLY cannot run inside a transaction
-- block; confirm the migration runner executes this statement outside a
-- transaction (e.g. sqlx's `-- no-transaction` marker) or it will fail.
CREATE INDEX CONCURRENTLY auction_creation_time_idx ON auction (creation_time);
24 changes: 24 additions & 0 deletions auction-server/src/auction/repository/add_bid_analytics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,28 @@ impl Repository {
decimals: HashMap<Pubkey, u8>,
) -> anyhow::Result<()> {
let transaction = STANDARD.encode(bincode::serialize(&bid.chain_data.transaction.clone())?);
let tx_hash = match &bid.status {
entities::BidStatusSvm::Pending => None,
entities::BidStatusSvm::AwaitingSignature { auction } => {
Some(auction.tx_hash.to_string())
}
entities::BidStatusSvm::SentToUserForSubmission { auction } => {
Some(auction.tx_hash.to_string())
}
entities::BidStatusSvm::Submitted { auction } => Some(auction.tx_hash.to_string()),
entities::BidStatusSvm::Lost { auction } => {
auction.as_ref().map(|a| a.tx_hash.to_string())
}
entities::BidStatusSvm::Won { auction } => Some(auction.tx_hash.to_string()),
entities::BidStatusSvm::Failed { auction, reason: _ } => {
Some(auction.tx_hash.to_string())
}
entities::BidStatusSvm::Expired { auction } => Some(auction.tx_hash.to_string()),
entities::BidStatusSvm::Cancelled { auction } => Some(auction.tx_hash.to_string()),
entities::BidStatusSvm::SubmissionFailed { auction, reason: _ } => {
Some(auction.tx_hash.to_string())
}
};
let bid_analytics = match data {
entities::BidTransactionData::SubmitBid(transaction_data) => {
let SubmitBidArgs {
Expand All @@ -53,6 +75,7 @@ impl Repository {
bid_amount: bid.amount,

auction_id: bid.status.get_auction_id(),
tx_hash,
submission_time: bid.submission_time,
conclusion_time: bid.conclusion_time,

Expand Down Expand Up @@ -113,6 +136,7 @@ impl Repository {
bid_amount: bid.amount,

auction_id: bid.status.get_auction_id(),
tx_hash,
opportunity_id: bid.opportunity_id,
conclusion_time: bid.conclusion_time,

Expand Down
2 changes: 2 additions & 0 deletions auction-server/src/auction/repository/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -858,6 +858,7 @@ pub struct BidAnalyticsSwap {

#[serde(with = "clickhouse::serde::uuid::option")]
pub auction_id: Option<Uuid>,
pub tx_hash: Option<String>,
#[serde(with = "clickhouse::serde::time::datetime64::micros::option")]
pub submission_time: Option<OffsetDateTime>,
#[serde(with = "clickhouse::serde::uuid::option")]
Expand Down Expand Up @@ -905,6 +906,7 @@ pub struct BidAnalyticsLimo {

#[serde(with = "clickhouse::serde::uuid::option")]
pub auction_id: Option<Uuid>,
pub tx_hash: Option<String>,
#[serde(with = "clickhouse::serde::time::datetime64::micros::option")]
pub submission_time: Option<OffsetDateTime>,
#[serde(with = "clickhouse::serde::time::datetime64::micros::option")]
Expand Down
6 changes: 6 additions & 0 deletions auction-server/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,12 @@ pub struct DeletePgRowsOptions {
#[arg(env = "DELETE_THRESHOLD_SECONDS")]
#[arg(default_value = "172800")] // 2 days in seconds
pub delete_threshold_secs: u64,

/// The buffer time to account for bids that may still exist in the db. We cannot delete auctions with ids that are still referenced by bids, so we wait an additional buffer time before deleting auctions.
#[arg(long = "delete-buffer-auction-seconds")]
#[arg(env = "DELETE_BUFFER_AUCTION_SECONDS")]
#[arg(default_value = "3600")] // 1 hour in seconds
pub delete_buffer_auction_secs: u64,
}

#[derive(Args, Clone, Debug)]
Expand Down
38 changes: 38 additions & 0 deletions auction-server/src/kernel/workers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,12 @@ pub async fn run_delete_pg_db_history(
}
});
futures::future::try_join_all(futures).await?;

delete_pg_db_auction_history(
db,
delete_threshold_secs + delete_pg_rows_options.delete_buffer_auction_secs,
)
.await?;
}
}
}
Expand Down Expand Up @@ -205,3 +211,35 @@ pub async fn delete_pg_db_opportunity_history(

Ok(())
}

/// Deletes a bounded batch of old rows from the Postgres `auction` table.
///
/// Removes at most `DELETE_BATCH_SIZE` auctions whose `creation_time` is
/// older than `delete_threshold_secs` seconds before the current UTC time.
/// The number of deleted rows is recorded in the
/// `db_delete_pg_auction_count` histogram; database errors are logged,
/// flagged on the tracing span, and propagated to the caller.
///
/// NOTE(review): bid rows hold a foreign key to `auction`, so if any
/// selected auction is still referenced by a bid, Postgres rejects the
/// entire batched DELETE and no rows are removed. The caller mitigates
/// this by adding `delete_buffer_auction_secs` to the threshold, but
/// consider excluding still-referenced ids in the query itself (e.g.
/// `AND NOT EXISTS (SELECT 1 FROM bid WHERE bid.auction_id = auction.id)`)
/// so one lingering bid cannot stall deletion — TODO confirm against the
/// bid table schema and test locally.
#[instrument(
    target = "metrics",
    name = "db_delete_pg_auction_history",
    fields(category = "db_queries", result = "success", name = "delete_pg_auction_history", tracing_enabled),
    skip_all
)]
pub async fn delete_pg_db_auction_history(
    db: &PgPool,
    delete_threshold_secs: u64,
) -> anyhow::Result<()> {
    // Cutoff timestamp: anything created strictly before this is eligible.
    let threshold = OffsetDateTime::now_utc() - Duration::from_secs(delete_threshold_secs);
    let n_auctions_deleted = sqlx::query!(
        "WITH rows_to_delete AS (
            SELECT id FROM auction WHERE creation_time < $1 LIMIT $2
        ) DELETE FROM auction WHERE id IN (SELECT id FROM rows_to_delete)",
        // creation_time is presumably a timestamp without time zone, hence
        // the conversion of the UTC cutoff to PrimitiveDateTime — TODO confirm
        // against the table definition.
        PrimitiveDateTime::new(threshold.date(), threshold.time()),
        DELETE_BATCH_SIZE as i64,
    )
    .execute(db)
    .await
    .map_err(|e| {
        // Mark the span as failed so metrics pipelines count the error,
        // then propagate it to the caller.
        tracing::Span::current().record("result", "error");
        tracing::error!("Failed to delete PG DB auction history: {}", e);
        e
    })?
    .rows_affected();

    metrics::histogram!("db_delete_pg_auction_count").record(n_auctions_deleted as f64);

    Ok(())
}
1 change: 1 addition & 0 deletions integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ def main():
f.write(f'export DELETE_ENABLED=true\n')
f.write(f'export DELETE_INTERVAL_SECONDS={1}\n')
f.write(f'export DELETE_THRESHOLD_SECONDS={60*60*24*2}\n')
f.write(f'export DELETE_BUFFER_AUCTION_SECONDS={60*60}\n')

mint_buy = Keypair.from_json((open('keypairs/mint_buy.json').read())).pubkey()
mint_sell = Keypair.from_json((open('keypairs/mint_sell.json').read())).pubkey()
Expand Down
Loading