Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions lib/reconcile.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ type SupabaseDB = SupabaseClient<Database>;
/**
* Reconcile a storyline's plot_count and last_plot_time from the plots table.
* Uses COUNT(*) and MAX(block_timestamp) — idempotent and safe for replays.
* Relies on the unique constraint on (storyline_id, plot_index) to prevent
* duplicate rows that would inflate the count.
*
* Throws on any Supabase error so callers can handle failures.
*/
Expand Down
4 changes: 2 additions & 2 deletions src/app/api/cron/backfill/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,7 @@ async function processStorylineCreated(
};
const { error: plotError } = await supabase
.from("plots")
.upsert(plotRow, { onConflict: "tx_hash,log_index" });
.upsert(plotRow, { onConflict: "storyline_id,plot_index", ignoreDuplicates: true });
if (plotError) {
throw new Error(`Database error (genesis plot): ${plotError.message}`);
}
Expand Down Expand Up @@ -360,7 +360,7 @@ async function processPlotChained(

const { error: plotError } = await supabase
.from("plots")
.upsert(row, { onConflict: "tx_hash,log_index" });
.upsert(row, { onConflict: "storyline_id,plot_index" });
if (plotError) {
throw new Error(`Database error (plot): ${plotError.message}`);
}
Expand Down
2 changes: 1 addition & 1 deletion src/app/api/index/plot/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { NextResponse } from "next/server";
import { type Hex, decodeEventLog, encodeEventTopics } from "viem";
import { publicClient, getReceiptWithRetry } from "../../../../../lib/rpc";

Check warning on line 3 in src/app/api/index/plot/route.ts

View workflow job for this annotation

GitHub Actions / lint-and-typecheck

'getReceiptWithRetry' is defined but never used
import { createServerClient } from "../../../../../lib/supabase";
import { validateRecentTx } from "../../../../../lib/index-auth";
import {
Expand Down Expand Up @@ -129,7 +129,7 @@

const { error: dbError } = await supabase.from("plots").upsert(
row,
{ onConflict: "tx_hash,log_index" }
{ onConflict: "storyline_id,plot_index" }
);

if (dbError) {
Expand Down
2 changes: 1 addition & 1 deletion src/app/api/index/storyline/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { NextResponse } from "next/server";
import { type Hex, decodeEventLog, encodeEventTopics } from "viem";
import { publicClient, getReceiptWithRetry } from "../../../../../lib/rpc";

Check warning on line 3 in src/app/api/index/storyline/route.ts

View workflow job for this annotation

GitHub Actions / lint-and-typecheck

'getReceiptWithRetry' is defined but never used
import { createServerClient } from "../../../../../lib/supabase";
import { validateRecentTx } from "../../../../../lib/index-auth";
import {
Expand Down Expand Up @@ -173,7 +173,7 @@

const { error: plotDbError } = await supabase.from("plots").upsert(
plotRow,
{ onConflict: "tx_hash,log_index" }
{ onConflict: "storyline_id,plot_index", ignoreDuplicates: true }
);

if (plotDbError) {
Expand Down
33 changes: 33 additions & 0 deletions supabase/migrations/00030_dedupe_plots_unique_constraint.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
-- Deduplicate plots rows with the same (storyline_id, plot_index), keeping
-- the row with the richest data: prefer a non-EMPTY title (NULL and '' both
-- lose), then the latest id as a tiebreaker.
-- Then add a unique constraint to prevent recurrence.
-- Finally re-reconcile plot_count / last_plot_time from the deduplicated data.

-- Step 1: delete every row that is not the best row of its
-- (storyline_id, plot_index) group. Ranking with row_number() and deleting
-- the rn > 1 rows avoids the NULL pitfalls and poor query plans of
-- "id NOT IN (<whole-table subquery>)".
WITH ranked AS (
  SELECT id,
         row_number() OVER (
           PARTITION BY storyline_id, plot_index
           ORDER BY (COALESCE(title, '') != '') DESC,  -- non-empty title wins
                    id DESC                            -- then latest id
         ) AS rn
  FROM plots
)
DELETE FROM plots
WHERE id IN (SELECT id FROM ranked WHERE rn > 1);

-- Step 2: add unique constraint to prevent future duplicates. The indexer
-- upserts rely on this constraint via onConflict (storyline_id, plot_index).
ALTER TABLE plots
ADD CONSTRAINT plots_storyline_plot_unique UNIQUE (storyline_id, plot_index);

-- Step 3: re-reconcile plot_count and last_plot_time from the surviving
-- rows. The WHERE clause skips storylines whose stored values already match,
-- so unchanged rows are not rewritten. Every storyline that had plots rows
-- still has at least one (Step 1 keeps exactly one row per group), so no
-- storyline silently drops out of this UPDATE because of the dedupe.
UPDATE storylines s
SET plot_count = sub.cnt,
    last_plot_time = sub.latest
FROM (
  SELECT storyline_id,
         COUNT(*) AS cnt,
         MAX(block_timestamp) AS latest
  FROM plots
  GROUP BY storyline_id
) sub
WHERE s.storyline_id = sub.storyline_id
  AND (s.plot_count IS DISTINCT FROM sub.cnt
       OR s.last_plot_time IS DISTINCT FROM sub.latest);
Loading