1 change: 0 additions & 1 deletion apps/dashboard/package.json
@@ -43,7 +43,6 @@
    "next-themes": "^0.4.6",
    "nextjs-toploader": "^1.6.12",
    "nuqs": "^2.4.3",
-   "p-limit": "^6.2.0",
    "papaparse": "^5.5.3",
    "pluralize": "^8.0.0",
    "posthog-js": "1.256.1",
48 changes: 34 additions & 14 deletions apps/dashboard/src/@/hooks/useCsvUpload.ts
@@ -1,5 +1,4 @@
- import { useQuery } from "@tanstack/react-query";
- import pLimit from "p-limit";
+ import { useQuery, useQueryClient } from "@tanstack/react-query";
import Papa from "papaparse";
import { useCallback, useState } from "react";
import { isAddress, type ThirdwebClient, ZERO_ADDRESS } from "thirdweb";
@@ -95,6 +94,7 @@ export function useCsvUpload<
  // Always gonna need the wallet address
  T extends { address: string },
>(props: Props<T>) {
+ const queryClient = useQueryClient();
  const [rawData, setRawData] = useState<
    T[] | Array<T & { [key in string]: unknown }>
  >(props.defaultRawData || []);
@@ -132,16 +132,29 @@
    [props.csvParser],
  );

+ const [normalizeProgress, setNormalizeProgress] = useState({
+   total: 0,
+   current: 0,
+ });

Comment on lines +135 to +139 (Contributor):
🛠️ Refactor suggestion

Reset normalizeProgress on reset to avoid stale progress UI

Currently reset() clears rawData/noCsv but normalizeProgress remains at the last values, which can render misleading progress numbers on subsequent uploads. Initialize a constant and reuse it in both state init and reset.

Follow-up change outside the selected range (add near the hook top and use in reset):

const initialProgress = { total: 0, current: 0 };
const [normalizeProgress, setNormalizeProgress] = useState(initialProgress);

// inside reset():
setNormalizeProgress(initialProgress);
🤖 Prompt for AI Agents
In apps/dashboard/src/@/hooks/useCsvUpload.ts around lines 135 to 139,
normalizeProgress is initialized but not reset by reset(), causing stale
progress UI; define a shared initialProgress constant at the top of the hook
(e.g. const initialProgress = { total: 0, current: 0 }), use it to initialize
useState (useState(initialProgress)) and call
setNormalizeProgress(initialProgress) inside reset() so progress is cleared when
resetting.
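For reference, a minimal self-contained sketch of the suggested pattern, assuming React and a trimmed-down hook (the hook name useNormalizeProgressExample is illustrative, and the real reset() in useCsvUpload also clears rawData and noCsv):

import { useCallback, useState } from "react";

// Shared initial value so the state initializer and reset() always agree.
const initialProgress = { total: 0, current: 0 };

export function useNormalizeProgressExample() {
  const [normalizeProgress, setNormalizeProgress] = useState(initialProgress);

  // Trimmed-down reset: only the progress-clearing part is shown here;
  // the real reset() would also clear rawData and noCsv.
  const reset = useCallback(() => {
    setNormalizeProgress(initialProgress);
  }, []);

  return { normalizeProgress, setNormalizeProgress, reset };
}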

  const normalizeQuery = useQuery({
    queryFn: async () => {
-     const limit = pLimit(50);
-     const results = await Promise.all(
-       rawData.map((item) => {
-         return limit(() =>
+     const batchSize = 50;
+     const results = [];
+     for (let i = 0; i < rawData.length; i += batchSize) {
+       const batch = rawData.slice(i, i + batchSize);
+       setNormalizeProgress({
+         total: rawData.length,
+         current: i,
+       });
+       const batchResults = await Promise.all(
+         batch.map((item) =>
            checkIsAddress({ item: item, thirdwebClient: props.client }),
-         );
-       }),
-     );
+         ),
+       );
+       results.push(...batchResults);
+     }

      return {
        invalidFound: !!results.find((item) => !item?.isValid),
        result: processAirdropData(results),
@@ -153,12 +166,18 @@ export function useCsvUpload<

  const removeInvalid = useCallback(() => {
    const filteredData = normalizeQuery.data?.result.filter(
-     ({ isValid }) => isValid,
+     (d) => d.isValid && d.resolvedAddress !== ZERO_ADDRESS,
    );
-   // double type assertion is save here because we don't really use this variable (only check for its length)
-   // Also filteredData's type is the superset of T[]
-   setRawData(filteredData as unknown as T[]);
- }, [normalizeQuery.data?.result]);
+
+   if (filteredData && normalizeQuery.data) {
+     // Directly update the query result instead of setting new state to avoid triggering refetch
+     queryClient.setQueryData(["snapshot-check-isAddress", rawData], {
+       ...normalizeQuery.data,
+       result: filteredData,
+       invalidFound: false, // Since we removed all invalid items
+     });
+   }
+ }, [normalizeQuery.data, queryClient, rawData]);

Comment on lines 167 to 181 (Contributor):
💡 Verification agent

❓ Verification inconclusive

Heavy queryKey with rawData array may hurt performance on 100k rows

Putting the entire rawData array in the queryKey forces TanStack Query to hash/compare a huge structure, which can be costly. Prefer a compact, stable key and trigger refetches explicitly.

Two options:

  • Simple: use only the length (plus a version) in the key and refetch on rawData changes.
  • Accurate: compute a lightweight hash (e.g., of first/last N rows + length).

Example minimal change:

-    queryKey: ["snapshot-check-isAddress", rawData],
+    queryKey: ["snapshot-check-isAddress", rawData.length],

And trigger refetch when rawData changes:

useEffect(() => {
  if (rawData.length > 0) {
    normalizeQuery.refetch();
  }
}, [rawData.length]); // keep dependency lightweight

#!/bin/bash
set -eux

# Locate the useCsvUpload.ts file
file=$(fd --full-path useCsvUpload.ts)

echo "Inspecting file: $file"

# Show lines around the useQuery declaration to inspect queryKey
rg -n -C3 "useQuery" "$file"

# Show all occurrences of queryKey usage in this file
rg -n "queryKey" "$file"

# Show where rawData is defined or referenced
rg -n "rawData" "$file"

Action Required: Optimize queryKey for Large rawData

Including the full rawData array in your queryKey forces TanStack Query to deep-compare/hash a potentially huge structure on each operation, which can severely impact performance for ~100k-row CSVs.

Affected location:

  • File: apps/dashboard/src/@/hooks/useCsvUpload.ts
  • Lines: 167–181

Suggested changes:

-    // heavy key: deep-compares entire array
-    queryClient.setQueryData(["snapshot-check-isAddress", rawData], {
+    // lightweight key: only uses array length
+    queryClient.setQueryData(["snapshot-check-isAddress", rawData.length], {
       ...normalizeQuery.data,
       result: filteredData,
       invalidFound: false,
     });

And ensure the query refetches when rawData changes:

useEffect(() => {
  if (rawData.length > 0) {
    normalizeQuery.refetch();
  }
}, [rawData.length]); // lightweight dependency

This keeps your cache key compact and stable while still updating the query whenever the CSV payload changes.

🤖 Prompt for AI Agents
In apps/dashboard/src/@/hooks/useCsvUpload.ts around lines 167–181 the TanStack
Query key currently includes the full rawData array which causes expensive deep
hashing for large CSVs; replace the heavy queryKey that contains rawData with a
lightweight stable identifier (for example rawData.length or a short hash) when
calling useQuery and setQueryData, and update setQueryData to use that new
compact key; additionally add a useEffect that triggers normalizeQuery.refetch()
when rawData.length changes (or when the chosen lightweight identifier changes)
so the query refreshes when the CSV payload changes.
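For reference, a minimal sketch of the "lightweight hash" option, assuming a small helper that fingerprints the CSV by its length plus a few head/tail addresses (the helper name csvFingerprint and the sample size are illustrative, not part of the PR):

// Fingerprint the CSV by length plus the first and last few addresses
// instead of keying the query on the full array.
function csvFingerprint(rows: Array<{ address: string }>, sample = 5): string {
  const head = rows.slice(0, sample).map((r) => r.address);
  const tail = rows.slice(-sample).map((r) => r.address);
  return `${rows.length}:${head.join(",")}|${tail.join(",")}`;
}

// Assumed usage inside the hook, mirroring the existing query and setQueryData calls:
// queryKey: ["snapshot-check-isAddress", csvFingerprint(rawData)]
// queryClient.setQueryData(["snapshot-check-isAddress", csvFingerprint(rawData)], updated)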

  const processData = useCallback(
    (data: T[]) => {
@@ -181,5 +200,6 @@ export function useCsvUpload<
    removeInvalid,
    reset,
    setFiles,
+   normalizeProgress,
  };
}
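The hook now also exposes normalizeProgress to callers; the PR does not show a consumer, but a hypothetical usage could look like the following sketch (component name, prop shape, and markup are assumptions, not code from this PR):

// Hypothetical consumer of the normalizeProgress value returned by useCsvUpload.
function NormalizeProgressLabel(props: {
  normalizeProgress: { total: number; current: number };
}) {
  const { total, current } = props.normalizeProgress;
  if (total === 0) {
    return null; // nothing is being validated yet
  }
  const percent = Math.min(100, Math.round((current / total) * 100));
  return (
    <p className="text-sm text-muted-foreground">
      Validating addresses: {current}/{total} ({percent}%)
    </p>
  );
}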
@@ -27,28 +27,31 @@ export const ClaimerSelection = () => {
    setOpenSnapshotIndex: setOpenIndex,
    isAdmin,
    claimConditionType,
+   phaseSnapshots,
+   setPhaseSnapshot,
  } = useClaimConditionsFormContext();

  const handleClaimerChange = (value: string) => {
    const val = value as "any" | "specific" | "overrides";

    if (val === "any") {
-     form.setValue(`phases.${phaseIndex}.snapshot`, undefined);
+     setPhaseSnapshot(phaseIndex, undefined);
    } else {
      if (val === "specific") {
        form.setValue(`phases.${phaseIndex}.maxClaimablePerWallet`, 0);
      }
      if (val === "overrides" && field.maxClaimablePerWallet !== 1) {
        form.setValue(`phases.${phaseIndex}.maxClaimablePerWallet`, 1);
      }
-     form.setValue(`phases.${phaseIndex}.snapshot`, []);
+     setPhaseSnapshot(phaseIndex, []);
      setOpenIndex(phaseIndex);
    }
  };

  let helperText: React.ReactNode;

  const disabledSnapshotButton = isAdmin && formDisabled;
+ const snapshot = phaseSnapshots[phaseIndex];

  if (dropType === "specific") {
    helperText = (
@@ -87,10 +90,7 @@ export const ClaimerSelection = () => {

  return (
    <FormFieldSetup
-     errorMessage={
-       form.getFieldState(`phases.${phaseIndex}.snapshot`, form.formState)
-         ?.error?.message
-     }
+     errorMessage={undefined}
      helperText={helperText}
      label={label}
      isRequired={false}
@@ -117,7 +117,7 @@
        )}

        {/* Edit or See Snapshot */}
-       {field.snapshot ? (
+       {snapshot ? (
          <div className="flex items-center gap-3">
            {/* disable the "Edit" button when form is disabled, but not when it's a "See" button */}
            <Button
@@ -133,17 +133,16 @@
            <div
              className={cn(
                "flex gap-2 items-center",
-               field.snapshot?.length === 0
+               snapshot?.length === 0
                  ? "text-muted-foreground"
                  : "text-green-600 dark:text-green-500",
                disabledSnapshotButton ? "opacity-50" : "",
              )}
            >
              <div className="size-2 bg-current rounded-full" />
              <span className="text-sm">
-               {field.snapshot?.length}{" "}
-               {field.snapshot?.length === 1 ? "address" : "addresses"} in
-               snapshot
+               {snapshot?.length}{" "}
+               {snapshot?.length === 1 ? "address" : "addresses"} in snapshot
              </span>
            </div>
          </div>