-
Notifications
You must be signed in to change notification settings - Fork 619
[BLD-108] Dashboard: Update Claim conditions UI to handle very large snapshots #7846
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,5 +1,4 @@ | ||
| import { useQuery } from "@tanstack/react-query"; | ||
| import pLimit from "p-limit"; | ||
| import { useQuery, useQueryClient } from "@tanstack/react-query"; | ||
| import Papa from "papaparse"; | ||
| import { useCallback, useState } from "react"; | ||
| import { isAddress, type ThirdwebClient, ZERO_ADDRESS } from "thirdweb"; | ||
|
|
@@ -95,6 +94,7 @@ export function useCsvUpload< | |
| // Always gonna need the wallet address | ||
| T extends { address: string }, | ||
| >(props: Props<T>) { | ||
| const queryClient = useQueryClient(); | ||
| const [rawData, setRawData] = useState< | ||
| T[] | Array<T & { [key in string]: unknown }> | ||
| >(props.defaultRawData || []); | ||
|
|
@@ -132,16 +132,29 @@ export function useCsvUpload< | |
| [props.csvParser], | ||
| ); | ||
|
|
||
| const [normalizeProgress, setNormalizeProgress] = useState({ | ||
| total: 0, | ||
| current: 0, | ||
| }); | ||
|
|
||
| const normalizeQuery = useQuery({ | ||
| queryFn: async () => { | ||
| const limit = pLimit(50); | ||
| const results = await Promise.all( | ||
| rawData.map((item) => { | ||
| return limit(() => | ||
| const batchSize = 50; | ||
| const results = []; | ||
| for (let i = 0; i < rawData.length; i += batchSize) { | ||
| const batch = rawData.slice(i, i + batchSize); | ||
| setNormalizeProgress({ | ||
| total: rawData.length, | ||
| current: i, | ||
| }); | ||
| const batchResults = await Promise.all( | ||
| batch.map((item) => | ||
| checkIsAddress({ item: item, thirdwebClient: props.client }), | ||
| ); | ||
| }), | ||
| ); | ||
| ), | ||
| ); | ||
| results.push(...batchResults); | ||
| } | ||
|
|
||
| return { | ||
| invalidFound: !!results.find((item) => !item?.isValid), | ||
| result: processAirdropData(results), | ||
|
|
@@ -153,12 +166,18 @@ export function useCsvUpload< | |
|
|
||
| const removeInvalid = useCallback(() => { | ||
| const filteredData = normalizeQuery.data?.result.filter( | ||
| ({ isValid }) => isValid, | ||
| (d) => d.isValid && d.resolvedAddress !== ZERO_ADDRESS, | ||
| ); | ||
| // double type assertion is safe here because we don't really use this variable (only check for its length) | ||
| // Also filteredData's type is the superset of T[] | ||
| setRawData(filteredData as unknown as T[]); | ||
| }, [normalizeQuery.data?.result]); | ||
|
|
||
| if (filteredData && normalizeQuery.data) { | ||
| // Directly update the query result instead of setting new state to avoid triggering refetch | ||
| queryClient.setQueryData(["snapshot-check-isAddress", rawData], { | ||
| ...normalizeQuery.data, | ||
| result: filteredData, | ||
| invalidFound: false, // Since we removed all invalid items | ||
| }); | ||
| } | ||
| }, [normalizeQuery.data, queryClient, rawData]); | ||
|
|
||
|
Comment on lines
167
to
181
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 💡 Verification agent ❓ Verification inconclusive. Heavy queryKey with rawData array may hurt performance on 100k rows. Putting the entire rawData array in the queryKey forces TanStack Query to hash/compare a huge structure, which can be costly. Prefer a compact, stable key and trigger refetches explicitly. Two options:
Example minimal change: - queryKey: ["snapshot-check-isAddress", rawData],
+ queryKey: ["snapshot-check-isAddress", rawData.length],And trigger refetch when rawData changes: useEffect(() => {
if (rawData.length > 0) {
normalizeQuery.refetch();
}
}, [rawData.length]); // keep dependency lightweight#!/bin/bash
set -eux
# Locate the useCsvUpload.ts file
file=$(fd --full-path useCsvUpload.ts)
echo "Inspecting file: $file"
# Show lines around the useQuery declaration to inspect queryKey
rg -n -C3 "useQuery" "$file"
# Show all occurrences of queryKey usage in this file
rg -n "queryKey" "$file"
# Show where rawData is defined or referenced
rg -n "rawData" "$file"

Action Required: Optimize queryKey for Large rawData

Including the full rawData array in the queryKey forces expensive deep comparison of a potentially huge structure on every render. Affected location:
Suggested changes: - // heavy key: deep-compares entire array
- queryClient.setQueryData(["snapshot-check-isAddress", rawData], {
+ // lightweight key: only uses array length
+ queryClient.setQueryData(["snapshot-check-isAddress", rawData.length], {
...normalizeQuery.data,
result: filteredData,
invalidFound: false,
});And ensure the query refetches when useEffect(() => {
if (rawData.length > 0) {
normalizeQuery.refetch();
}
}, [rawData.length]); // lightweight dependencyThis keeps your cache key compact and stable while still updating the query whenever the CSV payload changes. 🤖 Prompt for AI Agents |
||
| const processData = useCallback( | ||
| (data: T[]) => { | ||
|
|
@@ -181,5 +200,6 @@ export function useCsvUpload< | |
| removeInvalid, | ||
| reset, | ||
| setFiles, | ||
| normalizeProgress, | ||
| }; | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🛠️ Refactor suggestion
Reset normalizeProgress on reset to avoid stale progress UI
Currently reset() clears rawData/noCsv but normalizeProgress remains at the last values, which can render misleading progress numbers on subsequent uploads. Initialize a constant and reuse it in both state init and reset.
Follow-up change outside the selected range (add near the hook top and use in reset):
🤖 Prompt for AI Agents