14 changes: 8 additions & 6 deletions app/layout.tsx
@@ -13,7 +13,7 @@ import { ThemeProvider } from "@/providers/ThemeProvider";
import { ConfettiProvider } from "@/providers/ConfettiProvider";
import Footer from "@/components/ui/Footer";
import { GeolocationProvider } from "@/providers/GeolocationProvider";

import { SynapseProvider } from "@/providers/SynapseProvider";
const queryClient = new QueryClient();

const config = createConfig({
@@ -60,11 +60,13 @@ export default function RootLayout({
modalSize="compact"
initialChain={filecoinCalibration.id}
>
<main className="flex flex-col min-h-screen">
<Navbar />
{children}
</main>
<Footer />
<SynapseProvider>
<main className="flex flex-col min-h-screen">
<Navbar />
{children}
</main>
<Footer />
</SynapseProvider>
</RainbowKitProvider>
</WagmiProvider>
</QueryClientProvider>
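The layout change above only mounts the new `SynapseProvider`; the provider file itself is not part of this diff. A minimal sketch of what such a provider could look like, assuming it exposes `{ synapse, warmStorageService }` the way `useSynapse()` is consumed in the hooks below, and constructing the services the way the pre-change hook code did (the real implementation may differ):

```tsx
"use client";

// Hypothetical sketch only: providers/SynapseProvider.tsx is not shown in this diff.
import { createContext, useContext, useEffect, useState, type ReactNode } from "react";
import { Synapse } from "@filoz/synapse-sdk";
import { WarmStorageService } from "@filoz/synapse-sdk/warm-storage";
import { useEthersProvider } from "@/hooks/useEthers";

type SynapseContextValue = {
  synapse: Synapse | null;
  warmStorageService: WarmStorageService | null;
};

const SynapseContext = createContext<SynapseContextValue>({
  synapse: null,
  warmStorageService: null,
});

export const SynapseProvider = ({ children }: { children: ReactNode }) => {
  const provider = useEthersProvider();
  const [value, setValue] = useState<SynapseContextValue>({
    synapse: null,
    warmStorageService: null,
  });

  useEffect(() => {
    if (!provider) return;
    (async () => {
      // Create the SDK instance once and share it app-wide, instead of
      // re-creating it inside every hook's queryFn/mutationFn.
      const synapse = await Synapse.create({ provider });
      const warmStorageService = new WarmStorageService(
        synapse.getProvider(),
        synapse.getWarmStorageAddress(),
        synapse.getPDPVerifierAddress()
      );
      setValue({ synapse, warmStorageService });
    })();
  }, [provider]);

  return (
    <SynapseContext.Provider value={value}>{children}</SynapseContext.Provider>
  );
};

export const useSynapse = () => useContext(SynapseContext);
```

The point of the refactor is that the Synapse instance is created once per app and every hook reads it from context.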
21 changes: 6 additions & 15 deletions components/DatasetsViewer.tsx
@@ -36,7 +36,7 @@ export const DatasetsViewer = () => {
(dataset: DataSet | undefined) =>
dataset && (
<div
key={dataset.railId}
key={dataset.clientDataSetId}
className="bg-gray-50 rounded-lg p-4 border border-gray-200"
>
<div className="flex justify-between items-start mb-4">
@@ -66,12 +66,13 @@ export const DatasetsViewer = () => {
className="cursor-pointer"
onClick={() => {
navigator.clipboard.writeText(
dataset.provider?.serviceURL || ""
dataset.provider?.products.PDP?.data.serviceURL ||
""
);
window.alert("PDP URL copied to clipboard");
}}
>
{dataset.provider?.serviceURL}
{dataset.provider?.products.PDP?.data.serviceURL}
</span>
</p>
</div>
@@ -118,11 +119,7 @@ export const DatasetsViewer = () => {
</div>
<div className="space-y-2">
{dataset.data.pieces.map((piece) => (
<PieceDetails
key={piece.pieceId}
piece={piece}
dataset={dataset!}
/>
<PieceDetails key={piece.pieceId} piece={piece} />
))}
</div>
</div>
@@ -145,13 +142,7 @@
/**
* Component to display a piece and a download button
*/
const PieceDetails = ({
piece,
dataset,
}: {
piece: DataSetPieceData;
dataset: DataSet;
}) => {
const PieceDetails = ({ piece }: { piece: DataSetPieceData }) => {
const filename = `piece-${piece.pieceCid}.png`;
const { downloadMutation } = useDownloadPiece(
piece.pieceCid.toString(),
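The accessor change above (`provider?.serviceURL` becoming `provider?.products.PDP?.data.serviceURL`) implies the provider record now nests its PDP endpoint under a products map. A sketch of the implied shape, inferred only from the fields this diff touches (the SDK's real type may carry more):

```ts
// Inferred from the accessors used in this PR; field names beyond those
// visible above are assumptions, not the SDK's actual type definition.
interface ProviderInfoSketch {
  id: number; // matched against dataset.providerId in useDatasets
  products: {
    PDP?: {
      data: {
        serviceURL: string; // PDP server endpoint used for getDataSet/download
      };
    };
  };
}

// Optional chaining mirrors the component: the PDP product may be absent.
const pdpUrl = (p: ProviderInfoSketch): string =>
  p.products.PDP?.data.serviceURL ?? "";
```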
11 changes: 4 additions & 7 deletions hooks/useBalances.ts
@@ -1,25 +1,22 @@
import { useQuery } from "@tanstack/react-query";
import { Synapse, TOKENS } from "@filoz/synapse-sdk";
import { useEthersProvider } from "@/hooks/useEthers";
import { TOKENS } from "@filoz/synapse-sdk";
import { useAccount } from "wagmi";
import { calculateStorageMetrics } from "@/utils/calculateStorageMetrics";
import { formatUnits } from "viem";
import { defaultBalances, UseBalancesResponse } from "@/types";
import { useSynapse } from "@/providers/SynapseProvider";

/**
* Hook to fetch and format wallet balances and storage metrics
*/
export const useBalances = () => {
const provider = useEthersProvider();
const { synapse } = useSynapse();
const { address } = useAccount();

const query = useQuery({
enabled: !!address && !!synapse,
queryKey: ["balances", address],
queryFn: async (): Promise<UseBalancesResponse> => {
if (!provider) throw new Error("Provider not found");

const synapse = await Synapse.create({ provider });
if (!synapse) throw new Error("Synapse not found");

// Fetch raw balances
const [filRaw, usdfcRaw, paymentsRaw] = await Promise.all([
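The hook now reads the shared `synapse` from `useSynapse()` instead of creating its own instance. The raw balances it fetches are bigint values; a small sketch of formatting them for display with viem's `formatUnits` (already imported above), assuming the usual 18 decimals for FIL and USDFC:

```ts
import { formatUnits } from "viem";

// Illustrative only; assumes 18 decimals, which holds for FIL and USDFC.
const formatBalance = (raw: bigint, decimals = 18): string =>
  Number(formatUnits(raw, decimals)).toFixed(4);

// formatBalance(1234500000000000000n) === "1.2345"
```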
128 changes: 83 additions & 45 deletions hooks/useDatasets.ts
@@ -1,100 +1,138 @@
import { WarmStorageService } from "@filoz/synapse-sdk/warm-storage";
import { useEthersSigner } from "@/hooks/useEthers";
"use client";

import { useQuery } from "@tanstack/react-query";
import { EnhancedDataSetInfo, Synapse, PDPServer } from "@filoz/synapse-sdk";
import { EnhancedDataSetInfo, PDPServer } from "@filoz/synapse-sdk";
import { useAccount } from "wagmi";
import { DataSet } from "@/types";
import { useSynapse } from "@/providers/SynapseProvider";

/**
* Hook to fetch and manage datasets
* @returns Query result containing datasets and their details
* Hook to fetch and manage user datasets from Filecoin storage
*
* @description This hook demonstrates a complex data fetching workflow:
* 1. Initialize Synapse and WarmStorage services
* 2. Fetch approved providers and user datasets in parallel
* 3. Map provider relationships and fetch provider details
* 4. Enrich datasets with provider information and PDP data
* 5. Handle errors gracefully while maintaining data integrity
* 6. Implement caching and background refresh strategies
*
* @returns React Query result containing enriched datasets with provider info
*
* @example
* const { data, isLoading, error } = useDatasets();
*
* if (data?.datasets?.length > 0) {
* const firstPieceCid = data.datasets[0]?.data?.pieces[0]?.pieceCid;
* console.log('Flag (First Piece CID):', firstPieceCid);
* }
*/
export const useDatasets = () => {
const signer = useEthersSigner();
const { address } = useAccount();
const { synapse, warmStorageService } = useSynapse();

return useQuery({
enabled: !!address,
queryKey: ["datasets", address],
queryFn: async () => {
if (!signer) throw new Error("Signer not found");
// STEP 1: Validate prerequisites
if (!synapse) throw new Error("Synapse not found");
if (!address) throw new Error("Address not found");
if (!warmStorageService)
throw new Error("Warm storage service not found");
// STEP 2: Use the WarmStorage service from SynapseProvider for dataset management

const synapse = await Synapse.create({
signer,
disableNonceManager: false,
});
// STEP 3: Fetch providers and datasets in parallel for efficiency
const [providerIds, datasets] = await Promise.all([
warmStorageService.getApprovedProviderIds(),
warmStorageService.getClientDataSetsWithDetails(address),
]);

// STEP 4: Create provider ID to address mapping from datasets
const providerIdToAddressMap = datasets.reduce((acc, dataset) => {
acc[dataset.providerId] = dataset.payee;
return acc;
}, {} as Record<number, string>);

// Initialize Pandora service
const warmStorageService = new WarmStorageService(
synapse.getProvider(),
synapse.getWarmStorageAddress(),
synapse.getPDPVerifierAddress()
// STEP 5: Fetch provider information with error handling
const providers = await Promise.all(
providerIds.map(async (providerId) => {
const providerAddress = providerIdToAddressMap[providerId];
if (!providerAddress) {
return null; // Skip if no address mapping exists
}
try {
return await synapse.getProviderInfo(providerId);
} catch (error) {
console.warn(`Failed to fetch provider ${providerId}:`, error);
return null; // Continue with other providers
}
})
);

// Fetch providers and datasets in parallel
const [providers, datasets] = await Promise.all([
warmStorageService.getAllApprovedProviders(),
warmStorageService.getClientDataSetsWithDetails(address),
]);
// Filter out failed provider requests
const filteredProviders = providers.filter(
(provider) => provider !== null
);

// Create a map of provider URLs for quick lookup
const providerUrlMap = new Map(
providers.map((provider) => [
provider.serviceProvider.toLowerCase(),
provider.serviceURL,
])
// STEP 6: Create provider ID to service URL mapping
const providerIdToServiceUrlMap = filteredProviders.reduce(
(acc, provider) => {
acc[provider.id] = provider.products.PDP?.data.serviceURL || "";
return acc;
},
{} as Record<string, string>
);

// Fetch dataset details in parallel with proper error handling
// STEP 7: Fetch detailed dataset information with PDP data
const datasetDetailsPromises = datasets.map(
async (dataset: EnhancedDataSetInfo) => {
const serviceURL = providerUrlMap.get(dataset.payee.toLowerCase());
// Find the full provider details
const provider = providers.find(
(p) =>
p.serviceProvider.toLowerCase() === dataset.payee.toLowerCase()
const serviceURL = providerIdToServiceUrlMap[dataset.providerId];
const provider = filteredProviders.find(
(p) => p.id === dataset.providerId
);

try {
// Connect to PDP server to get piece information
const pdpServer = new PDPServer(null, serviceURL || "");
const data = await pdpServer.getDataSet(
dataset.pdpVerifierDataSetId
);

return {
...dataset,
provider: provider,
data,
serviceURL: serviceURL,
data, // Contains pieces array with CIDs
} as DataSet;
} catch (error) {
console.error(
"Error getting dataset data for dataset : ",
dataset.pdpVerifierDataSetId,
console.warn(
`Failed to fetch dataset details for ${dataset.pdpVerifierDataSetId}:`,
error
);
// Return dataset without detailed data but preserve basic info
return {
...dataset,
} as DataSet;
provider: provider,
serviceURL: serviceURL,
} as unknown as DataSet;
}
}
);

// STEP 8: Wait for all dataset details to resolve
const datasetDataResults = await Promise.all(datasetDetailsPromises);

// Combine datasets with their details
// STEP 9: Map results back to original dataset order
const datasetsWithDetails = datasets.map((dataset) => {
const dataResult = datasetDataResults.find(
(result) =>
result.pdpVerifierDataSetId === dataset.pdpVerifierDataSetId
);
return dataResult;
});

return { datasets: datasetsWithDetails };
},
retry: false,
gcTime: 2 * 60 * 1000,
refetchInterval: 2 * 60 * 1000,
refetchOnWindowFocus: false,
refetchOnMount: false,
refetchOnReconnect: false,
});
};
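For reference, the enriched object this hook produces is typed as `DataSet` from `@/types`, which is not part of this diff. A sketch of its likely shape, inferred from the fields accessed in this hook and in `DatasetsViewer` (the real type may differ):

```ts
import type { EnhancedDataSetInfo } from "@filoz/synapse-sdk";

// Hypothetical sketch; the real DataSet type lives in @/types.
type DataSetSketch = EnhancedDataSetInfo & {
  // Provider record from synapse.getProviderInfo(providerId); null if the lookup failed
  provider?: {
    id: number;
    products: { PDP?: { data: { serviceURL: string } } };
  } | null;
  // Convenience copy of provider.products.PDP?.data.serviceURL
  serviceURL?: string;
  // Piece listing fetched from the provider's PDP server via pdpServer.getDataSet()
  data?: {
    pieces: { pieceId: number; pieceCid: { toString(): string } }[];
  };
};
```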
23 changes: 6 additions & 17 deletions hooks/useDownloadPiece.ts
@@ -1,29 +1,18 @@
import { useMutation } from "@tanstack/react-query";
import { Synapse } from "@filoz/synapse-sdk";
import { useEthersSigner } from "@/hooks/useEthers";
import { useAccount } from "wagmi";
import { config } from "@/config";
import { useSynapse } from "@/providers/SynapseProvider";

/**
* Hook to download a piece from the Filecoin network using Synapse.
*/
export const useDownloadPiece = (commp: string, filename: string) => {
const signer = useEthersSigner();
const { address, chainId } = useAccount();
const { synapse } = useSynapse();

const mutation = useMutation({
mutationKey: ["download-piece", address, commp, filename],
// Keep keys serializable to avoid circular JSON errors
mutationKey: ["download-piece", commp, filename],
mutationFn: async () => {
if (!signer) throw new Error("Signer not found");
if (!address) throw new Error("Address not found");
if (!chainId) throw new Error("Chain ID not found");

// 1) Create Synapse instance
const synapse = await Synapse.create({
provider: signer.provider,
withCDN: config.withCDN,
});
if (!synapse) throw new Error("Synapse not found");

// 2) Download file
const uint8ArrayBytes = await synapse.storage.download(commp);

const file = new File([uint8ArrayBytes as BlobPart], filename);
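The remainder of the mutation is collapsed behind the fold above. For completeness, a common way to finish a browser download from that `File` object — an illustrative sketch, not necessarily the hidden code:

```ts
// Standard Web API pattern; assumes it runs in the browser.
const url = URL.createObjectURL(file);
const a = document.createElement("a");
a.href = url;
a.download = filename;
document.body.appendChild(a);
a.click();
a.remove();
URL.revokeObjectURL(url);
```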
26 changes: 8 additions & 18 deletions hooks/useFileUpload.ts
@@ -1,12 +1,10 @@
import { useState } from "react";
import { useMutation } from "@tanstack/react-query";
import { Synapse } from "@filoz/synapse-sdk";
import { useEthersSigner } from "@/hooks/useEthers";
import { useConfetti } from "@/hooks/useConfetti";
import { useAccount } from "wagmi";
import { preflightCheck } from "@/utils/preflightCheck";
import { getDataset } from "@/utils/getDataset";
import { config } from "@/config";
import { useSynapse } from "@/providers/SynapseProvider";
import { Synapse } from "@filoz/synapse-sdk";

export type UploadedInfo = {
fileName?: string;
@@ -22,16 +20,14 @@ export const useFileUpload = () => {
const [progress, setProgress] = useState(0);
const [status, setStatus] = useState("");
const [uploadedInfo, setUploadedInfo] = useState<UploadedInfo | null>(null);

const signer = useEthersSigner();
const { synapse } = useSynapse();
const { triggerConfetti } = useConfetti();
const { address, chainId } = useAccount();
const { address } = useAccount();
const mutation = useMutation({
mutationKey: ["file-upload", address, chainId],
mutationKey: ["file-upload", address],
mutationFn: async (file: File) => {
if (!signer) throw new Error("Signer not found");
if (!synapse) throw new Error("Synapse not found");
if (!address) throw new Error("Address not found");
if (!chainId) throw new Error("Chain ID not found");
setProgress(0);
setUploadedInfo(null);
setStatus("🔄 Initializing file upload to Filecoin...");
@@ -42,16 +38,11 @@
const uint8ArrayBytes = new Uint8Array(arrayBuffer);

// 3) Create Synapse instance
const synapse = await Synapse.create({
signer,
disableNonceManager: false,
withCDN: config.withCDN,
});

// 4) Get dataset
const { providerId } = await getDataset(synapse, address);
const datasets = await synapse.storage.findDataSets(address);
// 5) Check if we have a dataset
const datasetExists = !!providerId;
const datasetExists = datasets.length > 0;
// Include dataset creation fee if no dataset exists
const includeDatasetCreationFee = !datasetExists;

@@ -71,7 +62,6 @@

// 7) Create storage service
const storageService = await synapse.createStorage({
providerId,
callbacks: {
onDataSetResolved: (info) => {
console.log("Dataset resolved:", info);
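The hook's return statement is also collapsed, so the exact names it exposes are not visible here. Assuming it returns the mutation and the progress state defined above, a component might consume it roughly like this (illustrative only):

```tsx
import { useFileUpload } from "@/hooks/useFileUpload";

// Destructured names are assumptions; only mutation, progress, status and
// uploadedInfo are visible in the hook body above.
const FileUploader = () => {
  const { mutation, progress, status, uploadedInfo } = useFileUpload();

  return (
    <div>
      <input
        type="file"
        onChange={(e) => {
          const file = e.target.files?.[0];
          if (file) mutation.mutate(file); // runs the upload through the shared Synapse instance
        }}
      />
      <p>{status}</p>
      <progress value={progress} max={100} />
      {uploadedInfo?.fileName && <p>Uploaded: {uploadedInfo.fileName}</p>}
    </div>
  );
};
```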