diff --git a/.env.tpl b/.env.tpl
index 9bd0bafb58..7346843dfb 100644
--- a/.env.tpl
+++ b/.env.tpl
@@ -32,10 +32,9 @@ DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlh
 # Postgres Database
 DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres
 
-# Cluster
-CLUSTER_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
-CLUSTER_SERVICE =
-CLUSTER_API_URL = http://127.0.0.1:9094
+# Pickup (can be mocked with ipfs-cluster for local dev)
+PICKUP_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
+PICKUP_API_URL = http://127.0.0.1:9094
 
 # Maintenance Mode
 MAINTENANCE_MODE = rw
diff --git a/packages/api/README.md b/packages/api/README.md
index bcb9b0f1f7..288ba1ef85 100644
--- a/packages/api/README.md
+++ b/packages/api/README.md
@@ -66,8 +66,7 @@ wrangler secret put MAGIC_SECRET_KEY --env production # Get from magic.link acco
 wrangler secret put SALT --env production # open `https://csprng.xyz/v1/api` in the browser and use the value of `Data`
 wrangler secret put SENTRY_DSN --env USER # Get from Sentry
 wrangler secret put DATABASE_TOKEN --env production # Get from database account
-wrangler secret put CLUSTER_BASIC_AUTH_TOKEN --env production # Get from nft.storage vault in 1password
-wrangler secret put CLUSTER_SERVICE --env production # Which cluster should be used. Options 'IpfsCluster' / 'IpfsCluster2' / 'IpfsCluster3'
+wrangler secret put PICKUP_BASIC_AUTH_TOKEN --env production # Get from nft.storage vault in 1password
 wrangler secret put MAILCHIMP_API_KEY --env production # Get from mailchimp
 wrangler secret put LOGTAIL_TOKEN --env production # Get from Logtail
 wrangler secret put METAPLEX_AUTH_TOKEN --env production # User ID meteplex endpoint should use (not required for dev)
@@ -128,3 +127,12 @@ see: https://github.com/web3-storage/linkdex-api
 We write Uploaded CARs to both S3 and R2 in parallel. The R2 Bucket is bound to the worker as `env.CARPARK`. The API docs for an R2Bucket instance are here: https://developers.cloudflare.com/r2/runtime-apis/#bucket-method-definitions
 
 We key our R2 uploads by CAR CID, and record them in the DB under `upload.backup_urls`. The URL prefix for CARs in R2 is set by the `env.CARPARK_URL`. This is currently pointing to a subdomain on web3.storage which we could configure when we need direct http access to the bucket, but does not exist at time of writing.
+
+## Pickup
+
+We use [pickup](https://github.com/web3-storage/pickup) to fetch DAGs from IPFS and save them to a bucket where E-IPFS can index them. It provides a subset of the ipfs-cluster api for `GET /pins` and `POST /pins` that we use as the backend for the [pinning service](https://ipfs.github.io/pinning-services-api-spec/) implementation.
+
+- `PICKUP_URL` defines the service endpoint to use, and is set in `wrangler.toml`.
+- `PICKUP_BASIC_AUTH_TOKEN` must be set as a secret in the env.
+
+For local dev, we use a local ipfs-cluster container for the same service.
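For orientation, a minimal sketch (editorial, not part of the diff) of the round trip the worker makes against pickup, using the same `@nftstorage/ipfs-cluster` client the API already uses in `src/cluster.js`. The `pin` and `status` method names are assumed from that client, and the URL/token are the local-dev values above:

import { Cluster } from '@nftstorage/ipfs-cluster'

// pickup exposes a subset of the ipfs-cluster HTTP API, so the existing
// cluster client can be pointed at it unchanged
const pickup = new Cluster('http://127.0.0.1:9094', {
  headers: { Authorization: 'Basic dGVzdDp0ZXN0' }, // PICKUP_BASIC_AUTH_TOKEN
})

const cid = 'bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi'
await pickup.pin(cid) // POST /pins/{cid}: ask pickup to fetch the DAG from IPFS
const res = await pickup.status(cid) // GET /pins/{cid}: poll pin progress
console.log(Object.values(res.peerMap).map((i) => i.status)) // e.g. ['pin_queued']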
diff --git a/packages/api/docker/run-with-dependencies.sh b/packages/api/docker/run-with-dependencies.sh
index 514eb0b411..bb97e65aaf 100755
--- a/packages/api/docker/run-with-dependencies.sh
+++ b/packages/api/docker/run-with-dependencies.sh
@@ -82,7 +82,7 @@ export DATABASE_CONNECTION="postgres://postgres:postgres@$DB_HOST_PORT/postgres"
 
 # The vars below are used to configure the service
 export DATABASE_URL="http://$POSTGREST_HOST_PORT"
-export CLUSTER_API_URL="http://$CLUSTER_HOST_PORT"
+export PICKUP_URL="http://$CLUSTER_HOST_PORT"
 export S3_ENDPOINT="http://$MINIO_HOST_PORT"
 
 echo "services started."
@@ -90,7 +90,7 @@ echo "environment overrides:"
 echo "MINIO_API_PORT=${MINIO_API_PORT}"
 echo "DATABASE_CONNECTION=${DATABASE_CONNECTION}"
 echo "DATABASE_URL=${DATABASE_URL}"
-echo "CLUSTER_API_URL=${CLUSTER_API_URL}"
+echo "PICKUP_URL=${PICKUP_URL}"
 echo "S3_ENDPOINT=${S3_ENDPOINT}"
 echo
 
diff --git a/packages/api/src/bindings.d.ts b/packages/api/src/bindings.d.ts
index 2ccc992437..8ed098ada0 100644
--- a/packages/api/src/bindings.d.ts
+++ b/packages/api/src/bindings.d.ts
@@ -55,11 +55,11 @@ export interface ServiceConfiguration {
   /** UCAN private signing key */
   PRIVATE_KEY: string
 
-  /** API url for active IPFS cluster endpoint */
-  CLUSTER_API_URL: string
+  /** API url for pickup endpoint */
+  PICKUP_URL: string
 
-  /** Auth token for IPFS culster */
-  CLUSTER_BASIC_AUTH_TOKEN: string
+  /** Auth token for pickup pinning service */
+  PICKUP_BASIC_AUTH_TOKEN: string
 
   /** Postgrest endpoint URL */
   DATABASE_URL: string
diff --git a/packages/api/src/cluster.js b/packages/api/src/cluster.js
index f9ac419313..8839e57555 100644
--- a/packages/api/src/cluster.js
+++ b/packages/api/src/cluster.js
@@ -2,11 +2,12 @@ import { Cluster } from '@nftstorage/ipfs-cluster'
 import { getServiceConfig } from './config.js'
 import { HTTPError } from './errors.js'
 
-const { CLUSTER_API_URL, CLUSTER_BASIC_AUTH_TOKEN } = getServiceConfig()
+// pickup provides a cluster-compatible API for GET /pins & POST /pins
+const { PICKUP_URL, PICKUP_BASIC_AUTH_TOKEN } = getServiceConfig()
 
-const client = new Cluster(CLUSTER_API_URL, {
+const client = new Cluster(PICKUP_URL, {
   headers: {
-    Authorization: `Basic ${CLUSTER_BASIC_AUTH_TOKEN}`,
+    Authorization: `Basic ${PICKUP_BASIC_AUTH_TOKEN}`,
   },
 })
 
@@ -113,3 +114,23 @@ export function toPSAStatus(status) {
   if (pinInfos.some((i) => i.status === 'pin_queued')) return 'queued'
   return 'failed'
 }
+
+/**
+ * @param {import('@nftstorage/ipfs-cluster').API.StatusResponse} status
+ * @returns {import('./utils/db-client.js').definitions["pin"]["status"]} status
+ */
+export function toDBPinStatus(status) {
+  const pinInfos = Object.values(status.peerMap)
+  if (pinInfos.some((i) => i.status === 'pinned')) return 'Pinned'
+  if (pinInfos.some((i) => i.status === 'pinning')) return 'Pinning'
+  if (pinInfos.some((i) => i.status === 'pin_queued')) return 'PinQueued'
+  return 'PinError'
+}
+
+/**
+ * @param {string} cid
+ * @param {import("@nftstorage/ipfs-cluster").API.StatusOptions} [options]
+ */
+export function status(cid, options) {
+  return client.status(cid, options)
+}
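To make the new status mapping concrete, an illustrative call (not part of the diff; the import path is assumed) showing how `toDBPinStatus` folds a multi-peer `StatusResponse` into a single DB enum value. A 'pinned' report from any peer wins, and anything unrecognised falls through to 'PinError':

import { toDBPinStatus } from './src/cluster.js'

// mixed peer states: 'pinned' anywhere makes the whole pin 'Pinned'
toDBPinStatus({
  peerMap: {
    'peer-1': { status: 'pin_queued' },
    'peer-2': { status: 'pinned' },
  },
}) // => 'Pinned'

// a state outside the mapping, e.g. 'unpinned', reports as 'PinError'
toDBPinStatus({ peerMap: { 'peer-1': { status: 'unpinned' } } }) // => 'PinError'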
diff --git a/packages/api/src/config.js b/packages/api/src/config.js
index ea46c2de50..f40b17fd37 100644
--- a/packages/api/src/config.js
+++ b/packages/api/src/config.js
@@ -9,16 +9,6 @@ import {
  * @typedef {import('./bindings').RuntimeEnvironmentName} RuntimeEnvironmentName
  */
 
-/**
- * If the CLUSTER_SERVICE variable is set, the service URL will be resolved from here.
- *
- * @type Record<string, string> */
-const CLUSTER_SERVICE_URLS = {
-  IpfsCluster: 'https://nft.storage.ipfscluster.io/api/',
-  IpfsCluster2: 'https://nft2.storage.ipfscluster.io/api/',
-  IpfsCluster3: 'https://nft3.storage.ipfscluster.io/api/',
-}
-
 /**
  * Load a {@link ServiceConfiguration} from the global environment.
  * @returns {ServiceConfiguration}
@@ -37,22 +27,6 @@ export const getServiceConfig = () => {
  * @returns {ServiceConfiguration}
  */
 export function serviceConfigFromVariables(vars) {
-  let clusterUrl
-  if (vars.CLUSTER_SERVICE) {
-    clusterUrl = CLUSTER_SERVICE_URLS[vars.CLUSTER_SERVICE]
-    if (!clusterUrl) {
-      throw new Error(`unknown cluster service: ${vars.CLUSTER_SERVICE}`)
-    }
-  }
-  if (vars.CLUSTER_API_URL) {
-    clusterUrl = vars.CLUSTER_API_URL
-  }
-  if (!clusterUrl || (vars.CLUSTER_SERVICE && vars.CLUSTER_API_URL)) {
-    throw new Error(
-      `One of CLUSTER_SERVICE or CLUSTER_API_URL must be set in ENV`
-    )
-  }
-
   return {
     ENV: parseRuntimeEnv(vars.ENV),
     DEBUG: boolValue(vars.DEBUG),
@@ -65,8 +39,8 @@ export function serviceConfigFromVariables(vars) {
     CARPARK_URL: vars.CARPARK_URL,
     DATABASE_URL: vars.DATABASE_URL,
     DATABASE_TOKEN: vars.DATABASE_TOKEN,
-    CLUSTER_API_URL: clusterUrl,
-    CLUSTER_BASIC_AUTH_TOKEN: vars.CLUSTER_BASIC_AUTH_TOKEN,
+    PICKUP_URL: vars.PICKUP_URL,
+    PICKUP_BASIC_AUTH_TOKEN: vars.PICKUP_BASIC_AUTH_TOKEN,
     MAGIC_SECRET_KEY: vars.MAGIC_SECRET_KEY,
     SENTRY_DSN: vars.SENTRY_DSN,
     METAPLEX_AUTH_TOKEN: vars.METAPLEX_AUTH_TOKEN,
@@ -114,6 +88,8 @@ export function loadConfigVariables() {
     'DUDEWHERE',
     'CARPARK',
     'CARPARK_URL',
+    'PICKUP_URL',
+    'PICKUP_BASIC_AUTH_TOKEN',
     'DATABASE_URL',
     'DATABASE_TOKEN',
     'MAGIC_SECRET_KEY',
@@ -122,7 +98,6 @@
     'LOGTAIL_TOKEN',
     'PRIVATE_KEY',
     'SENTRY_DSN',
-    'CLUSTER_BASIC_AUTH_TOKEN',
     'MAINTENANCE_MODE',
     'S3_REGION',
     'S3_ACCESS_KEY_ID',
@@ -145,8 +120,6 @@ export function loadConfigVariables() {
   }
 
   const optional = [
-    'CLUSTER_SERVICE',
-    'CLUSTER_API_URL',
     'LINKDEX_URL',
     'S3_ENDPOINT',
     'SLACK_USER_REQUEST_WEBHOOK_URL',
diff --git a/packages/api/src/routes/pins-add.js b/packages/api/src/routes/pins-add.js
index 52acfea840..e1d20cd0fe 100644
--- a/packages/api/src/routes/pins-add.js
+++ b/packages/api/src/routes/pins-add.js
@@ -51,6 +51,12 @@ export async function pinsAdd(event, ctx) {
   })
 
   const upload = await db.createUpload({
+    pins: [
+      {
+        status: 'PinQueued',
+        service: 'ElasticIpfs', // via pickup
+      },
+    ],
     type: 'Remote',
     content_cid: cid.contentCid,
     source_cid: cid.sourceCid,
diff --git a/packages/api/src/routes/pins-get.js b/packages/api/src/routes/pins-get.js
index b7676e1ed1..8077e9fd38 100644
--- a/packages/api/src/routes/pins-get.js
+++ b/packages/api/src/routes/pins-get.js
@@ -1,4 +1,5 @@
-import { checkAuth, validate } from '../utils/auth.js'
+import * as cluster from '../cluster.js'
+import { checkAuth } from '../utils/auth.js'
 import { toPinsResponse } from '../utils/db-transforms.js'
 import { JSONResponse } from '../utils/json-response.js'
 import { parseCidPinning } from '../utils/utils.js'
@@ -21,7 +22,7 @@ export async function pinsGet(event, ctx) {
     )
   }
 
-  const upload = await db.getUpload(cid.sourceCid, user.id)
+  let upload = await db.getUpload(cid.sourceCid, user.id)
 
   if (!upload) {
     return new JSONResponse(
@@ -30,5 +31,19 @@ export async function pinsGet(event, ctx) {
     )
   }
 
+  // check if the status has changed upstream
+  const status = upload.content.pin[0].status
+  if (status === 'Pinning' || status === 'PinQueued') {
+    const res = await cluster.status(cid.sourceCid)
+    const newStatus = cluster.toDBPinStatus(res)
+    if (status !== newStatus) {
+      await ctx.db.updatePinStatus(upload.content_cid, {
+        service: 'ElasticIpfs',
+        status: newStatus,
+      })
+      upload = (await db.getUpload(cid.sourceCid, user.id)) ?? upload
+    }
+  }
+
   return new JSONResponse(toPinsResponse(upload))
 }
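The net effect of the config.js hunks above is that URL resolution becomes a straight pass-through, with both pickup vars in the required list. A sketch of the new behaviour (not part of the diff; the import path and the partial var set are assumptions, and vars not shown would simply come through as undefined since validation happens in loadConfigVariables):

import { serviceConfigFromVariables } from './src/config.js'

// the URL is used exactly as given; the CLUSTER_SERVICE lookup table is gone
const config = serviceConfigFromVariables({
  ENV: 'dev',
  DEBUG: 'true',
  PICKUP_URL: 'http://127.0.0.1:9094',
  PICKUP_BASIC_AUTH_TOKEN: 'dGVzdDp0ZXN0',
})

console.log(config.PICKUP_URL) // 'http://127.0.0.1:9094'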
diff --git a/packages/api/src/routes/pins-replace.js b/packages/api/src/routes/pins-replace.js
index 53b87fcf70..93d50dca76 100644
--- a/packages/api/src/routes/pins-replace.js
+++ b/packages/api/src/routes/pins-replace.js
@@ -73,6 +73,12 @@ export async function pinsReplace(event, ctx) {
   })
 
   const upload = await db.createUpload({
+    pins: [
+      {
+        status: 'PinQueued',
+        service: 'ElasticIpfs', // via pickup
+      },
+    ],
     type: 'Remote',
     content_cid: cid.contentCid,
     source_cid: cid.sourceCid,
diff --git a/packages/api/src/utils/router.js b/packages/api/src/utils/router.js
index 500f05e3bb..8904d37985 100644
--- a/packages/api/src/utils/router.js
+++ b/packages/api/src/utils/router.js
@@ -182,8 +182,8 @@ class Router {
   listen(event) {
     const url = new URL(event.request.url)
     // Add more if needed for other backends
-    const { DATABASE_URL, CLUSTER_API_URL } = getServiceConfig()
-    const passThrough = [DATABASE_URL, CLUSTER_API_URL]
+    const { DATABASE_URL, PICKUP_URL } = getServiceConfig()
+    const passThrough = [DATABASE_URL, PICKUP_URL]
 
     // Ignore http requests from the passthrough list above
     if (!passThrough.includes(`${url.protocol}//${url.host}`)) {
diff --git a/packages/api/test/config.spec.js b/packages/api/test/config.spec.js
index 533d0bfb7e..7bd7919eca 100644
--- a/packages/api/test/config.spec.js
+++ b/packages/api/test/config.spec.js
@@ -26,14 +26,13 @@ const BASE_CONFIG = {
   DATABASE_TOKEN:
     'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI',
   DATABASE_CONNECTION: 'postgresql://postgres:postgres@localhost:5432/postgres',
-  CLUSTER_BASIC_AUTH_TOKEN: 'dGVzdDp0ZXN0',
   MAINTENANCE_MODE: 'rw',
   S3_REGION: 'us-east-1',
   S3_ACCESS_KEY_ID: 'minioadmin',
   S3_SECRET_ACCESS_KEY: 'minioadmin',
   S3_BUCKET_NAME: 'dotstorage-dev-0',
-  CLUSTER_SERVICE: '',
-  CLUSTER_API_URL: 'http://127.0.0.1:9094',
+  PICKUP_URL: 'http://127.0.0.1:9094',
+  PICKUP_BASIC_AUTH_TOKEN: 'dGVzdDp0ZXN0',
   S3_ENDPOINT: 'http://127.0.0.1:9000',
   SLACK_USER_REQUEST_WEBHOOK_URL: '',
   SATNAV: '?',
@@ -167,8 +166,8 @@ test.serial(
       'SALT',
       'METAPLEX_AUTH_TOKEN',
       'PRIVATE_KEY',
-      'CLUSTER_API_URL',
-      'CLUSTER_BASIC_AUTH_TOKEN',
+      'PICKUP_URL',
+      'PICKUP_BASIC_AUTH_TOKEN',
       'DATABASE_URL',
       'DATABASE_TOKEN',
       'S3_ENDPOINT',
diff --git a/packages/api/test/scripts/globals.js b/packages/api/test/scripts/globals.js
index b6b0871c5f..e670fe1099 100644
--- a/packages/api/test/scripts/globals.js
+++ b/packages/api/test/scripts/globals.js
@@ -13,10 +13,9 @@ globalThis.PRIVATE_KEY = 'xmbtWjE9eYuAxae9G65lQSkw36HV6H+0LSFq2aKqVwY='
 
 globalThis.SENTRY_DSN = 'https://test@test.ingest.sentry.io/0000000'
 globalThis.SLACK_USER_REQUEST_WEBHOOK_URL = 'test'
-globalThis.CLUSTER_API_URL = 'http://127.0.0.1:9094'
+globalThis.PICKUP_API_URL = 'http://127.0.0.1:9094'
 // will be used with we can active auth in cluster base64 of test:test
-globalThis.CLUSTER_BASIC_AUTH_TOKEN = 'dGVzdDp0ZXN0'
-globalThis.CLUSTER_SERVICE = ''
+globalThis.PICKUP_BASIC_AUTH_TOKEN = 'dGVzdDp0ZXN0'
 
 globalThis.MAINTENANCE_MODE = 'rw'
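One subtlety in the router.js change above: the passthrough check compares request origins, so `PICKUP_URL` is matched as `protocol//host` with no path or trailing slash. A small self-contained illustration (editorial, not part of the diff; the values mirror the local-dev config):

const passThrough = ['http://localhost:3000', 'http://127.0.0.1:9094'] // DATABASE_URL, PICKUP_URL
const url = new URL('http://127.0.0.1:9094/pins/bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')
console.log(`${url.protocol}//${url.host}`) // 'http://127.0.0.1:9094' (host keeps the port)
console.log(passThrough.includes(`${url.protocol}//${url.host}`)) // true: the router ignores it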
diff --git a/packages/api/test/scripts/helpers.js b/packages/api/test/scripts/helpers.js
index 9fd0d04186..8071bb9862 100644
--- a/packages/api/test/scripts/helpers.js
+++ b/packages/api/test/scripts/helpers.js
@@ -13,8 +13,8 @@ import { getMiniflareContext, getTestServiceConfig } from './test-context.js'
  * @returns {Cluster}
  */
 export const getCluster = (config) => {
-  return new Cluster(config.CLUSTER_API_URL, {
-    headers: { Authorization: `Basic ${config.CLUSTER_BASIC_AUTH_TOKEN}` },
+  return new Cluster(config.PICKUP_URL, {
+    headers: { Authorization: `Basic ${config.PICKUP_BASIC_AUTH_TOKEN}` },
   })
 }
diff --git a/packages/api/test/scripts/test-context.js b/packages/api/test/scripts/test-context.js
index 99ce05a7f8..2c0a9188ce 100644
--- a/packages/api/test/scripts/test-context.js
+++ b/packages/api/test/scripts/test-context.js
@@ -22,7 +22,7 @@ const pkg = JSON.parse(
 export function makeMiniflare(bindings, fetchMock) {
   const envPath = path.join(__dirname, '../../../../.env')
 
-  const { DATABASE_URL, CLUSTER_API_URL, S3_ENDPOINT } = process.env
+  const { DATABASE_URL, PICKUP_URL, S3_ENDPOINT } = process.env
 
   return new Miniflare({
     // Autoload configuration from `.env`, `package.json` and `wrangler.toml`
@@ -36,7 +36,7 @@ export function makeMiniflare(bindings, fetchMock) {
     bindings: {
       ...bindings,
       DATABASE_URL,
-      CLUSTER_API_URL,
+      PICKUP_URL,
       S3_ENDPOINT,
     },
     fetchMock,
diff --git a/packages/api/wrangler.toml b/packages/api/wrangler.toml
index abe16e4533..5ae7ac7a3d 100644
--- a/packages/api/wrangler.toml
+++ b/packages/api/wrangler.toml
@@ -23,6 +23,7 @@ ENV = "dev"
 DEBUG = "true"
 DATABASE_URL = "http://localhost:3000"
 CARPARK_URL = "https://carpark-dev.web3.storage"
+PICKUP_URL = "https://staging.pickup.dag.haus"
 
 [build]
 command = "scripts/cli.js build"
@@ -44,6 +45,7 @@ ENV = "staging"
 DEBUG = "true"
 DATABASE_URL = "https://nft-storage-pgrest-staging.herokuapp.com"
 CARPARK_URL = "https://carpark-staging.web3.storage"
+PICKUP_URL = "https://staging.pickup.dag.haus"
 
 [env.staging.build]
 command = "scripts/cli.js build --env staging"
@@ -69,6 +71,7 @@ ENV = "production"
 DEBUG = "false"
 DATABASE_URL = "https://nft-storage-pgrest-prod.herokuapp.com"
 CARPARK_URL = "https://carpark.web3.storage"
+PICKUP_URL = "https://pickup.dag.haus"
 
 [env.production.build]
 command = "scripts/cli.js build --env production"
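End to end, the pinning service route this backs looks roughly like the sketch below (editorial, not part of the diff). It follows the pinning-service spec linked in the README; the local port assumes a default `wrangler dev` instance, and the token is a placeholder:

// request a pin via the pinning service API; per pins-add.js above the worker
// records a 'PinQueued' ElasticIpfs pin row and forwards the request to pickup
const res = await fetch('http://127.0.0.1:8787/pins', {
  method: 'POST',
  headers: {
    Authorization: 'Bearer <api-token>', // placeholder nft.storage API token
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    cid: 'bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi',
  }),
})
const pinStatus = await res.json() // spec shape: { requestid, status, created, pin, delegates }
console.log(pinStatus.status) // 'queued' while pickup fetches the DAG (see toPSAStatus)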