From a127fe64790ff7db6420bb8d4ccd95fe915dddda Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Thu, 9 Feb 2023 11:21:26 +0000
Subject: [PATCH 1/5] feat: use pickup to fetch pinning service pins

pickup pulls DAGs into E-IPFS, so this PR updates the API and cron jobs
to reflect that in our db.

- Update the pinning service add and update API routes to record
  `ElasticIpfs` as the service that the pin is queued on.
- Update the pins and pins-failed cron jobs to check pin status in
  pickup instead of cluster.

Fixes #2309

TODO

- [ ] add `PICKUP_BASIC_AUTH_TOKEN` as a secret to the repo for cron jobs.
- [ ] update `CLUSTER_*` API env vars.

License: MIT
Signed-off-by: Oli Evans
---
 .github/workflows/cron-pins-failed.yml  |  5 +++++
 .github/workflows/cron-pins.yml         |  7 +++++++
 packages/api/src/routes/pins-add.js     |  6 ++++++
 packages/api/src/routes/pins-replace.js |  6 ++++++
 packages/cron/src/bin/pins-failed.js    | 18 ++++++++++++++++--
 packages/cron/src/bin/pins.js           | 11 +++++++++--
 packages/cron/src/jobs/pins.js          | 12 ++++++++----
 packages/cron/src/lib/utils.js          | 15 +++++++++++++++
 8 files changed, 72 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/cron-pins-failed.yml b/.github/workflows/cron-pins-failed.yml
index 703148d796..1f90d60047 100644
--- a/.github/workflows/cron-pins-failed.yml
+++ b/.github/workflows/cron-pins-failed.yml
@@ -17,6 +17,9 @@ jobs:
     strategy:
       matrix:
         env: ['production']
+        include:
+          - env: production
+            pickup_url: http://pickup.dag.haus
     steps:
       - uses: actions/checkout@v2
         with:
@@ -41,5 +44,7 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
           AFTER: ${{ github.event.inputs.after }}
         run: yarn --cwd packages/cron start:pins-failed
diff --git a/.github/workflows/cron-pins.yml b/.github/workflows/cron-pins.yml
index 1cd02f1999..9986b9111d 100644
--- a/.github/workflows/cron-pins.yml
+++ b/.github/workflows/cron-pins.yml
@@ -12,6 +12,11 @@ jobs:
     strategy:
       matrix:
         env: ['staging', 'production']
+        include:
+          - env: production
+            pickup_url: http://pickup.dag.haus
+          - env: staging
+            pickup_url: http://staging.pickup.dag.haus
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
@@ -37,6 +42,8 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
+          PICKUP_URL: ${{ matrix.pickup_url }}
+          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
         run: yarn --cwd packages/cron start:pins
       - name: Heartbeat
         if: ${{ success() }}
diff --git a/packages/api/src/routes/pins-add.js b/packages/api/src/routes/pins-add.js
index 52acfea840..e1d20cd0fe 100644
--- a/packages/api/src/routes/pins-add.js
+++ b/packages/api/src/routes/pins-add.js
@@ -51,6 +51,12 @@ export async function pinsAdd(event, ctx) {
   })

   const upload = await db.createUpload({
+    pins: [
+      {
+        status: 'PinQueued',
+        service: 'ElasticIpfs', // via pickup
+      },
+    ],
     type: 'Remote',
     content_cid: cid.contentCid,
     source_cid: cid.sourceCid,
diff --git a/packages/api/src/routes/pins-replace.js b/packages/api/src/routes/pins-replace.js
index 53b87fcf70..93d50dca76 100644
--- a/packages/api/src/routes/pins-replace.js
+++ b/packages/api/src/routes/pins-replace.js
@@ -73,6 +73,12 @@ export async function pinsReplace(event, ctx) {
   })

   const upload = await db.createUpload({
+    pins: [
+      {
+        status: 'PinQueued',
+        service: 'ElasticIpfs', // via pickup
+      },
+    ],
     type: 'Remote',
     content_cid: cid.contentCid,
     source_cid: cid.sourceCid,
diff --git a/packages/cron/src/bin/pins-failed.js b/packages/cron/src/bin/pins-failed.js
index f63f701efa..1618e814ab 100644
--- a/packages/cron/src/bin/pins-failed.js
+++ b/packages/cron/src/bin/pins-failed.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { checkFailedPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -21,11 +27,19 @@ async function main() {
   const cluster1 = getCluster1(process.env)
   const cluster2 = getCluster2(process.env)
   const cluster3 = getCluster3(process.env)
+  const pickup = getPickup(process.env)

   const after = process.env.AFTER
     ? new Date(process.env.AFTER)
     : oneMonthAgo()

-  await checkFailedPinStatuses({ pg, cluster1, cluster2, cluster3, after })
+  await checkFailedPinStatuses({
+    pg,
+    cluster1,
+    cluster2,
+    cluster3,
+    pickup,
+    after,
+  })
 } finally {
   await pg.end()
 }
diff --git a/packages/cron/src/bin/pins.js b/packages/cron/src/bin/pins.js
index 3e245f5441..72f62ce51e 100755
--- a/packages/cron/src/bin/pins.js
+++ b/packages/cron/src/bin/pins.js
@@ -5,7 +5,13 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { updatePendingPinStatuses } from '../jobs/pins.js'
-import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'
+import {
+  getPg,
+  getCluster1,
+  getCluster2,
+  getCluster3,
+  getPickup,
+} from '../lib/utils.js'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -18,8 +24,9 @@ async function main() {
   const cluster1 = getCluster1(process.env)
   const cluster2 = getCluster2(process.env)
   const cluster3 = getCluster3(process.env)
+  const pickup = getPickup(process.env)

-  await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3 })
+  await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3, pickup })
 } finally {
   await pg.end()
 }
diff --git a/packages/cron/src/jobs/pins.js b/packages/cron/src/jobs/pins.js
index 5d910a6ab4..dbafe10eab 100644
--- a/packages/cron/src/jobs/pins.js
+++ b/packages/cron/src/jobs/pins.js
@@ -8,7 +8,11 @@ const CONCURRENCY = 5
  * http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers
  */
 const MAX_CLUSTER_STATUS_CIDS = 120
-const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3']
+/**
+ * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
+ * @type Array
+ **/
+const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3', 'ElasticIpfs']

 /**
  * @typedef {import('pg').Client} Client
@@ -17,8 +21,8 @@ const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3']
  * cluster1: import('@nftstorage/ipfs-cluster').Cluster
  * cluster2: import('@nftstorage/ipfs-cluster').Cluster
  * cluster3: import('@nftstorage/ipfs-cluster').Cluster
+ * pickup: import('@nftstorage/ipfs-cluster').Cluster
  * }} Config
- * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
  * @typedef {Pick & { source_cid: string }} Pin
  * @typedef {import('@supabase/postgrest-js').PostgrestQueryBuilder} PinQuery
  */
@@ -145,7 +149,7 @@ UPDATE pin AS p
  * }} config
  */
 async function updatePinStatuses(config) {
-  const { countPins, fetchPins, pg, cluster3 } = config
+  const { countPins, fetchPins, pg, pickup } = config
   if (!log.enabled) {
     console.log('ℹ️ Enable logging by setting DEBUG=pins:updatePinStatuses')
   }
@@ -182,7 +186,7 @@ async function updatePinStatuses(config) {
   /** @type {Pin[]} */
   const updatedPins = []
   const cids = pins.map((p) => p.source_cid)
-  const statuses = await cluster3.statusAll({ cids })
+  const statuses = await pickup.statusAll({ cids })
   const statusByCid = Object.fromEntries(statuses.map((s) => [s.cid, s]))

   for (const pin of pins) {
diff --git a/packages/cron/src/lib/utils.js b/packages/cron/src/lib/utils.js
index 8eb6ebe76e..fb8161fee1 100644
--- a/packages/cron/src/lib/utils.js
+++ b/packages/cron/src/lib/utils.js
@@ -44,6 +44,21 @@ export function getCluster3(env) {
   })
 }

+/**
+ * Create a new IPFS Cluster instance from the passed environment variables.
+ * @param {Record} env
+ */
+export function getPickup(env) {
+  const pickupUrl = env.PICKUP_URL
+  if (!pickupUrl) throw new Error('PICKUP_URL must be set in env')
+  const basicAuthToken = env.PICKUP_BASIC_AUTH_TOKEN
+  if (!basicAuthToken)
+    throw new Error('PICKUP_BASIC_AUTH_TOKEN must be set in env')
+  return new Cluster(pickupUrl, {
+    headers: { authorization: `Basic ${basicAuthToken}` },
+  })
+}
+
 /**
  * Create a new DBClient instance from the passed environment variables.
  * @param {Record} env
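For context, here is a minimal sketch (not part of the patch) of the cron flow this change sets up: `getPickup` builds a cluster-compatible client from the `PICKUP_*` env vars, and `statusAll` queries pin statuses in a batch, as `updatePinStatuses` does above. It assumes the script runs from `packages/cron/src/bin` with the env vars set; the CID is a placeholder.

```js
import { getPickup } from '../lib/utils.js'

const pickup = getPickup(process.env) // throws if PICKUP_URL or PICKUP_BASIC_AUTH_TOKEN is missing

// pickup speaks the ipfs-cluster status API, so statusAll works unchanged
const statuses = await pickup.statusAll({
  cids: ['bafybeicgmdpvw4duutrmdxl4a7gc52sxyuk7nz5gby77afwdteh3jc5bqa'], // placeholder
})

for (const { cid, peerMap } of statuses) {
  // each peer reports a tracker status such as pin_queued, pinning or pinned
  console.log(
    cid,
    Object.values(peerMap).map((p) => p.status)
  )
}
```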
From 864a8876eb2fdad087d59117c78a68f80ba4a3b8 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 7 Mar 2023 12:27:35 +0000
Subject: [PATCH 2/5] feat: update status if changed on pickup

License: MIT
Signed-off-by: Oli Evans
---
 packages/api/src/cluster.js         | 20 ++++++++++++++++++++
 packages/api/src/routes/pins-get.js | 19 +++++++++++++++++--
 2 files changed, 37 insertions(+), 2 deletions(-)

diff --git a/packages/api/src/cluster.js b/packages/api/src/cluster.js
index f9ac419313..06457cad5f 100644
--- a/packages/api/src/cluster.js
+++ b/packages/api/src/cluster.js
@@ -113,3 +113,23 @@ export function toPSAStatus(status) {
   if (pinInfos.some((i) => i.status === 'pin_queued')) return 'queued'
   return 'failed'
 }
+
+/**
+ * @param {import('@nftstorage/ipfs-cluster').API.StatusResponse} status
+ * @returns {import('./utils/db-client.js').definitions["pin"]["status"]} status
+ */
+export function toDBPinStatus(status) {
+  const pinInfos = Object.values(status.peerMap)
+  if (pinInfos.some((i) => i.status === 'pinned')) return 'Pinned'
+  if (pinInfos.some((i) => i.status === 'pinning')) return 'Pinning'
+  if (pinInfos.some((i) => i.status === 'pin_queued')) return 'PinQueued'
+  return 'PinError'
+}
+
+/**
+ * @param {string} cid
+ * @param {import("@nftstorage/ipfs-cluster").API.StatusOptions} [options]
+ */
+export function status(cid, options) {
+  return client.status(cid, options)
+}
diff --git a/packages/api/src/routes/pins-get.js b/packages/api/src/routes/pins-get.js
index b7676e1ed1..8077e9fd38 100644
--- a/packages/api/src/routes/pins-get.js
+++ b/packages/api/src/routes/pins-get.js
@@ -1,4 +1,5 @@
-import { checkAuth, validate } from '../utils/auth.js'
+import * as cluster from '../cluster.js'
+import { checkAuth } from '../utils/auth.js'
 import { toPinsResponse } from '../utils/db-transforms.js'
 import { JSONResponse } from '../utils/json-response.js'
 import { parseCidPinning } from '../utils/utils.js'
@@ -21,7 +22,7 @@ export async function pinsGet(event, ctx) {
     )
   }

-  const upload = await db.getUpload(cid.sourceCid, user.id)
+  let upload = await db.getUpload(cid.sourceCid, user.id)

   if (!upload) {
     return new JSONResponse(
@@ -30,5 +31,19 @@ export async function pinsGet(event, ctx) {
     )
   }

+  // check if the status has changed upstream
+  const status = upload.content.pin[0].status
+  if (status === 'Pinning' || status === 'PinQueued') {
+    const res = await cluster.status(cid.sourceCid)
+    const newStatus = cluster.toDBPinStatus(res)
+    if (status !== newStatus) {
+      await ctx.db.updatePinStatus(upload.content_cid, {
+        service: 'ElasticIpfs',
+        status: newStatus,
+      })
+      upload = (await db.getUpload(cid.sourceCid, user.id)) ?? upload
+    }
+  }
+
   return new JSONResponse(toPinsResponse(upload))
 }
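To illustrate the mapping this patch adds, here is a sketch (not part of the patch) that feeds a fabricated `StatusResponse`-shaped object to `toDBPinStatus`, imported from `packages/api/src/cluster.js` above:

```js
import { toDBPinStatus } from './cluster.js'

// fabricated response: one peer still pinning, one already pinned
const res = {
  cid: 'bafy-example', // placeholder
  peerMap: {
    'peer-1': { status: 'pinning' },
    'peer-2': { status: 'pinned' },
  },
}

// 'pinned' on any peer wins, then 'pinning', then 'pin_queued';
// anything else maps to 'PinError'
console.log(toDBPinStatus(res)) // -> 'Pinned'
```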
From 990747236ca01e793dba65ff18d118c5ad4e680a Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Tue, 7 Mar 2023 12:48:02 +0000
Subject: [PATCH 3/5] chore: extract cron changes

cron changes now in https://github.com/nftstorage/nft.storage/pull/2339

License: MIT
Signed-off-by: Oli Evans
---
 .github/workflows/cron-pins-failed.yml |  5 -----
 .github/workflows/cron-pins.yml        |  7 -------
 packages/cron/src/bin/pins-failed.js   | 18 ++----------------
 packages/cron/src/bin/pins.js          | 11 ++---------
 packages/cron/src/jobs/pins.js         | 12 ++++--------
 packages/cron/src/lib/utils.js         | 15 ---------------
 6 files changed, 8 insertions(+), 60 deletions(-)

diff --git a/.github/workflows/cron-pins-failed.yml b/.github/workflows/cron-pins-failed.yml
index 1f90d60047..703148d796 100644
--- a/.github/workflows/cron-pins-failed.yml
+++ b/.github/workflows/cron-pins-failed.yml
@@ -17,9 +17,6 @@ jobs:
     strategy:
       matrix:
         env: ['production']
-        include:
-          - env: production
-            pickup_url: http://pickup.dag.haus
     steps:
       - uses: actions/checkout@v2
         with:
@@ -44,7 +41,5 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
-          PICKUP_URL: ${{ matrix.pickup_url }}
-          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
           AFTER: ${{ github.event.inputs.after }}
         run: yarn --cwd packages/cron start:pins-failed
diff --git a/.github/workflows/cron-pins.yml b/.github/workflows/cron-pins.yml
index 9986b9111d..1cd02f1999 100644
--- a/.github/workflows/cron-pins.yml
+++ b/.github/workflows/cron-pins.yml
@@ -12,11 +12,6 @@ jobs:
     strategy:
       matrix:
         env: ['staging', 'production']
-        include:
-          - env: production
-            pickup_url: http://pickup.dag.haus
-          - env: staging
-            pickup_url: http://staging.pickup.dag.haus
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
@@ -42,8 +37,6 @@ jobs:
           CLUSTER2_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER2_BASIC_AUTH_TOKEN }}
           CLUSTER3_API_URL: ${{ secrets.CLUSTER3_API_URL }}
           CLUSTER3_BASIC_AUTH_TOKEN: ${{ secrets.CLUSTER3_BASIC_AUTH_TOKEN }}
-          PICKUP_URL: ${{ matrix.pickup_url }}
-          PICKUP_BASIC_AUTH_TOKEN: ${{ secrets.PICKUP_BASIC_AUTH_TOKEN }}
         run: yarn --cwd packages/cron start:pins
       - name: Heartbeat
         if: ${{ success() }}
diff --git a/packages/cron/src/bin/pins-failed.js b/packages/cron/src/bin/pins-failed.js
index 1618e814ab..f63f701efa 100644
--- a/packages/cron/src/bin/pins-failed.js
+++ b/packages/cron/src/bin/pins-failed.js
@@ -5,13 +5,7 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { checkFailedPinStatuses } from '../jobs/pins.js'
-import {
-  getPg,
-  getCluster1,
-  getCluster2,
-  getCluster3,
-  getPickup,
-} from '../lib/utils.js'
+import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -27,19 +21,11 @@ async function main() {
   const cluster1 = getCluster1(process.env)
   const cluster2 = getCluster2(process.env)
   const cluster3 = getCluster3(process.env)
-  const pickup = getPickup(process.env)

   const after = process.env.AFTER
     ? new Date(process.env.AFTER)
     : oneMonthAgo()

-  await checkFailedPinStatuses({
-    pg,
-    cluster1,
-    cluster2,
-    cluster3,
-    pickup,
-    after,
-  })
+  await checkFailedPinStatuses({ pg, cluster1, cluster2, cluster3, after })
 } finally {
   await pg.end()
 }
diff --git a/packages/cron/src/bin/pins.js b/packages/cron/src/bin/pins.js
index 72f62ce51e..3e245f5441 100755
--- a/packages/cron/src/bin/pins.js
+++ b/packages/cron/src/bin/pins.js
@@ -5,13 +5,7 @@ import { fileURLToPath } from 'url'
 import dotenv from 'dotenv'
 import fetch from '@web-std/fetch'
 import { updatePendingPinStatuses } from '../jobs/pins.js'
-import {
-  getPg,
-  getCluster1,
-  getCluster2,
-  getCluster3,
-  getPickup,
-} from '../lib/utils.js'
+import { getPg, getCluster1, getCluster2, getCluster3 } from '../lib/utils.js'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 global.fetch = fetch
@@ -24,9 +18,8 @@ async function main() {
   const cluster1 = getCluster1(process.env)
   const cluster2 = getCluster2(process.env)
   const cluster3 = getCluster3(process.env)
-  const pickup = getPickup(process.env)

-  await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3, pickup })
+  await updatePendingPinStatuses({ pg, cluster1, cluster2, cluster3 })
 } finally {
   await pg.end()
 }
diff --git a/packages/cron/src/jobs/pins.js b/packages/cron/src/jobs/pins.js
index dbafe10eab..5d910a6ab4 100644
--- a/packages/cron/src/jobs/pins.js
+++ b/packages/cron/src/jobs/pins.js
@@ -8,11 +8,7 @@ const CONCURRENCY = 5
  * http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers
  */
 const MAX_CLUSTER_STATUS_CIDS = 120
-/**
- * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
- * @type Array
- **/
-const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3', 'ElasticIpfs']
+const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3']

 /**
  * @typedef {import('pg').Client} Client
@@ -21,8 +17,8 @@ const CLUSTERS = ['IpfsCluster', 'IpfsCluster2', 'IpfsCluster3', 'ElasticIpfs']
  * cluster1: import('@nftstorage/ipfs-cluster').Cluster
  * cluster2: import('@nftstorage/ipfs-cluster').Cluster
  * cluster3: import('@nftstorage/ipfs-cluster').Cluster
- * pickup: import('@nftstorage/ipfs-cluster').Cluster
  * }} Config
+ * @typedef {import('../../../api/src/utils/db-types').definitions} definitions
  * @typedef {Pick & { source_cid: string }} Pin
  * @typedef {import('@supabase/postgrest-js').PostgrestQueryBuilder} PinQuery
  */
@@ -149,7 +145,7 @@ UPDATE pin AS p
  * }} config
  */
 async function updatePinStatuses(config) {
-  const { countPins, fetchPins, pg, pickup } = config
+  const { countPins, fetchPins, pg, cluster3 } = config
   if (!log.enabled) {
     console.log('ℹ️ Enable logging by setting DEBUG=pins:updatePinStatuses')
   }
@@ -186,7 +182,7 @@ async function updatePinStatuses(config) {
   /** @type {Pin[]} */
   const updatedPins = []
   const cids = pins.map((p) => p.source_cid)
-  const statuses = await pickup.statusAll({ cids })
+  const statuses = await cluster3.statusAll({ cids })
   const statusByCid = Object.fromEntries(statuses.map((s) => [s.cid, s]))

   for (const pin of pins) {
diff --git a/packages/cron/src/lib/utils.js b/packages/cron/src/lib/utils.js
index fb8161fee1..8eb6ebe76e 100644
--- a/packages/cron/src/lib/utils.js
+++ b/packages/cron/src/lib/utils.js
@@ -44,21 +44,6 @@ export function getCluster3(env) {
   })
 }

-/**
- * Create a new IPFS Cluster instance from the passed environment variables.
- * @param {Record} env
- */
-export function getPickup(env) {
-  const pickupUrl = env.PICKUP_URL
-  if (!pickupUrl) throw new Error('PICKUP_URL must be set in env')
-  const basicAuthToken = env.PICKUP_BASIC_AUTH_TOKEN
-  if (!basicAuthToken)
-    throw new Error('PICKUP_BASIC_AUTH_TOKEN must be set in env')
-  return new Cluster(pickupUrl, {
-    headers: { authorization: `Basic ${basicAuthToken}` },
-  })
-}
-
 /**
  * Create a new DBClient instance from the passed environment variables.
  * @param {Record} env

From afb2ca250d3bbbd0eb025b2eabcbb24268c78fde Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Wed, 22 Mar 2023 12:02:06 +0000
Subject: [PATCH 4/5] chore: create new env vars for pickup

so we can set them in advance of deploy, and have the new code make use
of them when it goes live.

License: MIT
Signed-off-by: Oli Evans
---
 packages/api/src/bindings.d.ts |  8 ++++----
 packages/api/src/cluster.js    |  7 ++++---
 packages/api/src/config.js     | 34 ++++------------------------------
 packages/api/wrangler.toml     |  3 +++
 4 files changed, 15 insertions(+), 37 deletions(-)

diff --git a/packages/api/src/bindings.d.ts b/packages/api/src/bindings.d.ts
index 2ccc992437..8ed098ada0 100644
--- a/packages/api/src/bindings.d.ts
+++ b/packages/api/src/bindings.d.ts
@@ -55,11 +55,11 @@ export interface ServiceConfiguration {
   /** UCAN private signing key */
   PRIVATE_KEY: string

-  /** API url for active IPFS cluster endpoint */
-  CLUSTER_API_URL: string
+  /** API url for pickup endpoint */
+  PICKUP_URL: string

-  /** Auth token for IPFS culster */
-  CLUSTER_BASIC_AUTH_TOKEN: string
+  /** Auth token for pickup pinning service */
+  PICKUP_BASIC_AUTH_TOKEN: string

   /** Postgrest endpoint URL */
   DATABASE_URL: string
diff --git a/packages/api/src/cluster.js b/packages/api/src/cluster.js
index 06457cad5f..8839e57555 100644
--- a/packages/api/src/cluster.js
+++ b/packages/api/src/cluster.js
@@ -2,11 +2,12 @@ import { Cluster } from '@nftstorage/ipfs-cluster'
 import { getServiceConfig } from './config.js'
 import { HTTPError } from './errors.js'

-const { CLUSTER_API_URL, CLUSTER_BASIC_AUTH_TOKEN } = getServiceConfig()
+// pickup provides a cluster compatible api for get /pins & post /pins
+const { PICKUP_URL, PICKUP_BASIC_AUTH_TOKEN } = getServiceConfig()

-const client = new Cluster(CLUSTER_API_URL, {
+const client = new Cluster(PICKUP_URL, {
   headers: {
-    Authorization: `Basic ${CLUSTER_BASIC_AUTH_TOKEN}`,
+    Authorization: `Basic ${PICKUP_BASIC_AUTH_TOKEN}`,
   },
 })

diff --git a/packages/api/src/config.js b/packages/api/src/config.js
index ea46c2de50..f559e67dd0 100644
--- a/packages/api/src/config.js
+++ b/packages/api/src/config.js
@@ -9,16 +9,6 @@ import {
  * @typedef {import('./bindings').RuntimeEnvironmentName} RuntimeEnvironmentName
  */

-/**
- * If the CLUSTER_SERVICE variable is set, the service URL will be resolved from here.
- *
- * @type Record */
-const CLUSTER_SERVICE_URLS = {
-  IpfsCluster: 'https://nft.storage.ipfscluster.io/api/',
-  IpfsCluster2: 'https://nft2.storage.ipfscluster.io/api/',
-  IpfsCluster3: 'https://nft3.storage.ipfscluster.io/api/',
-}
-
 /**
  * Load a {@link ServiceConfiguration} from the global environment.
  * @returns {ServiceConfiguration}
@@ -37,22 +27,6 @@ export const getServiceConfig = () => {
  * @returns {ServiceConfiguration}
  */
 export function serviceConfigFromVariables(vars) {
-  let clusterUrl
-  if (vars.CLUSTER_SERVICE) {
-    clusterUrl = CLUSTER_SERVICE_URLS[vars.CLUSTER_SERVICE]
-    if (!clusterUrl) {
-      throw new Error(`unknown cluster service: ${vars.CLUSTER_SERVICE}`)
-    }
-  }
-  if (vars.CLUSTER_API_URL) {
-    clusterUrl = vars.CLUSTER_API_URL
-  }
-  if (!clusterUrl || (vars.CLUSTER_SERVICE && vars.CLUSTER_API_URL)) {
-    throw new Error(
-      `One of CLUSTER_SERVICE or CLUSTER_API_URL must be set in ENV`
-    )
-  }
-
   return {
     ENV: parseRuntimeEnv(vars.ENV),
     DEBUG: boolValue(vars.DEBUG),
@@ -65,8 +39,8 @@ export function serviceConfigFromVariables(vars) {
     CARPARK_URL: vars.CARPARK_URL,
     DATABASE_URL: vars.DATABASE_URL,
     DATABASE_TOKEN: vars.DATABASE_TOKEN,
-    CLUSTER_API_URL: clusterUrl,
-    CLUSTER_BASIC_AUTH_TOKEN: vars.CLUSTER_BASIC_AUTH_TOKEN,
+    PICKUP_URL: vars.PICKUP_URL,
+    PICKUP_BASIC_AUTH_TOKEN: vars.PICKUP_BASIC_AUTH_TOKEN,
     MAGIC_SECRET_KEY: vars.MAGIC_SECRET_KEY,
     SENTRY_DSN: vars.SENTRY_DSN,
     METAPLEX_AUTH_TOKEN: vars.METAPLEX_AUTH_TOKEN,
@@ -114,6 +88,8 @@ export function loadConfigVariables() {
     'DUDEWHERE',
     'CARPARK',
     'CARPARK_URL',
+    'PICKUP_URL',
+    'PICKUP_BASIC_AUTH_TOKEN',
     'DATABASE_URL',
     'DATABASE_TOKEN',
     'MAGIC_SECRET_KEY',
@@ -145,8 +121,6 @@ export function loadConfigVariables() {
   }

   const optional = [
-    'CLUSTER_SERVICE',
-    'CLUSTER_API_URL',
     'LINKDEX_URL',
     'S3_ENDPOINT',
     'SLACK_USER_REQUEST_WEBHOOK_URL',
diff --git a/packages/api/wrangler.toml b/packages/api/wrangler.toml
index abe16e4533..5ae7ac7a3d 100644
--- a/packages/api/wrangler.toml
+++ b/packages/api/wrangler.toml
@@ -23,6 +23,7 @@ ENV = "dev"
 DEBUG = "true"
 DATABASE_URL = "http://localhost:3000"
 CARPARK_URL = "https://carpark-dev.web3.storage"
+PICKUP_URL = "https://staging.pickup.dag.haus"

 [build]
 command = "scripts/cli.js build"
@@ -44,6 +45,7 @@ ENV = "staging"
 DEBUG = "true"
 DATABASE_URL = "https://nft-storage-pgrest-staging.herokuapp.com"
 CARPARK_URL = "https://carpark-staging.web3.storage"
+PICKUP_URL = "https://staging.pickup.dag.haus"

 [env.staging.build]
 command = "scripts/cli.js build --env staging"
@@ -69,6 +71,7 @@ ENV = "production"
 DEBUG = "false"
 DATABASE_URL = "https://nft-storage-pgrest-prod.herokuapp.com"
 CARPARK_URL = "https://carpark.web3.storage"
+PICKUP_URL = "https://pickup.dag.haus"

 [env.production.build]
 command = "scripts/cli.js build --env production"
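The effect on config loading is easiest to see in isolation. A sketch of the required-variable check that `loadConfigVariables` now performs (variable names are from the patch; the function body and error text here are illustrative, not the module's exact code):

```js
// PICKUP_URL and PICKUP_BASIC_AUTH_TOKEN are now hard requirements, and the
// old CLUSTER_SERVICE / CLUSTER_API_URL resolution logic is gone entirely.
const required = ['PICKUP_URL', 'PICKUP_BASIC_AUTH_TOKEN', 'DATABASE_URL', 'DATABASE_TOKEN']

/** @param {Record<string, string|undefined>} vars */
function checkRequired(vars) {
  const missing = required.filter((name) => !vars[name])
  if (missing.length > 0) {
    throw new Error(`missing required config: ${missing.join(', ')}`) // illustrative message
  }
}

checkRequired({ PICKUP_URL: 'https://pickup.dag.haus' }) // throws: the other three are missing
```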
From efa19ee33ccb3e34c1530e56570b04d35dcae2e2 Mon Sep 17 00:00:00 2001
From: Oli Evans
Date: Wed, 22 Mar 2023 12:30:06 +0000
Subject: [PATCH 5/5] chore: update tests

License: MIT
Signed-off-by: Oli Evans
---
 .env.tpl                                     |  7 +++----
 packages/api/README.md                       | 12 ++++++++++--
 packages/api/docker/run-with-dependencies.sh |  4 ++--
 packages/api/src/config.js                   |  1 -
 packages/api/src/utils/router.js             |  4 ++--
 packages/api/test/config.spec.js             |  9 ++++-----
 packages/api/test/scripts/globals.js         |  5 ++---
 packages/api/test/scripts/helpers.js         |  4 ++--
 packages/api/test/scripts/test-context.js    |  4 ++--
 9 files changed, 27 insertions(+), 23 deletions(-)

diff --git a/.env.tpl b/.env.tpl
index 9bd0bafb58..7346843dfb 100644
--- a/.env.tpl
+++ b/.env.tpl
@@ -32,10 +32,9 @@ DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlh
 # Postgres Database
 DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres

-# Cluster
-CLUSTER_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
-CLUSTER_SERVICE =
-CLUSTER_API_URL = http://127.0.0.1:9094
+# Pickup (can be mocked with ipfs-cluster for local dev)
+PICKUP_BASIC_AUTH_TOKEN = dGVzdDp0ZXN0
+PICKUP_URL = http://127.0.0.1:9094

 # Maintenance Mode
 MAINTENANCE_MODE = rw
diff --git a/packages/api/README.md b/packages/api/README.md
index bcb9b0f1f7..288ba1ef85 100644
--- a/packages/api/README.md
+++ b/packages/api/README.md
@@ -66,8 +66,7 @@ wrangler secret put MAGIC_SECRET_KEY --env production # Get from magic.link account
 wrangler secret put SALT --env production # open `https://csprng.xyz/v1/api` in the browser and use the value of `Data`
 wrangler secret put SENTRY_DSN --env USER # Get from Sentry
 wrangler secret put DATABASE_TOKEN --env production # Get from database account
-wrangler secret put CLUSTER_BASIC_AUTH_TOKEN --env production # Get from nft.storage vault in 1password
-wrangler secret put CLUSTER_SERVICE --env production # Which cluster should be used. Options 'IpfsCluster' / 'IpfsCluster2' / 'IpfsCluster3'
+wrangler secret put PICKUP_BASIC_AUTH_TOKEN --env production # Get from nft.storage vault in 1password
 wrangler secret put MAILCHIMP_API_KEY --env production # Get from mailchimp
 wrangler secret put LOGTAIL_TOKEN --env production # Get from Logtail
 wrangler secret put METAPLEX_AUTH_TOKEN --env production # User ID meteplex endpoint should use (not required for dev)
@@ -128,3 +127,12 @@ see: https://github.com/web3-storage/linkdex-api
 We write Uploaded CARs to both S3 and R2 in parallel. The R2 Bucket is bound to the worker as `env.CARPARK`. The API docs for an R2Bucket instance are here: https://developers.cloudflare.com/r2/runtime-apis/#bucket-method-definitions

 We key our R2 uploads by CAR CID, and record them in the DB under `upload.backup_urls`. The URL prefix for CARs in R2 is set by the `env.CARPARK_URL`. This is currently pointing to a subdomain on web3.storage which we could configure when we need direct http access to the bucket, but does not exist at time of writing.
+
+## Pickup
+
+We use [pickup](https://github.com/web3-storage/pickup) to fetch DAGs from IPFS and save them to a bucket where E-IPFS can index them. It provides a subset of the ipfs-cluster api for `GET /pins` and `POST /pins` that we use as the backend for the [pinning service](https://ipfs.github.io/pinning-services-api-spec/) implementation.
+
+- `PICKUP_URL` defines the service endpoint to use, and is set in the wrangler.toml.
+- `PICKUP_BASIC_AUTH_TOKEN` must be set as a secret in the env.
+
+For local dev, we use a local ipfs-cluster container for the same service.
diff --git a/packages/api/docker/run-with-dependencies.sh b/packages/api/docker/run-with-dependencies.sh
index 514eb0b411..bb97e65aaf 100755
--- a/packages/api/docker/run-with-dependencies.sh
+++ b/packages/api/docker/run-with-dependencies.sh
@@ -82,7 +82,7 @@ export DATABASE_CONNECTION="postgres://postgres:postgres@$DB_HOST_PORT/postgres"

 # The vars below are used to configure the service
 export DATABASE_URL="http://$POSTGREST_HOST_PORT"
-export CLUSTER_API_URL="http://$CLUSTER_HOST_PORT"
+export PICKUP_URL="http://$CLUSTER_HOST_PORT"
 export S3_ENDPOINT="http://$MINIO_HOST_PORT"

 echo "services started."
@@ -90,7 +90,7 @@ echo "environment overrides:"
 echo "MINIO_API_PORT=${MINIO_API_PORT}"
 echo "DATABASE_CONNECTION=${DATABASE_CONNECTION}"
 echo "DATABASE_URL=${DATABASE_URL}"
-echo "CLUSTER_API_URL=${CLUSTER_API_URL}"
+echo "PICKUP_URL=${PICKUP_URL}"
 echo "S3_ENDPOINT=${S3_ENDPOINT}"
 echo

diff --git a/packages/api/src/config.js b/packages/api/src/config.js
index f559e67dd0..f40b17fd37 100644
--- a/packages/api/src/config.js
+++ b/packages/api/src/config.js
@@ -98,7 +98,6 @@ export function loadConfigVariables() {
     'LOGTAIL_TOKEN',
     'PRIVATE_KEY',
     'SENTRY_DSN',
-    'CLUSTER_BASIC_AUTH_TOKEN',
     'MAINTENANCE_MODE',
     'S3_REGION',
     'S3_ACCESS_KEY_ID',
diff --git a/packages/api/src/utils/router.js b/packages/api/src/utils/router.js
index 500f05e3bb..8904d37985 100644
--- a/packages/api/src/utils/router.js
+++ b/packages/api/src/utils/router.js
@@ -182,8 +182,8 @@ class Router {
   listen(event) {
     const url = new URL(event.request.url)
     // Add more if needed for other backends
-    const { DATABASE_URL, CLUSTER_API_URL } = getServiceConfig()
-    const passThrough = [DATABASE_URL, CLUSTER_API_URL]
+    const { DATABASE_URL, PICKUP_URL } = getServiceConfig()
+    const passThrough = [DATABASE_URL, PICKUP_URL]

     // Ignore http requests from the passthrough list above
     if (!passThrough.includes(`${url.protocol}//${url.host}`)) {
diff --git a/packages/api/test/config.spec.js b/packages/api/test/config.spec.js
index 533d0bfb7e..7bd7919eca 100644
--- a/packages/api/test/config.spec.js
+++ b/packages/api/test/config.spec.js
@@ -26,14 +26,13 @@ const BASE_CONFIG = {
   DATABASE_TOKEN:
     'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlhdCI6MTYwMzk2ODgzNCwiZXhwIjoyNTUwNjUzNjM0LCJyb2xlIjoic2VydmljZV9yb2xlIn0.necIJaiP7X2T2QjGeV-FhpkizcNTX8HjDDBAxpgQTEI',
   DATABASE_CONNECTION: 'postgresql://postgres:postgres@localhost:5432/postgres',
-  CLUSTER_BASIC_AUTH_TOKEN: 'dGVzdDp0ZXN0',
   MAINTENANCE_MODE: 'rw',
   S3_REGION: 'us-east-1',
   S3_ACCESS_KEY_ID: 'minioadmin',
   S3_SECRET_ACCESS_KEY: 'minioadmin',
   S3_BUCKET_NAME: 'dotstorage-dev-0',
-  CLUSTER_SERVICE: '',
-  CLUSTER_API_URL: 'http://127.0.0.1:9094',
+  PICKUP_URL: 'http://127.0.0.1:9094',
+  PICKUP_BASIC_AUTH_TOKEN: 'dGVzdDp0ZXN0',
   S3_ENDPOINT: 'http://127.0.0.1:9000',
   SLACK_USER_REQUEST_WEBHOOK_URL: '',
   SATNAV: '?',
@@ -167,8 +166,8 @@ test.serial(
       'SALT',
       'METAPLEX_AUTH_TOKEN',
       'PRIVATE_KEY',
-      'CLUSTER_API_URL',
-      'CLUSTER_BASIC_AUTH_TOKEN',
+      'PICKUP_URL',
+      'PICKUP_BASIC_AUTH_TOKEN',
       'DATABASE_URL',
       'DATABASE_TOKEN',
       'S3_ENDPOINT',
diff --git a/packages/api/test/scripts/globals.js b/packages/api/test/scripts/globals.js
index b6b0871c5f..e670fe1099 100644
--- a/packages/api/test/scripts/globals.js
+++ b/packages/api/test/scripts/globals.js
@@ -13,10 +13,9 @@ globalThis.PRIVATE_KEY = 'xmbtWjE9eYuAxae9G65lQSkw36HV6H+0LSFq2aKqVwY='
 globalThis.SENTRY_DSN = 'https://test@test.ingest.sentry.io/0000000'
 globalThis.SLACK_USER_REQUEST_WEBHOOK_URL = 'test'

-globalThis.CLUSTER_API_URL = 'http://127.0.0.1:9094'
+globalThis.PICKUP_URL = 'http://127.0.0.1:9094'
 // will be used with we can active auth in cluster base64 of test:test
-globalThis.CLUSTER_BASIC_AUTH_TOKEN = 'dGVzdDp0ZXN0'
-globalThis.CLUSTER_SERVICE = ''
+globalThis.PICKUP_BASIC_AUTH_TOKEN = 'dGVzdDp0ZXN0'

 globalThis.MAINTENANCE_MODE = 'rw'
diff --git a/packages/api/test/scripts/helpers.js b/packages/api/test/scripts/helpers.js
index 9fd0d04186..8071bb9862 100644
--- a/packages/api/test/scripts/helpers.js
+++ b/packages/api/test/scripts/helpers.js
@@ -13,8 +13,8 @@ import { getMiniflareContext, getTestServiceConfig } from './test-context.js'
  * @returns {Cluster}
  */
 export const getCluster = (config) => {
-  return new Cluster(config.CLUSTER_API_URL, {
-    headers: { Authorization: `Basic ${config.CLUSTER_BASIC_AUTH_TOKEN}` },
+  return new Cluster(config.PICKUP_URL, {
+    headers: { Authorization: `Basic ${config.PICKUP_BASIC_AUTH_TOKEN}` },
   })
 }
diff --git a/packages/api/test/scripts/test-context.js b/packages/api/test/scripts/test-context.js
index 99ce05a7f8..2c0a9188ce 100644
--- a/packages/api/test/scripts/test-context.js
+++ b/packages/api/test/scripts/test-context.js
@@ -22,7 +22,7 @@ const pkg = JSON.parse(
 export function makeMiniflare(bindings, fetchMock) {
   const envPath = path.join(__dirname, '../../../../.env')

-  const { DATABASE_URL, CLUSTER_API_URL, S3_ENDPOINT } = process.env
+  const { DATABASE_URL, PICKUP_URL, S3_ENDPOINT } = process.env

   return new Miniflare({
     // Autoload configuration from `.env`, `package.json` and `wrangler.toml`
@@ -36,7 +36,7 @@ export function makeMiniflare(bindings, fetchMock) {
     bindings: {
       ...bindings,
       DATABASE_URL,
-      CLUSTER_API_URL,
+      PICKUP_URL,
       S3_ENDPOINT,
     },
     fetchMock,
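Since pickup is mocked with an ipfs-cluster container for local dev, the wiring can be smoke-tested with the same client the API and tests use. A sketch assuming the `.env.tpl` defaults above (`http://127.0.0.1:9094`, token `dGVzdDp0ZXN0`) and a running container; the CID is a placeholder:

```js
import { Cluster } from '@nftstorage/ipfs-cluster'

const pickup = new Cluster('http://127.0.0.1:9094', {
  headers: { Authorization: 'Basic dGVzdDp0ZXN0' }, // base64 of test:test
})

// status(cid) is the same call the pins-get route makes via cluster.status
const { cid, peerMap } = await pickup.status(
  'bafybeicgmdpvw4duutrmdxl4a7gc52sxyuk7nz5gby77afwdteh3jc5bqa' // placeholder
)
console.log(cid, peerMap)
```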