Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions drizzle/0038_store_listing_oauth_probes_lexicon_keys.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
-- Add the normalized lexicon-key array to OAuth probe rows. NOT NULL with an
-- empty-array default so existing rows remain valid without a backfill step.
ALTER TABLE "store_listing_oauth_probes" ADD COLUMN "oauth_lexicon_keys" text[] DEFAULT '{}'::text[] NOT NULL;
--> statement-breakpoint
-- GIN index so array membership/containment queries over the keys stay fast.
CREATE INDEX "store_listing_oauth_probes_oauth_lexicon_keys_idx" ON "store_listing_oauth_probes" USING gin ("oauth_lexicon_keys");
5 changes: 5 additions & 0 deletions drizzle/0039_oauth_lexicon_hub_snapshot.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- Cache table for the precomputed `/apps/lexicons` hub payload (see
-- `oauthLexiconHubSnapshot` in src/db/schema.ts). The primary key is a fixed
-- singleton key, so the table is intended to hold one overwritable row.
CREATE TABLE "oauth_lexicon_hub_snapshot" (
"singleton_key" text PRIMARY KEY NOT NULL,
"payload" jsonb NOT NULL,
"computed_at" timestamp with time zone NOT NULL
);
14 changes: 14 additions & 0 deletions drizzle/meta/_journal.json
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,20 @@
"when": 1778800000000,
"tag": "0037_fund_at_mirror_tables",
"breakpoints": true
},
{
"idx": 38,
"version": "7",
"when": 1778900000000,
"tag": "0038_store_listing_oauth_probes_lexicon_keys",
"breakpoints": true
},
{
"idx": 39,
"version": "7",
"when": 1779000000000,
"tag": "0039_oauth_lexicon_hub_snapshot",
"breakpoints": true
}
]
}
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
"listing:restore-from-backup-db": "tsx -r dotenv/config scripts/restore-store-listing-from-backup-db.ts",
"oauth:detect-scopes": "tsx scripts/detect-listing-oauth-scopes.ts",
"listing:oauth-probes-sync": "tsx -r dotenv/config scripts/sync-listing-oauth-probes.ts",
"listing:oauth-lexicon-hub-refresh": "tsx -r dotenv/config scripts/refresh-oauth-lexicon-hub.ts",
"listing:oauth-lexicon-keys-backfill": "tsx -r dotenv/config scripts/backfill-oauth-lexicon-keys.ts",
"db:generate": "drizzle-kit generate",
"db:migrate": "tsx scripts/db-migrate.ts",
"db:migrate:kit": "drizzle-kit migrate",
Expand Down
88 changes: 88 additions & 0 deletions scripts/backfill-oauth-lexicon-keys.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
#!/usr/bin/env node
/**
* Fills `store_listing_oauth_probes.oauth_lexicon_keys` from existing
* `oauth_scopes_distinct` and expanded permission-set checklists in `report_json` (no HTTP).
*
* pnpm exec tsx -r dotenv/config scripts/backfill-oauth-lexicon-keys.ts
*/
import "dotenv/config";
import * as schema from "#/db/schema";
import { refreshOAuthLexiconHubSnapshot } from "#/lib/oauth-lexicon-hub-snapshot.server";
import { extractOAuthLexiconKeysForStorefrontProbe } from "#/lib/oauth-scope-lexicon-keys";
import { eq } from "drizzle-orm";

/** Current wall-clock time as an ISO-8601 UTC string, used to prefix log lines. */
function ts(): string {
  const now = new Date();
  return now.toISOString();
}

/**
 * Backfill `oauth_lexicon_keys` for every probe row from data already in the
 * database (no HTTP), then refresh the `/apps/lexicons` hub snapshot.
 *
 * Exits with code 1 when DATABASE_URL is not configured.
 */
async function main() {
  // Guard before importing the db module so we fail with a clear message
  // instead of a confusing connection error.
  if (!process.env.DATABASE_URL?.trim()) {
    console.error(
      `[backfill-oauth-lexicon-keys] ${ts()} DATABASE_URL is required`,
    );
    process.exit(1);
  }

  // Lazy import: only connect after the env guard has passed.
  const { db, dbClient } = await import("#/db/index.server");
  const probes = schema.storeListingOAuthProbes;

  const rows = await db
    .select({
      storeListingId: probes.storeListingId,
      oauthScopesDistinct: probes.oauthScopesDistinct,
      oauthLexiconKeys: probes.oauthLexiconKeys,
      reportJson: probes.reportJson,
    })
    .from(probes);

  let updated = 0;
  let skipped = 0;

  for (const row of rows) {
    const next = extractOAuthLexiconKeysForStorefrontProbe({
      oauthScopesDistinct: row.oauthScopesDistinct ?? [],
      scopeHumanReadable: row.reportJson?.summary?.scopeHumanReadable,
    });
    // Compare as order-insensitive sets: sort BOTH sides before the
    // element-wise check. Previously only the stored keys were sorted, so an
    // equal-but-differently-ordered extraction result triggered a needless
    // UPDATE on every run, making the backfill non-idempotent.
    const byLocale = (a: string, b: string) => a.localeCompare(b);
    const nextSorted = [...next].toSorted(byLocale);
    const prevSorted = [...row.oauthLexiconKeys].toSorted(byLocale);
    const same =
      nextSorted.length === prevSorted.length &&
      nextSorted.every((k, i) => k === prevSorted[i]);
    if (same) {
      skipped++;
      continue;
    }
    // Persist the extractor's output as-is (not the sorted copy) so stored
    // ordering matches what the sync script writes.
    await db
      .update(probes)
      .set({ oauthLexiconKeys: next })
      .where(eq(probes.storeListingId, row.storeListingId));
    updated++;
  }

  console.log(
    `[backfill-oauth-lexicon-keys] ${ts()} rows=${String(rows.length)} updated=${String(updated)} skipped_unchanged=${String(skipped)}`,
  );

  // Best-effort: keep the /apps/lexicons snapshot in sync with the freshly
  // backfilled keys. A snapshot failure must not fail the backfill itself.
  try {
    const hub = await refreshOAuthLexiconHubSnapshot(db);
    console.log(
      `[backfill-oauth-lexicon-keys] ${ts()} oauth_lexicon_hub_snapshot clusterCount=${String(hub.clusterCount)}`,
    );
  } catch (error) {
    console.error(
      `[backfill-oauth-lexicon-keys] ${ts()} oauth_lexicon_hub_snapshot refresh failed`,
      error instanceof Error ? (error.stack ?? error.message) : error,
    );
  }

  // Close the pooled client so the process can exit; ignore close errors.
  await dbClient.end({ timeout: 5 }).catch(() => {});
}

// Entry point: log any unhandled failure (stack when available) and exit
// non-zero so cron/CI notices the failed run.
main().catch((error: unknown) => {
  const detail =
    error instanceof Error ? (error.stack ?? error.message) : error;
  console.error(`[backfill-oauth-lexicon-keys] fatal`, detail);
  process.exit(1);
});
39 changes: 39 additions & 0 deletions scripts/refresh-oauth-lexicon-hub.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
#!/usr/bin/env node
/**
* Recompute `oauth_lexicon_hub_snapshot` from current probes (slow: resolves lexicon JSON).
*
* pnpm listing:oauth-lexicon-hub-refresh
*/
import "dotenv/config";
import { refreshOAuthLexiconHubSnapshot } from "#/lib/oauth-lexicon-hub-snapshot.server";

/** Timestamp prefix for log lines (ISO-8601, UTC, millisecond precision). */
function ts(): string {
  const stamp = new Date().toISOString();
  return stamp;
}

/**
 * Validate DATABASE_URL, recompute the `/apps/lexicons` hub snapshot, and log
 * the resulting cluster count. The db client is closed even when the refresh
 * throws, so the process can exit promptly.
 */
async function main(): Promise<void> {
  const databaseUrl = process.env.DATABASE_URL;
  if (!databaseUrl?.trim()) {
    console.error(
      `[refresh-oauth-lexicon-hub] ${ts()} DATABASE_URL is required`,
    );
    process.exit(1);
  }

  // Lazy import: only connect after the env guard has passed.
  const { db, dbClient } = await import("#/db/index.server");
  try {
    const snapshot = await refreshOAuthLexiconHubSnapshot(db);
    const count = String(snapshot.clusterCount);
    const at = snapshot.computedAt.toISOString();
    console.log(
      `[refresh-oauth-lexicon-hub] ${ts()} clusterCount=${count} computedAt=${at}`,
    );
  } finally {
    // Always release the pooled connection; ignore close errors.
    await dbClient.end({ timeout: 5 }).catch(() => {});
  }
}

// Entry point: log any unhandled failure (stack when available) and exit
// non-zero so cron/CI notices the failed run.
main().catch((error: unknown) => {
  const detail =
    error instanceof Error ? (error.stack ?? error.message) : error;
  console.error(`[refresh-oauth-lexicon-hub] fatal`, detail);
  process.exit(1);
});
27 changes: 27 additions & 0 deletions scripts/sync-listing-oauth-probes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
/**
* Batch-probe every `store_listings.external_url` for OAuth / authorization metadata
* (same logic as `pnpm oauth:detect-scopes`) and upsert into `store_listing_oauth_probes`.
* After a non–dry-run pass completes, recomputes the `/apps/lexicons` hub snapshot
* (`oauth_lexicon_hub_snapshot`) so the hub stays fast without on-demand HTTP.
*
* Railway cron (suggested): weekly is enough for slow-changing OAuth metadata; avoids
* hammering third-party sites. Example crontab UTC:
Expand All @@ -20,7 +22,9 @@
*/
import "dotenv/config";
import * as schema from "#/db/schema";
import { refreshOAuthLexiconHubSnapshot } from "#/lib/oauth-lexicon-hub-snapshot.server";
import { probeOAuthListingAuth } from "#/lib/oauth-listing-auth-probe";
import { extractOAuthLexiconKeysForStorefrontProbe } from "#/lib/oauth-scope-lexicon-keys";
import { asc, isNotNull } from "drizzle-orm";

function ts(): string {
Expand Down Expand Up @@ -149,6 +153,7 @@ async function main() {
probedUrl: null as string | null,
probedAt: now,
oauthScopesDistinct: [] as Array<string>,
oauthLexiconKeys: [] as Array<string>,
transitionalScopes: [] as Array<string>,
publishesAtprotoScope: null as boolean | null,
clientScopeRawLine: null as string | null,
Expand Down Expand Up @@ -187,6 +192,10 @@ async function main() {
probedUrl: report.inputUrl,
probedAt: now,
oauthScopesDistinct: report.summary.oauthScopesDistinct,
oauthLexiconKeys: extractOAuthLexiconKeysForStorefrontProbe({
oauthScopesDistinct: report.summary.oauthScopesDistinct,
scopeHumanReadable: report.summary.scopeHumanReadable,
}),
transitionalScopes: report.summary.transitionalScopesPresent,
publishesAtprotoScope: report.summary.publishesAtprotoAs,
clientScopeRawLine: report.summary.clientScopeRawLine,
Expand Down Expand Up @@ -224,6 +233,7 @@ async function main() {
probedUrl: rawUrl,
probedAt: now,
oauthScopesDistinct: [] as Array<string>,
oauthLexiconKeys: [] as Array<string>,
transitionalScopes: [] as Array<string>,
publishesAtprotoScope: null as boolean | null,
clientScopeRawLine: null as string | null,
Expand Down Expand Up @@ -301,6 +311,23 @@ async function main() {
elapsedMs,
});

if (!dryRun) {
try {
const hub = await refreshOAuthLexiconHubSnapshot(db);
log("info", "oauth_lexicon_hub_snapshot_refreshed", {
clusterCount: hub.clusterCount,
computedAt: hub.computedAt.toISOString(),
});
} catch (error) {
log("error", "oauth_lexicon_hub_snapshot_refresh_failed", {
error:
error instanceof Error
? (error.stack ?? error.message)
: String(error),
});
}
}

await dbClient.end({ timeout: 5 }).catch(() => {});
}

Expand Down
14 changes: 7 additions & 7 deletions src/components/AppTagHero.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ import { publicMediaUrlOrNull } from "../lib/listing-image-url";
interface AppTagHeroProps {
eyebrow?: string;
title: string;
description?: string;
description?: React.ReactNode;
/**
* When provided, render an accent hero: gradient + emoji scatter with eyebrow, title,
* description, and action **inside** the panel (`uiColor.textContrast` + shadows for
Expand Down Expand Up @@ -339,7 +339,7 @@ export function AppTagHero({
</div>
) : null}
<Flex style={styles.accentBannerContent}>
<Flex direction="row" style={styles.bannerTopStack}>
<Flex direction="row" gap="4xl" style={styles.bannerTopStack}>
<Flex direction="column" gap="4xl">
{eyebrow ? (
<SmallBody style={styles.bannerEyebrow}>{eyebrow}</SmallBody>
Expand Down Expand Up @@ -370,14 +370,14 @@ export function AppTagHero({
</div>
) : (
<>
<div {...stylex.props(styles.imageFrame, ui.bgSubtle)}>
{bannerSrc ? (
{bannerSrc ? (
<div {...stylex.props(styles.imageFrame, ui.bgSubtle)}>
<img {...stylex.props(styles.image)} alt="" src={bannerSrc} />
) : null}
</div>
</div>
) : null}

<Flex justify="between" gap="5xl" align="end" wrap>
<Flex direction="column" gap="5xl" style={styles.copy}>
<Flex direction="column" gap="6xl" style={styles.copy}>
{eyebrow ? (
<SmallBody style={styles.eyebrow}>{eyebrow}</SmallBody>
) : null}
Expand Down
1 change: 1 addition & 0 deletions src/components/SiteFooter.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ const FOOTER_LINK_GROUPS = [
links: [
{ href: "/apps/all", label: "All Apps" },
{ href: "/apps/tags", label: "Categories" },
{ href: "/apps/lexicons", label: "Shared data" },
],
},
] as const;
Expand Down
24 changes: 24 additions & 0 deletions src/db/schema.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import type { ListingLink } from "#/lib/atproto/listing-record";
import type { DirectoryOAuthLexiconHubData } from "#/lib/oauth-lexicon-hub.types";
import type { OAuthAuthProbeReport } from "#/lib/oauth-listing-auth-probe";

import { relations, sql } from "drizzle-orm";
Expand Down Expand Up @@ -674,6 +675,15 @@ export const storeListingOAuthProbes = pgTable(
.array()
.notNull()
.default(sql`'{}'::text[]`),
/**
* Normalized lexicon identifiers from {@link oauthScopesDistinct} and from
* resolved `include:` permission-set checklists in {@link reportJson}
* (`repo:…` / `rpc:…` NSIDs inside bundles). See `extractOAuthLexiconKeysForStorefrontProbe`.
*/
oauthLexiconKeys: text("oauth_lexicon_keys")
.array()
.notNull()
.default(sql`'{}'::text[]`),
publishesAtprotoScope: boolean("publishes_atproto_scope"),

clientScopeRawLine: text("client_scope_raw_line"),
Expand All @@ -697,9 +707,23 @@ export const storeListingOAuthProbes = pgTable(
(table) => [
index("store_listing_oauth_probes_probed_at_idx").on(table.probedAt),
index("store_listing_oauth_probes_slug_idx").on(table.slug),
index("store_listing_oauth_probes_oauth_lexicon_keys_idx").using(
"gin",
table.oauthLexiconKeys,
),
],
);

/**
 * Precomputed payload for `/apps/lexicons` (clusters + resolved lexicon descriptions).
 * Rebuilt by `scripts/sync-listing-oauth-probes.ts` after OAuth probes complete, or
 * manually via `scripts/refresh-oauth-lexicon-hub.ts`.
 */
export const oauthLexiconHubSnapshot = pgTable("oauth_lexicon_hub_snapshot", {
  // Fixed primary key — the table is intended to hold a single overwritable row.
  singletonKey: text("singleton_key").primaryKey().notNull(),
  // Entire hub payload, typed as DirectoryOAuthLexiconHubData at the app layer.
  payload: jsonb("payload").$type<DirectoryOAuthLexiconHubData>().notNull(),
  // Timezone-aware timestamp of when the snapshot was computed.
  computedAt: timestamp("computed_at", { withTimezone: true }).notNull(),
});

/** Ordered homepage hero slots managed from admin. */
export const homePageHeroListings = pgTable(
"home_page_hero_listings",
Expand Down
Loading
Loading