Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/accounts/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ export {
createAccountsDB,
} from "./db";
export * from "./types";
export { generateToken, hashToken, verifyToken } from "./util/hash";
export { generateToken, tokenHash } from "./util/hash";
export { generateSlug } from "./util/slug";
30 changes: 30 additions & 0 deletions packages/accounts/migrate/migrations/009_session_lookup.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
-- Session and invitation tokens are stored as their sha256 digest in
-- token_hash, used as a unique-indexed lookup key. Raw tokens are 256-bit
-- CSPRNG output, so sha256 alone provides preimage resistance equivalent
-- to argon2 against an offline DB dump — without paying ~60ms per verify
-- and without the O(n) scan-and-verify pattern the previous schema forced.

-- Drop existing rows: we don't store raw tokens, so we cannot derive
-- token_hash for them. All current CLI sessions become invalid (next
-- command yields a 401, "Invalid or expired session"; user runs `me login`).
-- All pending invitations must be re-issued.
truncate {{schema}}.session;
truncate {{schema}}.invitation;

-- `drop column` cascades to dependent indexes and constraints. This removes
-- session_token_key (unique constraint) and idx_session_token, plus
-- invitation_token_key (unique constraint) and idx_invitation_token.
alter table {{schema}}.session drop column token;
alter table {{schema}}.invitation drop column token;

-- Safe to declare `not null` with no default: both tables were truncated
-- above, so there are no existing rows that would need a backfill value.
alter table {{schema}}.session
add column token_hash bytea not null;
alter table {{schema}}.invitation
add column token_hash bytea not null;

-- Full unique index: token_hash is the single lookup key for session
-- validation.
create unique index session_token_hash_uniq
on {{schema}}.session (token_hash);

-- Partial unique index: only pending (unaccepted) invitations participate.
-- This matches the pending-invitation lookup predicate and keeps accepted
-- rows out of the index entirely.
create unique index invitation_token_hash_uniq
on {{schema}}.invitation (token_hash)
where accepted_at is null;
4 changes: 4 additions & 0 deletions packages/accounts/migrate/runner.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ import migration007 from "./migrations/007_device_authorization.sql" with {
import migration008 from "./migrations/008_drop_org_owner_trigger.sql" with {
type: "text",
};
import migration009 from "./migrations/009_session_lookup.sql" with {
type: "text",
};
import { type AccountsConfig, resolveConfig, template } from "./template";

interface Migration {
Expand All @@ -37,6 +40,7 @@ const migrations: Migration[] = [
{ name: "006_ops_support", sql: migration006 },
{ name: "007_device_authorization", sql: migration007 },
{ name: "008_drop_org_owner_trigger", sql: migration008 },
{ name: "009_session_lookup", sql: migration009 },
];

export interface MigrateResult {
Expand Down
40 changes: 18 additions & 22 deletions packages/accounts/ops/invitation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,14 @@ import type {
Invitation,
OrgRole,
} from "../types";
import { generateToken, hashToken, verifyToken } from "../util/hash";
import { generateToken, tokenHash } from "../util/hash";
import { withTx } from "./_tx";

interface InvitationRow {
id: string;
org_id: string;
email: string;
role: OrgRole;
token: string;
invited_by: string;
expires_at: Date;
accepted_at: Date | null;
Expand Down Expand Up @@ -43,24 +42,24 @@ export function invitationOps(ctx: AccountsContext) {
const { orgId, email, role, invitedBy, expiresInDays = 7 } = params;

const rawToken = generateToken();
const tokenHash = await hashToken(rawToken);
const hash = tokenHash(rawToken);
const expiresAt = new Date();
expiresAt.setDate(expiresAt.getDate() + expiresInDays);

return withTx(ctx, "createInvitation", async (sql) => {
// Upsert: replace existing pending invitation for same org+email
const rows = await sql<InvitationRow[]>`
insert into ${sql.unsafe(schema)}.invitation
(org_id, email, role, token, invited_by, expires_at)
values (${orgId}, ${email}, ${role}, ${tokenHash}, ${invitedBy}, ${expiresAt})
(org_id, email, role, token_hash, invited_by, expires_at)
values (${orgId}, ${email}, ${role}, ${hash}, ${invitedBy}, ${expiresAt})
on conflict (org_id, email)
do update set
role = excluded.role,
token = excluded.token,
token_hash = excluded.token_hash,
invited_by = excluded.invited_by,
expires_at = excluded.expires_at,
accepted_at = null
returning id, org_id, email, role, token, invited_by, expires_at, accepted_at, created_at
returning id, org_id, email, role, invited_by, expires_at, accepted_at, created_at
`;
const row = rows[0];
if (!row) {
Expand All @@ -74,24 +73,21 @@ export function invitationOps(ctx: AccountsContext) {
},

async getInvitationByToken(rawToken: string): Promise<Invitation | null> {
const hash = tokenHash(rawToken);

return withTx(ctx, "getInvitationByToken", async (sql) => {
// Get all pending invitations and verify token against each
// This is necessary because we can't query by hash directly
// Single indexed lookup on the partial unique index:
// invitation_token_hash_uniq (token_hash) where accepted_at is null.
const rows = await sql<InvitationRow[]>`
select id, org_id, email, role, token, invited_by, expires_at, accepted_at, created_at
select id, org_id, email, role, invited_by, expires_at, accepted_at, created_at
from ${sql.unsafe(schema)}.invitation
where accepted_at is null
where token_hash = ${hash}
and accepted_at is null
and expires_at > now()
limit 1
`;

for (const row of rows) {
const valid = await verifyToken(rawToken, row.token);
if (valid) {
return rowToInvitation(row);
}
}

return null;
const row = rows[0];
return row ? rowToInvitation(row) : null;
});
},

Expand All @@ -102,7 +98,7 @@ export function invitationOps(ctx: AccountsContext) {
set accepted_at = now()
where id = ${id}
and accepted_at is null
returning id, org_id, email, role, token, invited_by, expires_at, accepted_at, created_at
returning id, org_id, email, role, invited_by, expires_at, accepted_at, created_at
`;
const row = rows[0];
return row ? rowToInvitation(row) : null;
Expand All @@ -122,7 +118,7 @@ export function invitationOps(ctx: AccountsContext) {
async listPendingInvitations(orgId: string): Promise<Invitation[]> {
return withTx(ctx, "listPendingInvitations", async (sql) => {
const rows = await sql<InvitationRow[]>`
select id, org_id, email, role, token, invited_by, expires_at, accepted_at, created_at
select id, org_id, email, role, invited_by, expires_at, accepted_at, created_at
from ${sql.unsafe(schema)}.invitation
where org_id = ${orgId}
and accepted_at is null
Expand Down
51 changes: 25 additions & 26 deletions packages/accounts/ops/session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,12 @@ import type {
Identity,
Session,
} from "../types";
import { generateToken, hashToken, verifyToken } from "../util/hash";
import { generateToken, tokenHash } from "../util/hash";
import { withTx } from "./_tx";

interface SessionRow {
id: string;
identity_id: string;
token: string;
expires_at: Date;
created_at: Date;
}
Expand Down Expand Up @@ -42,15 +41,15 @@ export function sessionOps(ctx: AccountsContext) {
const { identityId, expiresInDays = 30 } = params;

const rawToken = generateToken();
const tokenHash = await hashToken(rawToken);
const hash = tokenHash(rawToken);
const expiresAt = new Date();
expiresAt.setDate(expiresAt.getDate() + expiresInDays);

return withTx(ctx, "createSession", async (sql) => {
const rows = await sql<SessionRow[]>`
insert into ${sql.unsafe(schema)}.session (identity_id, token, expires_at)
values (${identityId}, ${tokenHash}, ${expiresAt})
returning id, identity_id, token, expires_at, created_at
insert into ${sql.unsafe(schema)}.session (identity_id, token_hash, expires_at)
values (${identityId}, ${hash}, ${expiresAt})
returning id, identity_id, expires_at, created_at
`;
const row = rows[0];
if (!row) {
Expand All @@ -66,35 +65,35 @@ export function sessionOps(ctx: AccountsContext) {
async validateSession(
rawToken: string,
): Promise<{ session: Session; identity: Identity } | null> {
const hash = tokenHash(rawToken);

return withTx(ctx, "validateSession", async (sql) => {
// Get all non-expired sessions and verify token against each
// Single indexed lookup: token_hash is unique. No loop, no argon2.
const rows = await sql<SessionWithIdentityRow[]>`
select
s.id, s.identity_id, s.token, s.expires_at, s.created_at,
s.id, s.identity_id, s.expires_at, s.created_at,
i.email as identity_email, i.name as identity_name,
i.created_at as identity_created_at, i.updated_at as identity_updated_at
from ${sql.unsafe(schema)}.session s
inner join ${sql.unsafe(schema)}.identity i on i.id = s.identity_id
where s.expires_at > now()
where s.token_hash = ${hash}
and s.expires_at > now()
limit 1
`;

for (const row of rows) {
const valid = await verifyToken(rawToken, row.token);
if (valid) {
return {
session: rowToSession(row),
identity: {
id: row.identity_id,
email: row.identity_email,
name: row.identity_name,
createdAt: row.identity_created_at,
updatedAt: row.identity_updated_at,
},
};
}
const row = rows[0];
if (!row) {
return null;
}

return null;
return {
session: rowToSession(row),
identity: {
id: row.identity_id,
email: row.identity_email,
name: row.identity_name,
createdAt: row.identity_created_at,
updatedAt: row.identity_updated_at,
},
};
});
},

Expand Down
49 changes: 49 additions & 0 deletions packages/accounts/util/hash.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import { describe, expect, test } from "bun:test";
import { generateToken, tokenHash } from "./hash";

describe("generateToken", () => {
  test("produces a base64url string of the expected length", () => {
    // 32 bytes base64url-encoded with padding stripped: ceil(32 / 3) * 4 = 44,
    // minus the trailing '=' padding character → 43 chars.
    const value = generateToken();
    expect(value.length).toBe(43);
    expect(/^[A-Za-z0-9_-]+$/.test(value)).toBe(true);
  });

  test("produces unique values across calls", () => {
    // 100 draws from a 256-bit space must never collide.
    const tokens = Array.from({ length: 100 }, () => generateToken());
    expect(new Set(tokens).size).toBe(100);
  });
});

describe("tokenHash", () => {
  test("returns 32 raw bytes", () => {
    const digest = tokenHash("anything");
    expect(digest instanceof Buffer).toBe(true);
    expect(digest.length).toBe(32);
  });

  test("is deterministic", () => {
    const first = tokenHash("hello world");
    const second = tokenHash("hello world");
    expect(first.equals(second)).toBe(true);
  });

  test("differs for different inputs", () => {
    const base = tokenHash("hello world");
    const variant = tokenHash("hello world!");
    expect(base.equals(variant)).toBe(false);
  });

  test("matches the published sha256 of a known string", () => {
    // Known value: sha256("abc") = ba7816bf8f01cfea414140de5dae2223
    // b00361a396177a9cb410ff61f20015ad
    expect(tokenHash("abc").toString("hex")).toBe(
      "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
    );
  });
});
39 changes: 14 additions & 25 deletions packages/accounts/util/hash.ts
Original file line number Diff line number Diff line change
@@ -1,33 +1,14 @@
/**
* Token hashing utilities using Argon2id
* Token utilities for session and invitation tokens.
*
* Used for session tokens and invitation tokens - we only need to verify,
* not retrieve the original value.
* Tokens are 256-bit CSPRNG output (base64url-encoded). We store sha256(token)
* in a unique-indexed column and look up by it directly — no slow-hash verifier
* is needed because the token's entropy alone defeats offline preimage attacks.
* See migration 009_session_lookup.sql for the rationale.
*/

/**
* Hash a token for storage using Argon2id
*/
export async function hashToken(token: string): Promise<string> {
return Bun.password.hash(token, {
algorithm: "argon2id",
memoryCost: 19456,
timeCost: 2,
});
}

/**
* Verify a token against its hash
*/
export async function verifyToken(
token: string,
hash: string,
): Promise<boolean> {
return Bun.password.verify(token, hash);
}

/**
* Generate a random token (32 bytes, base64url encoded)
* Generate a random token (32 bytes, base64url encoded).
*/
export function generateToken(): string {
const bytes = crypto.getRandomValues(new Uint8Array(32));
Expand All @@ -36,3 +17,11 @@ export function generateToken(): string {
.replace(/\//g, "_")
.replace(/=/g, "");
}

/**
 * sha256 of the raw token, returned as 32 raw bytes. This is the value stored
 * in the session/invitation `token_hash` bytea column and used as the
 * unique-indexed lookup key.
 */
export function tokenHash(rawToken: string): Buffer {
  const hasher = new Bun.CryptoHasher("sha256");
  hasher.update(rawToken);
  return hasher.digest();
}
Loading