Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
2dcb935
feat: session recording database schema, S3 utilities, and batch uplo…
BilalG1 Feb 11, 2026
c62446b
feat: client-side session recording SDK with rrweb capture
BilalG1 Feb 11, 2026
6799c8e
feat: admin SDK methods and internal API endpoints for session record…
BilalG1 Feb 11, 2026
f991184
feat: dashboard replay viewer for multi-tab session recordings
BilalG1 Feb 11, 2026
3f3f8e8
small
BilalG1 Feb 11, 2026
5092434
replay frontend fix
BilalG1 Feb 11, 2026
2a590e6
record pre-login
BilalG1 Feb 11, 2026
52a4388
Merge branch 'dev' into analytics-replays-1
BilalG1 Feb 11, 2026
9f99dcf
Merge branch 'analytics-replays-1' into analytics-replays-2
BilalG1 Feb 11, 2026
f0e07e9
Merge branch 'analytics-replays-2' into analytics-replays-3
BilalG1 Feb 11, 2026
7d9925a
Merge branch 'analytics-replays-3' into analytics-replays-4
BilalG1 Feb 11, 2026
a18fa97
stack s3 private key
BilalG1 Feb 12, 2026
dad51aa
upsert session by refresh token
BilalG1 Feb 12, 2026
05e4ade
fix
BilalG1 Feb 12, 2026
ddcf529
Merge branch 'dev' into analytics-replays-1
BilalG1 Feb 12, 2026
f098e96
Merge branch 'analytics-replays-1' into analytics-replays-2
BilalG1 Feb 12, 2026
0cd4301
Merge branch 'analytics-replays-2' into analytics-replays-3
BilalG1 Feb 12, 2026
c4d6503
Merge branch 'analytics-replays-3' into analytics-replays-4
BilalG1 Feb 12, 2026
6e42342
sessions by refresh token, disable replays by default
BilalG1 Feb 12, 2026
659c561
fix lint
BilalG1 Feb 12, 2026
36ab8ff
remove sot test
BilalG1 Feb 12, 2026
3660f10
Merge remote-tracking branch 'origin/dev' into analytics-replays-4
BilalG1 Feb 12, 2026
c312176
improve chunk fetching logic
BilalG1 Feb 12, 2026
5e1e828
max session time, improved replayer testing
BilalG1 Feb 12, 2026
278a745
grouped chunk event fetching
BilalG1 Feb 12, 2026
175ac9d
fix tests
BilalG1 Feb 12, 2026
0a1097d
Merge branch 'dev' into analytics-replays-4
BilalG1 Feb 13, 2026
3d51c70
js replays
BilalG1 Feb 13, 2026
5868a35
small pr fixes
BilalG1 Feb 13, 2026
fa3242e
fix stuck replayer bug
BilalG1 Feb 13, 2026
b9e9047
handle analytics app disabled in event ingestion
BilalG1 Feb 16, 2026
6f641fa
Merge branch 'dev' into analytics-replays-4
BilalG1 Feb 16, 2026
e6b0cd0
merge dev
BilalG1 Feb 16, 2026
e1c9d8e
Merge branch 'dev' into analytics-replays-4
BilalG1 Feb 16, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified CLAUDE.md
Binary file not shown.
1 change: 1 addition & 0 deletions apps/backend/.env
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ STACK_S3_REGION=
STACK_S3_ACCESS_KEY_ID=
STACK_S3_SECRET_ACCESS_KEY=
STACK_S3_BUCKET=
STACK_S3_PRIVATE_BUCKET=

# AWS configuration
STACK_AWS_REGION=
Expand Down
1 change: 1 addition & 0 deletions apps/backend/.env.development
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ STACK_S3_REGION=us-east-1
STACK_S3_ACCESS_KEY_ID=s3mockroot
STACK_S3_SECRET_ACCESS_KEY=s3mockroot
STACK_S3_BUCKET=stack-storage
STACK_S3_PRIVATE_BUCKET=stack-storage-private

# AWS region defaults to LocalStack
STACK_AWS_REGION=us-east-1
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
-- Session recording MVP: store session metadata in Postgres and rrweb events in S3.

-- One row per recorded user session. Only metadata lives here; the rrweb event
-- payloads themselves are uploaded to S3 and referenced via SessionRecordingChunk.
-- Primary key is (tenancyId, id) so all access is tenancy-scoped.
CREATE TABLE "SessionRecording" (
"id" UUID NOT NULL,
"tenancyId" UUID NOT NULL,
"projectUserId" UUID NOT NULL,
"refreshTokenId" UUID NOT NULL,
"startedAt" TIMESTAMP(3) NOT NULL,
"lastEventAt" TIMESTAMP(3) NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "SessionRecording_pkey" PRIMARY KEY ("tenancyId","id")
);

-- One row per uploaded batch of rrweb events for a session. The actual bytes
-- live in S3 at "s3Key"; eventCount/byteLength/firstEventAt/lastEventAt are
-- denormalized so the replay viewer can plan fetches without touching S3.
CREATE TABLE "SessionRecordingChunk" (
"id" UUID NOT NULL,
"tenancyId" UUID NOT NULL,
"sessionRecordingId" UUID NOT NULL,
"batchId" UUID NOT NULL,
"tabId" TEXT NOT NULL,
"browserSessionId" TEXT NOT NULL,
"s3Key" TEXT NOT NULL,
"eventCount" INTEGER NOT NULL,
"byteLength" INTEGER NOT NULL,
"firstEventAt" TIMESTAMP(3) NOT NULL,
"lastEventAt" TIMESTAMP(3) NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "SessionRecordingChunk_pkey" PRIMARY KEY ("id")
);

-- Cascade deletes so removing a tenancy (or a user) removes its recordings.
ALTER TABLE "SessionRecording"
ADD CONSTRAINT "SessionRecording_tenancyId_fkey"
FOREIGN KEY ("tenancyId") REFERENCES "Tenancy"("id") ON DELETE CASCADE ON UPDATE CASCADE;

ALTER TABLE "SessionRecording"
ADD CONSTRAINT "SessionRecording_tenancyId_projectUserId_fkey"
FOREIGN KEY ("tenancyId", "projectUserId") REFERENCES "ProjectUser"("tenancyId", "projectUserId") ON DELETE CASCADE ON UPDATE CASCADE;

ALTER TABLE "SessionRecordingChunk"
ADD CONSTRAINT "SessionRecordingChunk_tenancyId_fkey"
FOREIGN KEY ("tenancyId") REFERENCES "Tenancy"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- NOTE: deleting a SessionRecording row cascades to its chunk rows, but the S3
-- objects they point to are not removed by the database — cleanup of the S3
-- keys must happen elsewhere.
ALTER TABLE "SessionRecordingChunk"
ADD CONSTRAINT "SessionRecordingChunk_tenancyId_sessionRecordingId_fkey"
FOREIGN KEY ("tenancyId","sessionRecordingId") REFERENCES "SessionRecording"("tenancyId","id") ON DELETE CASCADE ON UPDATE CASCADE;

-- List a user's recordings in time order.
CREATE INDEX "SessionRecording_tenancyId_projectUserId_startedAt_idx"
ON "SessionRecording"("tenancyId", "projectUserId", "startedAt");

-- List recent activity per tenancy.
CREATE INDEX "SessionRecording_tenancyId_lastEventAt_idx"
ON "SessionRecording"("tenancyId", "lastEventAt");

-- Lookup by refresh token; ordered by updatedAt (server-side time) rather than
-- lastEventAt, because client-reported event timing can be spoofed.
CREATE INDEX "SessionRecording_tenancyId_refreshTokenId_updatedAt_idx"
ON "SessionRecording"("tenancyId", "refreshTokenId", "updatedAt");

-- A batch may only be uploaded once per session (idempotent upload retries).
CREATE UNIQUE INDEX "SessionRecordingChunk_tenancyId_sessionRecordingId_batchId_key"
ON "SessionRecordingChunk"("tenancyId", "sessionRecordingId", "batchId");

-- Fetch a session's chunks in upload order; also serves the FK lookup.
CREATE INDEX "SessionRecordingChunk_tenancyId_sessionRecordingId_createdA_idx"
ON "SessionRecordingChunk"("tenancyId", "sessionRecordingId", "createdAt");
59 changes: 59 additions & 0 deletions apps/backend/prisma/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@ model Tenancy {
organizationId String? @db.Uuid
hasNoOrganization BooleanTrue?
emailOutboxes EmailOutbox[]
sessionRecordings SessionRecording[]
sessionRecordingChunks SessionRecordingChunk[]

@@unique([projectId, branchId, organizationId])
@@unique([projectId, branchId, hasNoOrganization])
Expand Down Expand Up @@ -234,6 +236,7 @@ model ProjectUser {
Project Project? @relation(fields: [projectId], references: [id])
projectId String?
userNotificationPreference UserNotificationPreference[]
sessionRecordings SessionRecording[]

@@id([tenancyId, projectUserId])
@@unique([mirroredProjectId, mirroredBranchId, projectUserId])
Expand Down Expand Up @@ -277,6 +280,62 @@ model ProjectUserOAuthAccount {
@@index([tenancyId, projectUserId])
}

// Metadata for one recorded user session. The rrweb event payloads themselves
// are stored in S3 and referenced through SessionRecordingChunk rows; this row
// only tracks ownership (tenancy/user/refresh token) and timing for listing.
model SessionRecording {
  id String @db.Uuid

  tenancyId String @db.Uuid
  projectUserId String @db.Uuid
  refreshTokenId String @db.Uuid

  // Client-reported event window.
  startedAt DateTime
  lastEventAt DateTime

  // Server-side timestamps.
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  projectUser ProjectUser @relation(fields: [tenancyId, projectUserId], references: [tenancyId, projectUserId], onDelete: Cascade)
  tenancy Tenancy @relation(fields: [tenancyId], references: [id], onDelete: Cascade)

  chunks SessionRecordingChunk[]

  @@id([tenancyId, id])
  @@index([tenancyId, projectUserId, startedAt])
  @@index([tenancyId, lastEventAt])
  // index by updatedAt instead of lastEventAt because event timing can be spoofed
  @@index([tenancyId, refreshTokenId, updatedAt])
}

// One uploaded batch of rrweb events for a SessionRecording. The event bytes
// live in S3 at `s3Key`; the row stores enough metadata for the replay viewer
// to plan fetches without touching S3.
model SessionRecordingChunk {
id String @id @default(uuid()) @db.Uuid

tenancyId String @db.Uuid
sessionRecordingId String @db.Uuid

// Unique per uploaded batch for a given session id (makes upload retries idempotent).
batchId String @db.Uuid

// Ephemeral in-memory id generated by the client. Stored for future tab separation if needed.
tabId String

// Client-generated session id from localStorage, stored as metadata.
browserSessionId String

// Location of the gzipped event payload in the private S3 bucket. NOTE(review):
// cascading row deletes do not remove the S3 object — cleanup happens elsewhere.
s3Key String
// Number of rrweb events in the stored payload.
eventCount Int
// Size of the stored payload in bytes.
byteLength Int

// Client-reported event window covered by this chunk.
firstEventAt DateTime
lastEventAt DateTime

// Server-side upload time; used to order a session's chunks.
createdAt DateTime @default(now())

// Composite FK keeps every chunk tenancy-scoped to its parent recording.
sessionRecording SessionRecording @relation(fields: [tenancyId, sessionRecordingId], references: [tenancyId, id], onDelete: Cascade)
tenancy Tenancy @relation(fields: [tenancyId], references: [id], onDelete: Cascade)

@@unique([tenancyId, sessionRecordingId, batchId])
@@index([tenancyId, sessionRecordingId, createdAt])
}

enum ContactChannelType {
EMAIL
// PHONE
Expand Down
69 changes: 69 additions & 0 deletions apps/backend/prisma/seed.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1118,6 +1118,13 @@ async function seedDummyProject(options: DummyProjectSeedOptions) {
userEmailToId,
});

await seedDummySessionRecordings({
prisma: dummyPrisma,
tenancyId: dummyTenancy.id,
userEmailToId,
targetSessionRecordingCount: 75
});

console.log('Seeded dummy project data');
}

Expand Down Expand Up @@ -1765,3 +1772,65 @@ async function seedDummySessionActivityEvents(options: SessionActivityEventSeedO

console.log('Finished seeding session activity events');
}

// Inputs for seeding dummy session recordings.
type SessionRecordingSeedOptions = {
  prisma: PrismaClientTransaction,
  tenancyId: string,
  userEmailToId: Map<string, string>,
  targetSessionRecordingCount?: number,
};

/**
 * Seeds fake SessionRecording metadata rows for the dummy project so the
 * dashboard replay list has data to render (no S3 chunk payloads are created).
 *
 * Re-runnable: does nothing once the tenancy already holds at least
 * `targetSessionRecordingCount` recordings, and otherwise only tops up the
 * difference. Recordings are spread randomly over the last two weeks with
 * durations between 10 seconds and ~20 minutes, assigned to random dummy users.
 *
 * Throws if the dummy project has no users to attach recordings to.
 */
async function seedDummySessionRecordings(options: SessionRecordingSeedOptions) {
  const { prisma, tenancyId, userEmailToId, targetSessionRecordingCount = 250 } = options;

  const existingCount = await prisma.sessionRecording.count({
    where: { tenancyId },
  });
  if (existingCount >= targetSessionRecordingCount) {
    console.log(`Dummy project already has ${existingCount} session recordings, skipping seeding`);
    return;
  }
  const toCreate = targetSessionRecordingCount - existingCount;

  const userIds = [...userEmailToId.values()];
  if (userIds.length === 0) {
    throw new Error('Cannot seed session recordings: no dummy project users exist');
  }

  // Recording start times are drawn uniformly from the last 14 calendar days.
  const now = new Date();
  const windowStart = new Date(now);
  windowStart.setDate(windowStart.getDate() - 14);
  const windowMs = now.getTime() - windowStart.getTime();

  const seeds: Prisma.SessionRecordingCreateManyInput[] = Array.from({ length: toCreate }, () => {
    const startMs = windowStart.getTime() + Math.random() * windowMs;
    const durationMs = 10_000 + Math.floor(Math.random() * (20 * 60 * 1000)); // 10s..20m
    return {
      tenancyId,
      refreshTokenId: generateUuid(),
      projectUserId: userIds[Math.floor(Math.random() * userIds.length)]!,
      id: generateUuid(),
      startedAt: new Date(startMs),
      lastEventAt: new Date(startMs + durationMs),
    };
  });

  await prisma.sessionRecording.createMany({
    data: seeds,
  });

  console.log(`Seeded ${toCreate} session recordings`);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
import { getPrismaClientForTenancy } from "@/prisma-client";
import { downloadBytes } from "@/s3";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { KnownErrors } from "@stackframe/stack-shared";
import { StackAssertionError } from "@stackframe/stack-shared/dist/utils/errors";
import { adaptSchema, adminAuthTypeSchema, yupArray, yupMixed, yupNumber, yupObject, yupString } from "@stackframe/stack-shared/dist/schema-fields";
import { promisify } from "node:util";
import { gunzip as gunzipCb } from "node:zlib";

// Promisified zlib gunzip — chunk payloads are stored gzip-compressed in S3.
const gunzip = promisify(gunzipCb);

/**
 * Internal admin endpoint: fetches a single session recording chunk from the
 * private S3 bucket, decompresses it, validates its shape, and returns the
 * contained rrweb events for the dashboard replay viewer.
 *
 * Responds 200 with `{ events }` on success; throws KnownErrors.ItemNotFound
 * when the chunk row or its S3 object is missing, and StackAssertionError when
 * the stored payload is corrupt or does not match the requested session.
 */
export const GET = createSmartRouteHandler({
  metadata: { hidden: true },
  request: yupObject({
    auth: yupObject({
      type: adminAuthTypeSchema.defined(),
      tenancy: adaptSchema.defined(),
    }).defined(),
    params: yupObject({
      session_recording_id: yupString().defined(),
      chunk_id: yupString().defined(),
    }).defined(),
  }),
  response: yupObject({
    statusCode: yupNumber().oneOf([200]).defined(),
    bodyType: yupString().oneOf(["json"]).defined(),
    body: yupObject({
      events: yupArray(yupMixed().defined()).defined(),
    }).defined(),
  }),
  async handler({ auth, params }) {
    const prisma = await getPrismaClientForTenancy(auth.tenancy);

    const sessionRecordingId = params.session_recording_id;
    const chunkId = params.chunk_id;

    // Scope the lookup by tenancy AND session id so a chunk id belonging to
    // another tenant or session can never be fetched.
    const chunk = await prisma.sessionRecordingChunk.findFirst({
      where: {
        tenancyId: auth.tenancy.id,
        sessionRecordingId,
        id: chunkId,
      },
      select: {
        s3Key: true,
      },
    });
    if (!chunk) {
      throw new KnownErrors.ItemNotFound(chunkId);
    }

    let bytes: Uint8Array;
    try {
      bytes = await downloadBytes({ key: chunk.s3Key, private: true });
    } catch (e: any) {
      // The DB row can outlive the S3 object; surface a missing object the
      // same way as a missing row.
      const status = e?.$metadata?.httpStatusCode;
      if (status === 404) {
        throw new KnownErrors.ItemNotFound(chunkId);
      }
      throw e;
    }

    // A corrupt or truncated object would otherwise escape as a raw zlib
    // error; wrap it like the JSON decode failure below so bad stored data is
    // consistently reported as a server-side invariant violation.
    let unzipped: Uint8Array;
    try {
      unzipped = new Uint8Array(await gunzip(bytes));
    } catch (e) {
      throw new StackAssertionError("Failed to gunzip session recording chunk", { cause: e });
    }

    let parsed: any;
    try {
      parsed = JSON.parse(new TextDecoder().decode(unzipped));
    } catch (e) {
      throw new StackAssertionError("Failed to decode session recording chunk JSON", { cause: e });
    }

    // Sanity-check the payload before returning it: it must be an object for
    // the session we looked up, carrying an events array.
    if (typeof parsed !== "object" || parsed === null) {
      throw new StackAssertionError("Decoded session recording chunk is not an object");
    }
    if (parsed.session_recording_id !== sessionRecordingId) {
      throw new StackAssertionError("Decoded session recording chunk session_recording_id mismatch", {
        expected: sessionRecordingId,
        actual: parsed.session_recording_id,
      });
    }
    if (!Array.isArray(parsed.events)) {
      throw new StackAssertionError("Decoded session recording chunk events is not an array");
    }

    return {
      statusCode: 200,
      bodyType: "json",
      body: {
        events: parsed.events,
      },
    };
  },
});
Loading
Loading