Skip to content
Merged
14 changes: 14 additions & 0 deletions apps/web/src/app/admin/api-request-log/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ export default function ApiRequestLogPage() {
const [userId, setUserId] = useState('');
const [startDate, setStartDate] = useState(weekAgo);
const [endDate, setEndDate] = useState(today);
const [model, setModel] = useState('');
const [error, setError] = useState<string | null>(null);

function handleDownload() {
Expand All @@ -36,6 +37,9 @@ export default function ApiRequestLogPage() {
startDate,
endDate,
});
if (model.trim()) {
params.set('model', model.trim());
}

// Navigate directly to preserve server-side streaming
window.location.href = `/admin/api/api-request-log/download?${params}`;
Expand All @@ -61,6 +65,16 @@ export default function ApiRequestLogPage() {
/>
</div>

<div className="space-y-2">
<Label htmlFor="model">Model (optional)</Label>
<Input
id="model"
placeholder="e.g. claude-sonnet-4-20250514"
value={model}
onChange={e => setModel(e.target.value)}
/>
</div>

<div className="grid grid-cols-2 gap-4">
<div className="space-y-2">
<Label htmlFor="startDate">Start Date</Label>
Expand Down
100 changes: 69 additions & 31 deletions apps/web/src/app/admin/api/api-request-log/download/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,12 @@ import { connection, type NextRequest } from 'next/server';
import { getUserFromAuth } from '@/lib/user.server';
import { db } from '@/lib/drizzle';
import { api_request_log } from '@kilocode/db/schema';
import { and, gte, lte, eq, asc } from 'drizzle-orm';
import { and, gte, lte, eq, asc, gt, count } from 'drizzle-orm';
import archiver from 'archiver';
import { PassThrough } from 'node:stream';

const BATCH_SIZE = 100;

// Turn an ISO-like timestamp string into a filesystem-safe token:
// ':' (illegal on Windows paths) becomes '-', and the space separating
// date from time becomes '_'.
function formatTimestamp(isoString: string): string {
  return isoString.split(':').join('-').split(' ').join('_');
}
Expand Down Expand Up @@ -54,6 +56,18 @@ function jsonError(message: string, status: number) {
});
}

// Build the shared WHERE clause for both the row count and the batched
// row fetch: user id plus inclusive created_at window, optionally
// narrowed to a single model when one was supplied.
function buildFilter(userId: string, parsedStart: Date, parsedEnd: Date, model: string | null) {
  return and(
    eq(api_request_log.kilo_user_id, userId),
    gte(api_request_log.created_at, parsedStart.toISOString()),
    lte(api_request_log.created_at, parsedEnd.toISOString()),
    // Conditional spread keeps the model predicate out entirely when
    // model is null/empty, matching the optional query parameter.
    ...(model ? [eq(api_request_log.model, model)] : [])
  );
}

export async function GET(request: NextRequest) {
await connection();

Expand All @@ -66,6 +80,7 @@ export async function GET(request: NextRequest) {
const userId = searchParams.get('userId');
const startDate = searchParams.get('startDate');
const endDate = searchParams.get('endDate');
const model = searchParams.get('model');

if (!userId || !startDate || !endDate) {
return jsonError('userId, startDate, and endDate are required', 400);
Expand All @@ -77,19 +92,10 @@ export async function GET(request: NextRequest) {
return jsonError('Invalid date format. Use YYYY-MM-DD.', 400);
}

const rows = await db
.select()
.from(api_request_log)
.where(
and(
eq(api_request_log.kilo_user_id, userId),
gte(api_request_log.created_at, parsedStart.toISOString()),
lte(api_request_log.created_at, parsedEnd.toISOString())
)
)
.orderBy(asc(api_request_log.created_at));

if (rows.length === 0) {
const filter = buildFilter(userId, parsedStart, parsedEnd, model);

const [result] = await db.select({ total: count() }).from(api_request_log).where(filter);
if (result.total === 0) {
return jsonError('No records found for the given criteria', 404);
}

Expand All @@ -98,24 +104,54 @@ export async function GET(request: NextRequest) {

archive.pipe(passthrough);

for (const row of rows) {
const ts = formatTimestamp(row.created_at);
const id = String(row.id);

const requestExt = isJson(row.request) ? 'json' : 'txt';
const requestContent = tryFormatJson(row.request);
if (requestContent) {
archive.append(requestContent, { name: `${ts}_${id}_request.${requestExt}` });
// Fetch and archive rows in batches using cursor-based pagination to
// avoid loading the entire result set into memory at once.
const appendRows = async () => {
let cursor: bigint | null = null;
for (;;) {
const rows = await db
.select()
.from(api_request_log)
.where(cursor ? and(filter, gt(api_request_log.id, cursor)) : filter)
.orderBy(asc(api_request_log.id))
.limit(BATCH_SIZE);

if (rows.length === 0) break;

for (const row of rows) {
const ts = formatTimestamp(row.created_at);
const id = String(row.id);

const requestExt = isJson(row.request) ? 'json' : 'txt';
const requestContent = tryFormatJson(row.request);
if (requestContent) {
archive.append(requestContent, { name: `${ts}_${id}_request.${requestExt}` });
}

const responseExt = isJson(row.response) ? 'json' : 'txt';
const responseContent = tryFormatJson(row.response);
if (responseContent) {
archive.append(responseContent, { name: `${ts}_${id}_response.${responseExt}` });
}
}

cursor = rows[rows.length - 1].id;

// Wait for the passthrough stream to drain before fetching the next
// batch so we don't buffer unbounded data in memory.
await new Promise<void>(resolve => {
if (passthrough.writableNeedDrain) {
passthrough.once('drain', resolve);
} else {
resolve();
}
});
}

const responseExt = isJson(row.response) ? 'json' : 'txt';
const responseContent = tryFormatJson(row.response);
if (responseContent) {
archive.append(responseContent, { name: `${ts}_${id}_response.${responseExt}` });
}
}
await archive.finalize();
};

void archive.finalize();
void appendRows().catch(error => passthrough.destroy(error));

const webStream = new ReadableStream({
start(controller) {
Expand All @@ -125,8 +161,10 @@ export async function GET(request: NextRequest) {
},
});

const safeUserId = userId.replaceAll('/', '-').replaceAll(':', '-');
const filename = `api-request-log_${safeUserId}_${startDate}_${endDate}.zip`;
const sanitize = (s: string) => s.replaceAll('/', '-').replaceAll(':', '-');
const safeUserId = sanitize(userId);
const safeModel = model ? `_${sanitize(model)}` : '';
const filename = `api-request-log_${safeUserId}_${startDate}_${endDate}${safeModel}.zip`;

return new Response(webStream, {
headers: {
Expand Down
Loading