diff --git a/docker-compose.smoke.yml b/docker-compose.smoke.yml index af00e4b..4709a3e 100644 --- a/docker-compose.smoke.yml +++ b/docker-compose.smoke.yml @@ -4,8 +4,6 @@ # Usage: docker compose -f docker-compose.yml -f docker-compose.smoke.yml up --build services: app: - ports: - - "${APP_PORT:-3060}:3050" environment: - EMBEDDING_PROVIDER=transformers - EMBEDDING_MODEL=Xenova/all-MiniLM-L6-v2 diff --git a/docker-compose.yml b/docker-compose.yml index 5a141ed..dd62d4f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,13 @@ services: postgres: image: pgvector/pgvector:pg17 + restart: unless-stopped environment: POSTGRES_USER: supermem POSTGRES_PASSWORD: supermem POSTGRES_DB: supermem ports: - - "5433:5432" + - "${POSTGRES_PORT:-5433}:5432" volumes: - pgdata:/var/lib/postgresql/data healthcheck: @@ -19,7 +20,7 @@ services: build: . restart: unless-stopped ports: - - "3050:3050" + - "${APP_PORT:-3050}:3050" environment: DATABASE_URL: postgresql://supermem:supermem@postgres:5432/supermem PORT: "3050" diff --git a/scripts/docker-smoke-test.sh b/scripts/docker-smoke-test.sh index 46bcaef..38e9468 100755 --- a/scripts/docker-smoke-test.sh +++ b/scripts/docker-smoke-test.sh @@ -23,8 +23,9 @@ set -euo pipefail SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" PROJECT_DIR="$(cd "$SCRIPT_DIR/.." 
&& pwd)" -COMPOSE_PROJECT="supermem-smoke-test" -APP_PORT=3060 # Use non-default port to avoid conflicts with dev server +COMPOSE_PROJECT="${COMPOSE_PROJECT:-}" +APP_PORT="${APP_PORT:-}" +POSTGRES_PORT="${POSTGRES_PORT:-}" HEALTH_TIMEOUT=90 HEALTH_INTERVAL=2 @@ -42,6 +43,33 @@ log() { echo -e "${GREEN}[smoke]${NC} $*"; } warn() { echo -e "${YELLOW}[smoke]${NC} $*"; } fail() { echo -e "${RED}[FAIL]${NC} $*"; } +port_in_use() { + local port="$1" + lsof -n -P -iTCP:"$port" -sTCP:LISTEN >/dev/null 2>&1 +} + +find_free_port() { + local port="$1" + while port_in_use "$port"; do + port=$((port + 1)) + done + echo "$port" +} + +resolve_port() { + local requested="$1" + local fallback="$2" + if [[ -n "$requested" ]]; then + if port_in_use "$requested"; then + fail "Requested port is already in use: $requested" + exit 1 + fi + echo "$requested" + return + fi + find_free_port "$fallback" +} + assert_ok() { local name="$1" total=$((total + 1)) @@ -62,7 +90,7 @@ cleanup() { trap cleanup EXIT # --- Pre-flight checks --- -for cmd in docker curl jq; do +for cmd in docker curl jq lsof; do if ! command -v "$cmd" &>/dev/null; then fail "Required command not found: $cmd" exit 1 @@ -76,11 +104,17 @@ fi cd "$PROJECT_DIR" +APP_PORT="$(resolve_port "$APP_PORT" 3060)" +POSTGRES_PORT="$(resolve_port "$POSTGRES_PORT" 5444)" +if [[ -z "$COMPOSE_PROJECT" ]]; then + COMPOSE_PROJECT="supermem-smoke-test-${APP_PORT}-${POSTGRES_PORT}" +fi + # --- Build + Start --- -log "Starting compose stack (project=$COMPOSE_PROJECT, port=$APP_PORT)..." +log "Starting compose stack (project=$COMPOSE_PROJECT, app_port=$APP_PORT, postgres_port=$POSTGRES_PORT)..." 
-# Override the app port to avoid conflicts -export APP_PORT +# Override published ports to avoid conflicts with local dev stacks +export APP_PORT POSTGRES_PORT if [[ "${SKIP_BUILD:-}" == "1" ]]; then docker compose -p "$COMPOSE_PROJECT" \ @@ -154,10 +188,9 @@ fi # --- Test 4: Database connectivity (via stats endpoint) --- log "Test: database connectivity" stats_status=$(curl -sf -o /dev/null -w '%{http_code}' \ - -X POST "$BASE/memories/stats" \ - -H "Content-Type: application/json" \ - -d '{"user_id":"smoke-test-user"}') -assert_ok "POST /memories/stats returns 200 (DB connected)" \ + -G "$BASE/memories/stats" \ + --data-urlencode "user_id=smoke-test-user") +assert_ok "GET /memories/stats returns 200 (DB connected)" \ '[ "$stats_status" = "200" ]' # --- Test 5: Quick ingest endpoint (no LLM required — embedding-only dedup) --- @@ -171,7 +204,7 @@ ingest_response=$(curl -sf -w '\n%{http_code}' \ "source_site": "docker-smoke-test" }') ingest_status=$(echo "$ingest_response" | tail -1) -ingest_body=$(echo "$ingest_response" | head -n -1) +ingest_body=$(echo "$ingest_response" | sed '$d') assert_ok "POST /memories/ingest/quick returns 200" \ '[ "$ingest_status" = "200" ]' assert_ok "Ingest stored at least 1 memory" \ @@ -188,7 +221,7 @@ search_response=$(curl -sf -w '\n%{http_code}' \ "source_site": "docker-smoke-test" }') search_status=$(echo "$search_response" | tail -1) -search_body=$(echo "$search_response" | head -n -1) +search_body=$(echo "$search_response" | sed '$d') assert_ok "POST /memories/search returns 200" \ '[ "$search_status" = "200" ]' assert_ok "Search returns at least 1 result" \ @@ -205,10 +238,10 @@ assert_ok "POST /memories/reset-source returns 200" \ # --- Test 8: Input validation --- log "Test: input validation" -bad_ingest_status=$(curl -sf -o /dev/null -w '%{http_code}' \ +bad_ingest_status=$(curl -s -o /dev/null -w '%{http_code}' \ -X POST "$BASE/memories/ingest" \ -H "Content-Type: application/json" \ - -d '{"user_id":"x"}' 2>/dev/null || 
echo "400") + -d '{"user_id":"x"}') assert_ok "Missing required fields returns 400" \ '[ "$bad_ingest_status" = "400" ]' diff --git a/src/__tests__/route-validation.test.ts b/src/__tests__/route-validation.test.ts new file mode 100644 index 0000000..9e7fef0 --- /dev/null +++ b/src/__tests__/route-validation.test.ts @@ -0,0 +1,144 @@ +/** + * Route-level validation tests for memory API endpoints. + * Tests UUID validation on param/query inputs and filter behavior + * on the list endpoint. Requires DATABASE_URL in .env.test. + */ + +import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest'; + +// Mock embedText to avoid hitting the real embedding provider in CI where +// OPENAI_API_KEY is a placeholder. Returns a deterministic zero vector +// matching the configured embedding dimensions. +vi.mock('../services/embedding.js', async (importOriginal) => { + const actual = await importOriginal(); + return { + ...actual, + embedText: vi.fn(async () => { + const { config: cfg } = await import('../config.js'); + return new Array(cfg.embeddingDimensions).fill(0); + }), + }; +}); + +import { pool } from '../db/pool.js'; +import { config } from '../config.js'; +import { MemoryRepository } from '../db/memory-repository.js'; +import { ClaimRepository } from '../db/claim-repository.js'; +import { MemoryService } from '../services/memory-service.js'; +import { createMemoryRouter } from '../routes/memories.js'; +import express from 'express'; +import { readFileSync } from 'node:fs'; +import { resolve, dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const TEST_USER = 'route-validation-test-user'; +const VALID_UUID = '00000000-0000-0000-0000-000000000001'; +const INVALID_UUID = 'not-a-uuid'; + +let server: ReturnType<typeof app.listen>; +let baseUrl: string; +const app = express(); +app.use(express.json()); + +beforeAll(async () => { + const raw = readFileSync(resolve(__dirname, '../db/schema.sql'),
'utf-8'); + const sql = raw.replace(/\{\{EMBEDDING_DIMENSIONS\}\}/g, String(config.embeddingDimensions)); + await pool.query(sql); + + const repo = new MemoryRepository(pool); + const claimRepo = new ClaimRepository(pool); + const service = new MemoryService(repo, claimRepo); + app.use('/memories', createMemoryRouter(service)); + + await new Promise<void>((resolve) => { + server = app.listen(0, () => { + const addr = server.address(); + const port = typeof addr === 'object' && addr ? addr.port : 0; + baseUrl = `http://localhost:${port}`; + resolve(); + }); + }); +}); + +afterAll(async () => { + await new Promise<void>((resolve) => server.close(() => resolve())); + await pool.end(); +}); + +describe('GET /memories/:id — UUID validation', () => { + it('returns 400 for an invalid UUID', async () => { + const res = await fetch(`${baseUrl}/memories/${INVALID_UUID}?user_id=${TEST_USER}`); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.error).toMatch(/valid UUID/); + }); + + it('returns 404 for a valid but non-existent UUID', async () => { + const res = await fetch(`${baseUrl}/memories/${VALID_UUID}?user_id=${TEST_USER}`); + expect(res.status).toBe(404); + }); +}); + +describe('DELETE /memories/:id — UUID validation', () => { + it('returns 400 for an invalid UUID', async () => { + const res = await fetch(`${baseUrl}/memories/${INVALID_UUID}?user_id=${TEST_USER}`, { + method: 'DELETE', + }); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.error).toMatch(/valid UUID/); + }); +}); + +describe('POST /memories/ingest/quick — skip_extraction (storeVerbatim)', () => { + it('stores a single memory without extraction when skip_extraction is true', async () => { + const res = await fetch(`${baseUrl}/memories/ingest/quick`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + user_id: TEST_USER, + conversation: 'Verbatim content that should not be extracted into facts.', + source_site:
'verbatim-test', + source_url: 'https://example.com/verbatim', + skip_extraction: true, + }), + }); + expect(res.status).toBe(200); + const body = await res.json(); + expect(body.memoriesStored).toBe(1); + expect(body.memoryIds).toHaveLength(1); + }); +}); + +describe('GET /memories/list — source_site filter', () => { + it('returns memories filtered by source_site', async () => { + const res = await fetch( + `${baseUrl}/memories/list?user_id=${TEST_USER}&source_site=test-site`, + ); + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toHaveProperty('memories'); + expect(body).toHaveProperty('count'); + }); +}); + +describe('GET /memories/list — episode_id filter', () => { + it('returns 400 for an invalid episode_id', async () => { + const res = await fetch( + `${baseUrl}/memories/list?user_id=${TEST_USER}&episode_id=${INVALID_UUID}`, + ); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.error).toMatch(/valid UUID/); + }); + + it('accepts a valid episode_id UUID', async () => { + const res = await fetch( + `${baseUrl}/memories/list?user_id=${TEST_USER}&episode_id=${VALID_UUID}`, + ); + expect(res.status).toBe(200); + const body = await res.json(); + expect(body).toHaveProperty('memories'); + }); +}); diff --git a/src/db/memory-repository.ts b/src/db/memory-repository.ts index 02df130..b4d93bb 100644 --- a/src/db/memory-repository.ts +++ b/src/db/memory-repository.ts @@ -161,8 +161,8 @@ export class MemoryRepository { return getMemoryWithClient(client, id, userId, true); } - async listMemories(userId: string, limit: number = 20, offset: number = 0) { - return listMemories(this.pool, userId, limit, offset); + async listMemories(userId: string, limit: number = 20, offset: number = 0, sourceSite?: string, episodeId?: string) { + return listMemories(this.pool, userId, limit, offset, sourceSite, episodeId); } async listMemoriesInWorkspace(workspaceId: string, limit: number = 20, offset: number = 0) { diff --git 
a/src/db/repository-read.ts b/src/db/repository-read.ts index e7387b3..54479d9 100644 --- a/src/db/repository-read.ts +++ b/src/db/repository-read.ts @@ -55,13 +55,23 @@ export async function getMemoryWithClient( return result.rows[0] ? normalizeMemoryRow(result.rows[0]) : null; } -export async function listMemories(pool: pg.Pool, userId: string, limit: number, offset: number): Promise { +export async function listMemories(pool: pg.Pool, userId: string, limit: number, offset: number, sourceSite?: string, episodeId?: string): Promise { + const params: unknown[] = [userId, limit, offset]; + let extraClauses = ''; + if (sourceSite) { + params.push(sourceSite); + extraClauses += ` AND source_site = $${params.length}`; + } + if (episodeId) { + params.push(episodeId); + extraClauses += ` AND episode_id = $${params.length}`; + } const result = await pool.query( `SELECT * FROM memories WHERE user_id = $1 AND deleted_at IS NULL AND expired_at IS NULL AND status = 'active' - AND workspace_id IS NULL + AND workspace_id IS NULL${extraClauses} ORDER BY created_at DESC LIMIT $2 OFFSET $3`, - [userId, limit, offset], + params, ); return result.rows.map(normalizeMemoryRow); } diff --git a/src/routes/memories.ts b/src/routes/memories.ts index 10bdc7d..96bdb9c 100644 --- a/src/routes/memories.ts +++ b/src/routes/memories.ts @@ -10,6 +10,7 @@ import type { RetrievalMode } from '../services/memory-service-types.js'; import type { AgentScope, WorkspaceContext } from '../db/repository-types.js'; import { InputError, handleRouteError } from './route-errors.js'; +const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; const MAX_SEARCH_LIMIT = 100; const MAX_CONVERSATION_LENGTH = 100_000; const ALLOWED_ORIGINS = new Set( @@ -63,9 +64,20 @@ function registerIngestRoute(router: Router, service: MemoryService): void { } function registerQuickIngestRoute(router: Router, service: MemoryService): void { - registerIngestHandler(router, '/ingest/quick', service, 
(body) => - service.quickIngest(body.userId, body.conversation, body.sourceSite, body.sourceUrl), - ); + router.post('/ingest/quick', async (req: Request, res: Response) => { + try { + const body = parseIngestBody(req.body); + const skipExtraction = req.body.skip_extraction === true; + const result = body.workspace + ? await service.workspaceIngest(body.userId, body.conversation, body.sourceSite, body.sourceUrl, body.workspace) + : skipExtraction + ? await service.storeVerbatim(body.userId, body.conversation, body.sourceSite, body.sourceUrl) + : await service.quickIngest(body.userId, body.conversation, body.sourceSite, body.sourceUrl); + res.json(result); + } catch (err) { + handleRouteError(res, 'POST /memories/ingest/quick', err); + } + }); } /** Shared handler for ingest routes — workspace requests always use workspaceIngest. */ @@ -174,9 +186,11 @@ function registerListRoute(router: Router, service: MemoryService): void { const { userId, limit } = parseUserIdAndLimit(req.query); const offset = parseInt(String(req.query.offset ?? '0'), 10); const workspaceId = optionalQueryString(req.query.workspace_id); + const sourceSite = optionalQueryString(req.query.source_site); + const episodeId = optionalUuidQuery(req.query.episode_id, 'episode_id'); const memories = workspaceId ? 
await service.listInWorkspace(workspaceId, limit, offset) - : await service.list(userId, limit, offset); + : await service.list(userId, limit, offset, sourceSite, episodeId); res.json({ memories, count: memories.length }); } catch (err) { handleRouteError(res, 'GET /memories/list', err); @@ -354,11 +368,12 @@ function registerResetSourceRoute(router: Router, service: MemoryService): void function registerGetRoute(router: Router, service: MemoryService): void { router.get('/:id', async (req: Request, res: Response) => { try { + const memoryId = requireUuidParam(String(req.params.id), 'id'); const userId = requireQueryString(req.query.user_id, 'user_id is required'); const workspaceId = optionalQueryString(req.query.workspace_id); const memory = workspaceId - ? await service.getInWorkspace(String(req.params.id), workspaceId) - : await service.get(String(req.params.id), userId); + ? await service.getInWorkspace(memoryId, workspaceId) + : await service.get(memoryId, userId); if (!memory) { res.status(404).json({ error: 'Memory not found' }); return; @@ -373,12 +388,13 @@ function registerGetRoute(router: Router, service: MemoryService): void { function registerDeleteRoute(router: Router, service: MemoryService): void { router.delete('/:id', async (req: Request, res: Response) => { try { + const memoryId = requireUuidParam(String(req.params.id), 'id'); const userId = requireQueryString(req.query.user_id, 'user_id is required'); const workspaceId = optionalQueryString(req.query.workspace_id); if (workspaceId) { - await service.deleteInWorkspace(String(req.params.id), workspaceId); + await service.deleteInWorkspace(memoryId, workspaceId); } else { - await service.delete(String(req.params.id), userId); + await service.delete(memoryId, userId); } res.json({ success: true }); } catch (err) { @@ -414,9 +430,10 @@ function registerAuditRecentRoute(router: Router, service: MemoryService): void function registerAuditTrailRoute(router: Router, service: MemoryService): void { 
router.get('/:id/audit', async (req: Request, res: Response) => { try { + const memoryId = requireUuidParam(String(req.params.id), 'id'); const userId = requireQueryString(req.query.user_id, 'user_id is required'); - const trail = await service.getAuditTrail(userId, String(req.params.id)); - res.json({ memoryId: req.params.id, trail, versionCount: trail.length }); + const trail = await service.getAuditTrail(userId, memoryId); + res.json({ memoryId, trail, versionCount: trail.length }); } catch (err) { handleRouteError(res, 'GET /memories/:id/audit', err); } @@ -497,6 +514,18 @@ function optionalQueryString(value: unknown): string | undefined { return typeof value === 'string' && value.length > 0 ? value : undefined; } +function requireUuidParam(value: string, label: string): string { + if (!UUID_REGEX.test(value)) throw new InputError(`${label} must be a valid UUID`); + return value; +} + +function optionalUuidQuery(value: unknown, label: string): string | undefined { + const str = optionalQueryString(value); + if (!str) return undefined; + if (!UUID_REGEX.test(str)) throw new InputError(`${label} must be a valid UUID`); + return str; +} + function parseOptionalWorkspaceContext(body: Record<string, unknown>): WorkspaceContext | undefined { const workspaceId = optionalBodyString(body.workspace_id); const agentId = optionalBodyString(body.agent_id); diff --git a/src/routes/route-errors.ts b/src/routes/route-errors.ts index 1df534d..354388a 100644 --- a/src/routes/route-errors.ts +++ b/src/routes/route-errors.ts @@ -12,8 +12,9 @@ export class InputError extends Error {} export function handleRouteError(res: Response, context: string, err: unknown): void { const isExpected = err instanceof InputError; const status = isExpected ? 400 : 500; - const internalMessage = err instanceof Error ? err.message : String(err); - console.error(`${context} error:`, internalMessage); + const internalMessage = err instanceof Error ? err.message : String(err ??
'Internal server error'); + const stack = err instanceof Error ? err.stack : undefined; + console.error(`${context} error: [${status}] ${internalMessage}${stack ? `\n${stack}` : ''}`); const clientMessage = isExpected ? internalMessage : 'Internal server error'; res.status(status).json({ error: clientMessage }); } diff --git a/src/services/memory-crud.ts b/src/services/memory-crud.ts index 46b52ca..3bc38b0 100644 --- a/src/services/memory-crud.ts +++ b/src/services/memory-crud.ts @@ -18,8 +18,8 @@ export interface ClaimSlotBackfillResult { updated: number; } -export async function listMemories(deps: MemoryServiceDeps, userId: string, limit: number = 20, offset: number = 0) { - return deps.repo.listMemories(userId, limit, offset); +export async function listMemories(deps: MemoryServiceDeps, userId: string, limit: number = 20, offset: number = 0, sourceSite?: string, episodeId?: string) { + return deps.repo.listMemories(userId, limit, offset, sourceSite, episodeId); } export async function listMemoriesInWorkspace(deps: MemoryServiceDeps, workspaceId: string, limit: number = 20, offset: number = 0) { diff --git a/src/services/memory-ingest.ts b/src/services/memory-ingest.ts index f69cb49..029d086 100644 --- a/src/services/memory-ingest.ts +++ b/src/services/memory-ingest.ts @@ -137,6 +137,51 @@ export async function performQuickIngest( return buildIngestResult(episodeId, extractedFacts.length, acc, linksCreated, 0); } +/** + * Store content as a single memory without fact extraction. + * Used for user-created contexts (text/file uploads) where + * the content should remain as one canonical memory record. 
+ */ +export async function performStoreVerbatim( + deps: MemoryServiceDeps, + userId: string, + content: string, + sourceSite: string, + sourceUrl: string = '', +): Promise<IngestResult> { + const episodeId = await deps.repo.storeEpisode({ userId, content, sourceSite, sourceUrl }); + const embedding = await embedText(content); + const writeSecurity = assessWriteSecurity(content, sourceSite); + const trustScore = writeSecurity.allowed ? writeSecurity.trust.score : 0.5; + + const memoryId = await deps.repo.storeMemory({ + userId, + content, + embedding, + memoryType: 'semantic', + importance: 0.5, + sourceSite, + sourceUrl, + episodeId, + status: 'active', + keywords: '', + summary: content.slice(0, 200), + trustScore, + }); + + return { + episodeId, + factsExtracted: 1, + memoriesStored: 1, + memoriesUpdated: 0, + memoriesDeleted: 0, + memoriesSkipped: 0, + memoryIds: [memoryId], + linksCreated: 0, + compositesCreated: 0, + }; +} + +/** Workspace-scoped ingest: stores memories tagged with workspace_id and agent_id. */ export async function performWorkspaceIngest( deps: MemoryServiceDeps, @@ -197,9 +242,11 @@ async function quickIngestFact( const candidates = mergeCandidates(vectorCandidates, slotCandidates); if (candidates.length > 0) { - const topSim = Math.max(...candidates.map((c) => c.similarity)); - if (topSim >= config.fastAudnDuplicateThreshold) { - return { outcome: 'skipped', memoryId: null }; + const topCandidate = candidates.reduce((a, b) => a.similarity > b.similarity ? a : b); + if (topCandidate.similarity >= config.fastAudnDuplicateThreshold) { + // Near-duplicate: skip but return the existing memory ID so callers + // can link to the canonical memory (e.g. integration sync pointer rows).
+ return { outcome: 'skipped', memoryId: topCandidate.id }; } } diff --git a/src/services/memory-service.ts b/src/services/memory-service.ts index cd88e96..a8803be 100644 --- a/src/services/memory-service.ts +++ b/src/services/memory-service.ts @@ -17,7 +17,7 @@ import { type ReconciliationResult } from './deferred-audn.js'; import type { AgentScope, AuditTrailEntry, MutationSummary, WorkspaceContext } from '../db/repository-types.js'; import type { FactInput, IngestResult, MemoryServiceDeps, Outcome, RetrievalOptions, RetrievalResult } from './memory-service-types.js'; -import { performIngest, performQuickIngest, performWorkspaceIngest } from './memory-ingest.js'; +import { performIngest, performQuickIngest, performStoreVerbatim, performWorkspaceIngest } from './memory-ingest.js'; import { performSearch, performFastSearch, performWorkspaceSearch } from './memory-search.js'; import * as crud from './memory-crud.js'; @@ -57,6 +57,15 @@ export class MemoryService { return performQuickIngest(this.deps, userId, conversationText, sourceSite, sourceUrl, sessionTimestamp); } + /** + * Store content as a single memory without fact extraction. + * Used for user-created contexts (text/file uploads) where + * the content should remain as one canonical memory record. 
+ */ + async storeVerbatim(userId: string, content: string, sourceSite: string, sourceUrl: string = ''): Promise<IngestResult> { + return performStoreVerbatim(this.deps, userId, content, sourceSite, sourceUrl); + } + async workspaceIngest(userId: string, conversationText: string, sourceSite: string, sourceUrl: string = '', workspace: WorkspaceContext, sessionTimestamp?: Date): Promise<IngestResult> { return performWorkspaceIngest(this.deps, userId, conversationText, sourceSite, sourceUrl, workspace, sessionTimestamp); } @@ -77,7 +86,7 @@ export class MemoryService { // --- CRUD --- - async list(userId: string, limit: number = 20, offset: number = 0) { return crud.listMemories(this.deps, userId, limit, offset); } + async list(userId: string, limit: number = 20, offset: number = 0, sourceSite?: string, episodeId?: string) { return crud.listMemories(this.deps, userId, limit, offset, sourceSite, episodeId); } async listInWorkspace(workspaceId: string, limit: number = 20, offset: number = 0) { return crud.listMemoriesInWorkspace(this.deps, workspaceId, limit, offset); } async get(id: string, userId: string) { return crud.getMemory(this.deps, id, userId); } async getInWorkspace(id: string, workspaceId: string) { return crud.getMemoryInWorkspace(this.deps, id, workspaceId); }