diff --git a/app/lib/backend/http/api/conversations.dart b/app/lib/backend/http/api/conversations.dart index e4eb21893bc..ed505723d5b 100644 --- a/app/lib/backend/http/api/conversations.dart +++ b/app/lib/backend/http/api/conversations.dart @@ -61,9 +61,8 @@ Future> getConversations({ if (response.statusCode == 200) { // decode body bytes to utf8 string and then parse json so as to avoid utf8 char issues var body = utf8.decode(response.bodyBytes); - var memories = (jsonDecode(body) as List) - .map((conversation) => ServerConversation.fromJson(conversation)) - .toList(); + var memories = + (jsonDecode(body) as List).map((conversation) => ServerConversation.fromJson(conversation)).toList(); Logger.debug('getConversations length: ${memories.length}'); return memories; } else { @@ -99,6 +98,54 @@ Future deleteConversationServer(String conversationId) async { return response.statusCode == 204; } +Future trashConversationServer(String conversationId) async { + var response = await makeApiCall( + url: '${Env.apiBaseUrl}v1/conversations/$conversationId/trash', + headers: {}, + method: 'POST', + body: '', + ); + if (response == null) return null; + Logger.debug('trashConversation: ${response.statusCode}'); + if (response.statusCode == 200) { + return ServerConversation.fromJson(jsonDecode(response.body)); + } + return null; +} + +Future restoreConversationServer(String conversationId) async { + var response = await makeApiCall( + url: '${Env.apiBaseUrl}v1/conversations/$conversationId/restore', + headers: {}, + method: 'POST', + body: '', + ); + if (response == null) return null; + Logger.debug('restoreConversation: ${response.statusCode}'); + if (response.statusCode == 200) { + return ServerConversation.fromJson(jsonDecode(response.body)); + } + return null; +} + +Future> getTrashedConversations({int limit = 100, int offset = 0}) async { + var response = await makeApiCall( + url: '${Env.apiBaseUrl}v1/conversations/trash?limit=$limit&offset=$offset', + headers: {}, + 
method: 'GET', + body: '', + ); + if (response == null) return []; + if (response.statusCode == 200) { + var body = utf8.decode(response.bodyBytes); + return (jsonDecode(body) as List) + .map((conversation) => ServerConversation.fromJson(conversation)) + .toList(); + } + Logger.debug('getTrashedConversations error ${response.statusCode}'); + return []; +} + Future getConversationById(String conversationId) async { var response = await makeApiCall( url: '${Env.apiBaseUrl}v1/conversations/$conversationId', @@ -168,9 +215,8 @@ class TranscriptsResponse { deepgram: (json['deepgram'] as List).map((segment) => TranscriptSegment.fromJson(segment)).toList(), soniox: (json['soniox'] as List).map((segment) => TranscriptSegment.fromJson(segment)).toList(), whisperx: (json['whisperx'] as List).map((segment) => TranscriptSegment.fromJson(segment)).toList(), - speechmatics: (json['speechmatics'] as List) - .map((segment) => TranscriptSegment.fromJson(segment)) - .toList(), + speechmatics: + (json['speechmatics'] as List).map((segment) => TranscriptSegment.fromJson(segment)).toList(), ); } } diff --git a/app/lib/backend/schema/conversation.dart b/app/lib/backend/schema/conversation.dart index 9fe84f95307..30469b1e6c1 100644 --- a/app/lib/backend/schema/conversation.dart +++ b/app/lib/backend/schema/conversation.dart @@ -87,11 +87,9 @@ class ConversationPostProcessing { factory ConversationPostProcessing.fromJson(Map json) { return ConversationPostProcessing( - status: - ConversationPostProcessingStatus.values.asNameMap()[json['status']] ?? + status: ConversationPostProcessingStatus.values.asNameMap()[json['status']] ?? ConversationPostProcessingStatus.in_progress, - model: - ConversationPostProcessingModel.values.asNameMap()[json['model']] ?? + model: ConversationPostProcessingModel.values.asNameMap()[json['model']] ?? 
ConversationPostProcessingModel.fal_whisperx, failReason: json['fail_reason'], ); @@ -142,12 +140,12 @@ class ConversationPhoto { } Map toJson() => { - 'id': id, - 'base64': base64, - 'description': description, - 'created_at': createdAt.toUtc().toIso8601String(), - 'discarded': discarded, - }; + 'id': id, + 'base64': base64, + 'description': description, + 'created_at': createdAt.toUtc().toIso8601String(), + 'discarded': discarded, + }; } class AudioFile { @@ -182,14 +180,14 @@ class AudioFile { } Map toJson() => { - 'id': id, - 'uid': uid, - 'conversation_id': conversationId, - 'chunk_timestamps': chunkTimestamps, - 'provider': provider, - 'started_at': startedAt?.toUtc().toIso8601String(), - 'duration': duration, - }; + 'id': id, + 'uid': uid, + 'conversation_id': conversationId, + 'chunk_timestamps': chunkTimestamps, + 'provider': provider, + 'started_at': startedAt?.toUtc().toIso8601String(), + 'duration': duration, + }; } class ServerConversation { @@ -213,6 +211,7 @@ class ServerConversation { ConversationStatus status; bool discarded; + DateTime? trashedAt; final bool deleted; final bool isLocked; bool starred; @@ -235,6 +234,7 @@ class ServerConversation { this.photos = const [], this.audioFiles = const [], this.discarded = false, + this.trashedAt, this.deleted = false, this.source, this.language, @@ -256,24 +256,22 @@ class ServerConversation { transcriptSegments: ((json['transcript_segments'] ?? []) as List) .map((segment) => TranscriptSegment.fromJson(segment)) .toList(), - appResults: ((json['apps_results'] ?? []) as List) - .map((result) => AppResponse.fromJson(result)) - .toList(), - suggestedSummarizationApps: ((json['suggested_summarization_apps'] ?? []) as List) - .map((appId) => appId.toString()) - .toList(), + appResults: + ((json['apps_results'] ?? []) as List).map((result) => AppResponse.fromJson(result)).toList(), + suggestedSummarizationApps: + ((json['suggested_summarization_apps'] ?? 
[]) as List).map((appId) => appId.toString()).toList(), geolocation: json['geolocation'] != null ? Geolocation.fromJson(json['geolocation']) : null, photos: json['photos'] != null ? ((json['photos'] ?? []) as List).map((photo) => ConversationPhoto.fromJson(photo)).toList() : [], audioFiles: ((json['audio_files'] ?? []) as List).map((af) => AudioFile.fromJson(af)).toList(), discarded: json['discarded'] ?? false, + trashedAt: json['trashed_at'] != null ? DateTime.parse(json['trashed_at']).toLocal() : null, source: json['source'] != null ? ConversationSource.values.asNameMap()[json['source']] : ConversationSource.omi, language: json['language'], deleted: json['deleted'] ?? false, - externalIntegration: json['external_data'] != null - ? ConversationExternalData.fromJson(json['external_data']) - : null, + externalIntegration: + json['external_data'] != null ? ConversationExternalData.fromJson(json['external_data']) : null, status: json['status'] != null ? ConversationStatus.values.asNameMap()[json['status']] ?? 
ConversationStatus.completed : ConversationStatus.completed, @@ -297,6 +295,7 @@ class ServerConversation { 'geolocation': geolocation?.toJson(), 'photos': photos.map((photo) => photo.toJson()).toList(), 'discarded': discarded, + 'trashed_at': trashedAt?.toUtc().toIso8601String(), 'deleted': deleted, 'source': source?.toString(), 'language': language, diff --git a/app/lib/l10n/app_en.arb b/app/lib/l10n/app_en.arb index 2a31965f74d..c27b4d1a824 100644 --- a/app/lib/l10n/app_en.arb +++ b/app/lib/l10n/app_en.arb @@ -93,6 +93,59 @@ "@deleteConversation": { "description": "Menu item to delete a conversation" }, + "trash": "Trash", + "@trash": { + "description": "Settings entry and page title for trashed conversations" + }, + "trashEmpty": "Trash is empty", + "@trashEmpty": { + "description": "Empty state title for Trash" + }, + "trashDescription": "Conversations in Trash are permanently deleted after 30 days.", + "@trashDescription": { + "description": "Description of Trash retention behavior" + }, + "moveToTrash": "Move to Trash", + "@moveToTrash": { + "description": "Action label to soft-delete a conversation" + }, + "restoreConversation": "Restore", + "@restoreConversation": { + "description": "Action label to restore a trashed conversation" + }, + "deleteForever": "Delete forever", + "@deleteForever": { + "description": "Action label to permanently delete a trashed conversation" + }, + "daysRemaining": "{days} days remaining", + "@daysRemaining": { + "description": "Number of days remaining before a trashed conversation is permanently deleted", + "placeholders": { + "days": { + "type": "int" + } + } + }, + "trashConfirmTitle": "Move conversation to Trash?", + "@trashConfirmTitle": { + "description": "Confirmation dialog title for moving a conversation to Trash" + }, + "trashConfirmMessage": "You can restore it from Settings > Trash for the next 30 days.", + "@trashConfirmMessage": { + "description": "Confirmation dialog message for moving a conversation to Trash" + 
}, + "restoreSuccess": "Conversation restored", + "@restoreSuccess": { + "description": "Snackbar message after restoring a conversation" + }, + "deleteForeverConfirmTitle": "Delete forever?", + "@deleteForeverConfirmTitle": { + "description": "Confirmation dialog title for permanent delete" + }, + "trashedAtLabel": "Moved to Trash", + "@trashedAtLabel": { + "description": "Label for the timestamp when a conversation was moved to Trash" + }, "contentCopied": "Content copied to clipboard", "@contentCopied": { "description": "Snackbar message when content is copied" diff --git a/app/lib/pages/conversations/widgets/conversation_list_item.dart b/app/lib/pages/conversations/widgets/conversation_list_item.dart index f77a9c630cb..ebbc6b47bd9 100644 --- a/app/lib/pages/conversations/widgets/conversation_list_item.dart +++ b/app/lib/pages/conversations/widgets/conversation_list_item.dart @@ -381,6 +381,7 @@ class _ConversationListItemState extends State { padding: EdgeInsets.only(right: 4.0), child: FaIcon(FontAwesomeIcons.solidStar, size: 12, color: Colors.amber), ), + _buildOverflowMenu(context), ], ) : Row( @@ -408,6 +409,7 @@ class _ConversationListItemState extends State { padding: EdgeInsets.only(right: 4.0), child: FaIcon(FontAwesomeIcons.solidStar, size: 12, color: Colors.amber), ), + _buildOverflowMenu(context), ], ), ], @@ -576,6 +578,7 @@ class _ConversationListItemState extends State { padding: EdgeInsets.only(left: 8.0), child: FaIcon(FontAwesomeIcons.solidStar, size: 12, color: Colors.amber), ), + _buildOverflowMenu(context), ], ), ), @@ -590,6 +593,49 @@ class _ConversationListItemState extends State { return secondsToCompactDuration(durationSeconds, context); } + + Widget _buildOverflowMenu(BuildContext context) { + return PopupMenuButton( + icon: const Icon(Icons.more_horiz, color: Color(0xFF8E8E93), size: 20), + color: const Color(0xFF1F1F25), + onSelected: (value) async { + if (value == 'trash') { + await _confirmMoveToTrash(context); + } + }, + 
itemBuilder: (context) => [ + PopupMenuItem( + value: 'trash', + child: Row( + children: [ + const Icon(Icons.delete_outline, color: Colors.white70, size: 18), + const SizedBox(width: 10), + Text(context.l10n.moveToTrash, style: const TextStyle(color: Colors.white)), + ], + ), + ), + ], + ); + } + + Future _confirmMoveToTrash(BuildContext context) async { + final confirmed = await showDialog( + context: context, + builder: (ctx) => AlertDialog( + title: Text(context.l10n.trashConfirmTitle), + content: Text(context.l10n.trashConfirmMessage), + actions: [ + TextButton(onPressed: () => Navigator.of(ctx).pop(false), child: Text(context.l10n.cancel)), + TextButton(onPressed: () => Navigator.of(ctx).pop(true), child: Text(context.l10n.moveToTrash)), + ], + ), + ); + if (confirmed != true || !context.mounted) return; + + final success = await context.read().trashConversation(widget.conversation); + if (!context.mounted || !success) return; + ScaffoldMessenger.of(context).showSnackBar(SnackBar(content: Text(context.l10n.trashedAtLabel))); + } } class ConversationNewStatusIndicator extends StatefulWidget { diff --git a/app/lib/pages/settings/settings_drawer.dart b/app/lib/pages/settings/settings_drawer.dart index 5d43e51f1f8..a20d8637c61 100644 --- a/app/lib/pages/settings/settings_drawer.dart +++ b/app/lib/pages/settings/settings_drawer.dart @@ -12,7 +12,9 @@ import 'package:omi/pages/settings/permissions_page.dart'; import 'package:omi/pages/settings/profile.dart'; import 'package:omi/pages/memories/page.dart'; import 'package:omi/pages/settings/integrations_page.dart'; +import 'package:omi/pages/settings/trash_page.dart'; import 'package:omi/pages/settings/usage_page.dart'; +import 'package:omi/backend/http/api/conversations.dart'; import 'package:omi/pages/referral/referral_page.dart'; import 'package:omi/providers/device_provider.dart'; import 'package:omi/providers/usage_provider.dart'; @@ -61,6 +63,7 @@ class _SettingsDrawerState extends State { String? 
version; String? buildVersion; String? shortDeviceInfo; + int _trashPreviewCount = 0; bool _isSearching = false; String _searchQuery = ''; @@ -73,6 +76,7 @@ class _SettingsDrawerState extends State { _searchController = TextEditingController(); _searchFocusNode = FocusNode(); _loadAppAndDeviceInfo(); + _loadTrashPreviewCount(); } @override @@ -121,6 +125,14 @@ class _SettingsDrawerState extends State { } } + Future _loadTrashPreviewCount() async { + final conversations = await getTrashedConversations(limit: 1); + if (!mounted) return; + setState(() { + _trashPreviewCount = conversations.length; + }); + } + Widget _buildSettingsItem({ required String title, required Widget icon, @@ -300,6 +312,7 @@ class _SettingsDrawerState extends State { } void goToMemories() => routeToPage(context, const MemoriesPage()); + void goToTrash() => Navigator.of(context).push(MaterialPageRoute(builder: (context) => const TrashPage())); void goToDeveloper() async => await routeToPage(context, const DeveloperSettingsPage()); const profileIcon = FaIcon(FontAwesomeIcons.solidUser, color: Color(0xFF8E8E93), size: 20); @@ -308,6 +321,7 @@ class _SettingsDrawerState extends State { const deviceIcon = FaIcon(FontAwesomeIcons.bluetooth, color: Color(0xFF8E8E93), size: 20); const permIcon = FaIcon(FontAwesomeIcons.shieldHalved, color: Color(0xFF8E8E93), size: 20); const memIcon = FaIcon(FontAwesomeIcons.brain, color: Color(0xFF8E8E93), size: 20); + const trashIcon = FaIcon(FontAwesomeIcons.trash, color: Color(0xFF8E8E93), size: 20); const devIcon = FaIcon(FontAwesomeIcons.code, color: Color(0xFF8E8E93), size: 20); const intIcon = FaIcon(FontAwesomeIcons.networkWired, color: Color(0xFF8E8E93), size: 20); const syncIcon = FaIcon(FontAwesomeIcons.solidCloud, color: Color(0xFF8E8E93), size: 20); @@ -356,6 +370,7 @@ class _SettingsDrawerState extends State { _SearchableItem(title: context.l10n.backgroundActivity, icon: permIcon, onTap: goToPermissions), // --- Memories --- _SearchableItem(title: 
context.l10n.memories, icon: memIcon, onTap: goToMemories), + _SearchableItem(title: context.l10n.trash, icon: trashIcon, onTap: goToTrash), // --- Support --- if (PlatformService.isIntercomSupported) ...[ _SearchableItem( @@ -607,6 +622,27 @@ class _SettingsDrawerState extends State { routeToPage(context, const MemoriesPage()); }, ), + const Divider(height: 1, color: Color(0xFF3C3C43)), + _buildSettingsItem( + title: context.l10n.trash, + icon: const FaIcon(FontAwesomeIcons.trash, color: Color(0xFF8E8E93), size: 20), + trailingChip: _trashPreviewCount > 0 + ? Container( + padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4), + decoration: BoxDecoration( + color: Colors.red.withOpacity(0.2), + borderRadius: BorderRadius.circular(10), + ), + child: Text( + _trashPreviewCount.toString(), + style: const TextStyle(color: Colors.red, fontSize: 10, fontWeight: FontWeight.w600), + ), + ) + : null, + onTap: () { + Navigator.of(context).push(MaterialPageRoute(builder: (context) => const TrashPage())); + }, + ), ], ), const SizedBox(height: 32), diff --git a/app/lib/pages/settings/trash_page.dart b/app/lib/pages/settings/trash_page.dart new file mode 100644 index 00000000000..da19ab492da --- /dev/null +++ b/app/lib/pages/settings/trash_page.dart @@ -0,0 +1,184 @@ +import 'package:flutter/material.dart'; + +import 'package:omi/backend/http/api/conversations.dart'; +import 'package:omi/backend/schema/conversation.dart'; +import 'package:omi/utils/l10n_extensions.dart'; +import 'package:omi/widgets/extensions/string.dart'; + +class TrashPage extends StatefulWidget { + const TrashPage({super.key}); + + @override + State createState() => _TrashPageState(); +} + +class _TrashPageState extends State { + bool _isLoading = true; + List _conversations = []; + + @override + void initState() { + super.initState(); + _loadConversations(); + } + + Future _loadConversations() async { + final conversations = await getTrashedConversations(); + if (!mounted) return; + setState(() { 
+ _conversations = conversations; + _isLoading = false; + }); + } + + int _daysRemaining(ServerConversation conversation) { + final trashedAt = conversation.trashedAt; + if (trashedAt == null) return 30; + final elapsedDays = DateTime.now().difference(trashedAt).inDays.clamp(0, 30).toInt(); + return 30 - elapsedDays; + } + + Future _restore(ServerConversation conversation) async { + final restored = await restoreConversationServer(conversation.id); + if (!mounted || restored == null) return; + setState(() { + _conversations.removeWhere((item) => item.id == conversation.id); + }); + ScaffoldMessenger.of(context).showSnackBar(SnackBar(content: Text(context.l10n.restoreSuccess))); + } + + Future _deleteForever(ServerConversation conversation) async { + final confirmed = await showDialog( + context: context, + builder: (ctx) => AlertDialog( + title: Text(context.l10n.deleteForeverConfirmTitle), + content: Text(context.l10n.deleteConversationMessage), + actions: [ + TextButton(onPressed: () => Navigator.of(ctx).pop(false), child: Text(context.l10n.cancel)), + TextButton( + onPressed: () => Navigator.of(ctx).pop(true), + child: Text(context.l10n.deleteForever, style: const TextStyle(color: Colors.red)), + ), + ], + ), + ); + if (confirmed != true) return; + + final deleted = await deleteConversationServer(conversation.id); + if (!mounted || !deleted) return; + setState(() { + _conversations.removeWhere((item) => item.id == conversation.id); + }); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + backgroundColor: Colors.black, + appBar: AppBar(backgroundColor: Colors.black, foregroundColor: Colors.white, title: Text(context.l10n.trash)), + body: _isLoading + ? const Center(child: CircularProgressIndicator()) + : _conversations.isEmpty + ? 
_EmptyTrashState() + : RefreshIndicator( + onRefresh: _loadConversations, + child: ListView.separated( + padding: const EdgeInsets.all(16), + itemBuilder: (context, index) => _TrashRow( + conversation: _conversations[index], + daysRemaining: _daysRemaining(_conversations[index]), + onRestore: () => _restore(_conversations[index]), + onDeleteForever: () => _deleteForever(_conversations[index]), + ), + separatorBuilder: (context, index) => const SizedBox(height: 10), + itemCount: _conversations.length, + ), + ), + ); + } +} + +class _EmptyTrashState extends StatelessWidget { + @override + Widget build(BuildContext context) { + return Center( + child: Padding( + padding: const EdgeInsets.all(32), + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + const Icon(Icons.delete_outline, color: Color(0xFF8E8E93), size: 48), + const SizedBox(height: 16), + Text( + context.l10n.trashEmpty, + style: const TextStyle(color: Colors.white, fontSize: 20, fontWeight: FontWeight.w600), + ), + const SizedBox(height: 8), + Text( + context.l10n.trashDescription, + textAlign: TextAlign.center, + style: const TextStyle(color: Color(0xFF8E8E93), fontSize: 15), + ), + ], + ), + ), + ); + } +} + +class _TrashRow extends StatelessWidget { + final ServerConversation conversation; + final int daysRemaining; + final VoidCallback onRestore; + final VoidCallback onDeleteForever; + + const _TrashRow({ + required this.conversation, + required this.daysRemaining, + required this.onRestore, + required this.onDeleteForever, + }); + + @override + Widget build(BuildContext context) { + final title = + conversation.discarded ? conversation.getTranscript(maxCount: 100) : conversation.structured.title.decodeString; + + return Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration(color: const Color(0xFF1C1C1E), borderRadius: BorderRadius.circular(8)), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + title.isEmpty ? 
context.l10n.discardedConversation : title, + maxLines: 2, + overflow: TextOverflow.ellipsis, + style: const TextStyle(color: Colors.white, fontSize: 16, fontWeight: FontWeight.w600), + ), + const SizedBox(height: 8), + Text( + context.l10n.daysRemaining(daysRemaining), + style: const TextStyle(color: Color(0xFF8E8E93), fontSize: 13), + ), + const SizedBox(height: 14), + Row( + children: [ + TextButton.icon( + onPressed: onRestore, + icon: const Icon(Icons.restore, size: 18), + label: Text(context.l10n.restoreConversation), + ), + const Spacer(), + TextButton.icon( + onPressed: onDeleteForever, + icon: const Icon(Icons.delete_forever, color: Colors.red, size: 18), + label: Text(context.l10n.deleteForever, style: const TextStyle(color: Colors.red)), + ), + ], + ), + ], + ), + ); + } +} diff --git a/app/lib/providers/conversation_provider.dart b/app/lib/providers/conversation_provider.dart index 4eed0d61d93..16dbfb2aa56 100644 --- a/app/lib/providers/conversation_provider.dart +++ b/app/lib/providers/conversation_provider.dart @@ -737,6 +737,15 @@ class ConversationProvider extends ChangeNotifier { groupConversationsByDate(); } + Future trashConversation(ServerConversation conversation) async { + final result = await trashConversationServer(conversation.id); + if (result == null) return false; + conversations.removeWhere((element) => element.id == conversation.id); + searchedConversations.removeWhere((element) => element.id == conversation.id); + groupConversationsByDate(); + return true; + } + @override void dispose() { _processingConversationWatchTimer?.cancel(); diff --git a/app/lib/utils/l10n_extensions.dart b/app/lib/utils/l10n_extensions.dart index 0088dc8947d..9653de411f3 100644 --- a/app/lib/utils/l10n_extensions.dart +++ b/app/lib/utils/l10n_extensions.dart @@ -20,3 +20,18 @@ extension LocalizationExtension on BuildContext { /// which should never happen if MaterialApp is configured correctly. 
AppLocalizations get l10n => AppLocalizations.of(this); } + +// Temporary accessors for new English ARB keys until generated localizations are refreshed. +extension TrashLocalizationExtension on AppLocalizations { + String get trash => 'Trash'; + String get trashEmpty => 'Trash is empty'; + String get trashDescription => 'Conversations in Trash are permanently deleted after 30 days.'; + String get moveToTrash => 'Move to Trash'; + String get restoreConversation => 'Restore'; + String get deleteForever => 'Delete forever'; + String daysRemaining(int days) => '$days days remaining'; + String get trashConfirmTitle => 'Move conversation to Trash?'; + String get trashConfirmMessage => 'You can restore it from Settings > Trash for the next 30 days.'; + String get restoreSuccess => 'Conversation restored'; + String get deleteForeverConfirmTitle => 'Delete forever?'; + String get trashedAtLabel => 'Moved to Trash'; +} diff --git a/backend/database/conversations.py b/backend/database/conversations.py index 3a58052a58e..fe7e0980301 100644 --- a/backend/database/conversations.py +++ b/backend/database/conversations.py @@ -10,7 +10,7 @@ from google.cloud.firestore_v1 import FieldFilter import utils.other.hume as hume -from database import users as users_db +from database import users as users_db, redis_db from models.audio_file import AudioFile from models.conversation_enums import ConversationStatus, PostProcessingModel, PostProcessingStatus from models.conversation_photo import ConversationPhoto @@ -36,6 +36,16 @@ def _ensure_timezone_aware(dt: datetime) -> datetime: return dt +def _is_not_trashed(conversation_data: Dict[str, Any]) -> bool: + return conversation_data.get('trashed_at') is None + + +def _filter_trashed(conversations: List[dict], include_trashed: bool) -> List[dict]: + if include_trashed: + return conversations + return [conversation for conversation in conversations if _is_not_trashed(conversation)] + + # ********************************* # ******* ENCRYPTION HELPERS ****** # 
********************************* @@ -178,6 +188,7 @@ def get_conversations( limit: int = 100, offset: int = 0, include_discarded: bool = False, + include_trashed: bool = False, statuses: List[str] = [], start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, @@ -209,19 +220,32 @@ def get_conversations( # Sort conversations_ref = conversations_ref.order_by('created_at', direction=firestore.Query.DESCENDING) - # Limits - conversations_ref = conversations_ref.limit(limit).offset(offset) + # Fetch extra rows when post-filtering trashed conversations so recent trashed + # documents do not produce short pages while older visible rows exist. + fetch_limit = limit if include_trashed else min(limit * 3, 500) + conversations_ref = conversations_ref.limit(fetch_limit).offset(offset) conversations = [doc.to_dict() for doc in conversations_ref.stream()] - return conversations + return _filter_trashed(conversations, include_trashed)[:limit] -def get_conversations_count(uid: str, include_discarded: bool = False, statuses: List[str] = []): +def get_conversations_count( + uid: str, include_discarded: bool = False, include_trashed: bool = False, statuses: List[str] = [] +): conversations_ref = db.collection('users').document(uid).collection(conversations_collection) if not include_discarded: conversations_ref = conversations_ref.where(filter=FieldFilter('discarded', '==', False)) if statuses: conversations_ref = conversations_ref.where(filter=FieldFilter('status', 'in', statuses)) + if not include_trashed: + # Legacy conversations may not have trashed_at populated, so this remains + # an O(n) client-side filter until backfill makes `trashed_at == None` + # safe to enforce server-side. 
+ count = 0 + for doc in conversations_ref.stream(): + if _is_not_trashed(doc.to_dict()): + count += 1 + return count result = conversations_ref.count().get() return int(result[0][0].value) @@ -232,6 +256,7 @@ def get_conversations_without_photos( limit: int = 100, offset: int = 0, include_discarded: bool = False, + include_trashed: bool = False, statuses: List[str] = [], start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, @@ -267,15 +292,24 @@ def get_conversations_without_photos( # Sort conversations_ref = conversations_ref.order_by('created_at', direction=firestore.Query.DESCENDING) - # Limits - conversations_ref = conversations_ref.limit(limit).offset(offset) + # Fetch extra rows when post-filtering trashed conversations so recent trashed + # documents do not produce short pages while older visible rows exist. + fetch_limit = limit if include_trashed else min(limit * 3, 500) + conversations_ref = conversations_ref.limit(fetch_limit).offset(offset) conversations = [doc.to_dict() for doc in conversations_ref.stream()] - return conversations + return _filter_trashed(conversations, include_trashed)[:limit] -def iter_all_conversations(uid: str, batch_size: int = 400, include_discarded: bool = True): - """Yield all conversations for a user, decrypted, in batches. Used for streaming data export.""" +def iter_all_conversations( + uid: str, batch_size: int = 400, include_discarded: bool = True, include_trashed: bool = True +): + """Yield all conversations for a user, decrypted, in batches. Used for streaming data export. + + Defaults to include_trashed=True so /v1/users/export streams the full set of stored + conversations, including those soft-deleted within the 30-day window. Callers that + want only currently-visible conversations should pass include_trashed=False explicitly. 
+ """ conversations_ref = db.collection('users').document(uid).collection(conversations_collection) if not include_discarded: conversations_ref = conversations_ref.where(filter=FieldFilter('discarded', '==', False)) @@ -286,6 +320,8 @@ def iter_all_conversations(uid: str, batch_size: int = 400, include_discarded: b batch = [] for doc in batch_ref.stream(): conv = doc.to_dict() + if not include_trashed and not _is_not_trashed(conv): + continue conv = _prepare_conversation_for_read(conv, uid) or conv batch.append(conv) yield from batch @@ -305,6 +341,88 @@ def update_conversation(uid: str, conversation_id: str, update_data: dict): doc_ref.update(prepared_data) +@prepare_for_read(decrypt_func=_prepare_conversation_for_read) +@with_photos(get_conversation_photos) +def trash_conversation(uid: str, conversation_id: str): + conversation_ref = ( + db.collection('users').document(uid).collection(conversations_collection).document(conversation_id) + ) + conversation_snapshot = conversation_ref.get() + if not conversation_snapshot.exists: + return None + conversation = conversation_snapshot.to_dict() + update_data = {'trashed_at': datetime.now(timezone.utc)} + if conversation.get('visibility') in ['public', 'shared']: + redis_db.remove_conversation_to_uid(conversation_id) + redis_db.remove_public_conversation(conversation_id) + update_data['visibility'] = 'private' + conversation_ref.update(update_data) + return conversation_ref.get().to_dict() + + +@prepare_for_read(decrypt_func=_prepare_conversation_for_read) +@with_photos(get_conversation_photos) +def restore_conversation(uid: str, conversation_id: str): + conversation_ref = ( + db.collection('users').document(uid).collection(conversations_collection).document(conversation_id) + ) + if not conversation_ref.get().exists: + return None + conversation_ref.update({'trashed_at': firestore.DELETE_FIELD}) + return conversation_ref.get().to_dict() + + +@prepare_for_read(decrypt_func=_prepare_conversation_for_read) 
+@with_photos(get_conversation_photos) +def get_trashed_conversations(uid: str, limit: int = 100, offset: int = 0): + conversations_ref = ( + db.collection('users') + .document(uid) + .collection(conversations_collection) + .where(filter=FieldFilter('trashed_at', '!=', None)) + .order_by('trashed_at', direction=firestore.Query.DESCENDING) + .limit(limit) + .offset(offset) + ) + return [doc.to_dict() for doc in conversations_ref.stream()] + + +def list_expired_trashed(cutoff_dt: datetime): + cutoff_dt = _ensure_timezone_aware(cutoff_dt) + query = db.collection_group(conversations_collection).where(filter=FieldFilter('trashed_at', '<', cutoff_dt)) + for doc in query.stream(): + user_ref = doc.reference.parent.parent + if user_ref is None: + continue + yield user_ref.id, doc.id + + +def filter_visible_conversation_ids( + uid: str, conversation_ids: List[str], include_discarded: bool = False, include_trashed: bool = False +) -> List[str]: + if not conversation_ids: + return [] + + user_ref = db.collection('users').document(uid) + conversations_ref = user_ref.collection(conversations_collection) + doc_refs = [conversations_ref.document(str(conversation_id)) for conversation_id in conversation_ids] + docs = db.get_all(doc_refs) + + visible_ids = [] + for doc in docs: + if not doc.exists: + continue + data = doc.to_dict() + if not include_discarded and data.get('discarded'): + continue + if not include_trashed and not _is_not_trashed(data): + continue + visible_ids.append(doc.id) + + visible_set = set(visible_ids) + return [conversation_id for conversation_id in conversation_ids if conversation_id in visible_set] + + def create_audio_files_from_chunks( uid: str, conversation_id: str, @@ -558,6 +676,8 @@ def get_conversations_by_id(uid, conversation_ids): data = doc.to_dict() if data.get('discarded'): continue + if not _is_not_trashed(data): + continue conversations.append(data) return conversations diff --git a/backend/database/vector_db.py b/backend/database/vector_db.py 
index b89642cf53d..6d10335cb65 100644 --- a/backend/database/vector_db.py +++ b/backend/database/vector_db.py @@ -6,6 +6,7 @@ from pinecone import Pinecone +import database.conversations as conversations_db from utils.llm.clients import embeddings import logging @@ -61,7 +62,8 @@ def query_vectors(query: str, uid: str, starts_at: int = None, ends_at: int = No xq = embeddings.embed_query(query) xc = index.query(vector=xq, top_k=k, include_metadata=False, filter=filter_data, namespace="ns1") - return [item['id'].replace(f'{uid}-', '') for item in xc['matches']] + conversation_ids = [item['id'].replace(f'{uid}-', '') for item in xc['matches']] + return conversations_db.filter_visible_conversation_ids(uid, conversation_ids) def query_vectors_by_metadata( @@ -130,6 +132,7 @@ def query_vectors_by_metadata( conversations_id = [item['id'].replace(f'{uid}-', '') for item in xc['matches']] conversations_id.sort(key=lambda x: conversation_id_to_matches[x], reverse=True) + conversations_id = conversations_db.filter_visible_conversation_ids(uid, conversations_id) return conversations_id[:limit] if len(conversations_id) > limit else conversations_id diff --git a/backend/firestore.indexes.json b/backend/firestore.indexes.json new file mode 100644 index 00000000000..dc5538e4122 --- /dev/null +++ b/backend/firestore.indexes.json @@ -0,0 +1,19 @@ +{ + "indexes": [ + { + "collectionGroup": "conversations", + "queryScope": "COLLECTION_GROUP", + "fields": [ + { + "fieldPath": "trashed_at", + "order": "ASCENDING" + }, + { + "fieldPath": "__name__", + "order": "ASCENDING" + } + ] + } + ], + "fieldOverrides": [] +} diff --git a/backend/models/conversation.py b/backend/models/conversation.py index 7bcce5517f3..cafe49a493c 100644 --- a/backend/models/conversation.py +++ b/backend/models/conversation.py @@ -95,6 +95,7 @@ class Conversation(BaseModel): app_id: Optional[str] = None discarded: bool = False + trashed_at: Optional[datetime] = None visibility: ConversationVisibility = 
ConversationVisibility.private starred: bool = False diff --git a/backend/routers/conversations.py b/backend/routers/conversations.py index 302dbf9081d..2b33be15dc7 100644 --- a/backend/routers/conversations.py +++ b/backend/routers/conversations.py @@ -130,6 +130,7 @@ def get_conversations( offset: int = 0, statuses: Optional[str] = "processing,completed", include_discarded: bool = True, + include_trashed: bool = Query(False), start_date: Optional[datetime] = Query(None, description="Filter by start date (inclusive)"), end_date: Optional[datetime] = Query(None, description="Filter by end date (inclusive)"), folder_id: Optional[str] = Query(None, description="Filter by folder ID"), @@ -146,6 +147,7 @@ def get_conversations( limit, offset, include_discarded=include_discarded, + include_trashed=include_trashed, statuses=statuses.split(",") if len(statuses) > 0 else [], start_date=start_date, end_date=end_date, @@ -161,19 +163,47 @@ def get_conversations( def get_conversations_count( statuses: Optional[str] = Query(None, description="Comma-separated status filter (e.g. 
processing,completed)"), include_discarded: bool = Query(False), + include_trashed: bool = Query(False), uid: str = Depends(auth.get_current_user_uid), ): status_list = [s.strip() for s in statuses.split(',') if s.strip()] if statuses else [] - count = conversations_db.get_conversations_count(uid, include_discarded=include_discarded, statuses=status_list) + count = conversations_db.get_conversations_count( + uid, include_discarded=include_discarded, include_trashed=include_trashed, statuses=status_list + ) return {'count': count} +@router.get("/v1/conversations/trash", response_model=List[Conversation], tags=['conversations']) +def get_trashed_conversations( + limit: int = 100, + offset: int = 0, + uid: str = Depends(auth.get_current_user_uid), +): + return conversations_db.get_trashed_conversations(uid, limit=limit, offset=offset) + + @router.get("/v1/conversations/{conversation_id}", response_model=Conversation, tags=['conversations']) def get_conversation_by_id(conversation_id: str, uid: str = Depends(auth.get_current_user_uid)): logger.info(f'get_conversation_by_id {uid} {conversation_id}') return _get_valid_conversation_by_id(uid, conversation_id) +@router.post("/v1/conversations/{conversation_id}/trash", response_model=Conversation, tags=['conversations']) +def trash_conversation(conversation_id: str, uid: str = Depends(auth.get_current_user_uid)): + conversation = conversations_db.trash_conversation(uid, conversation_id) + if conversation is None: + raise HTTPException(status_code=404, detail="Conversation not found") + return conversation + + +@router.post("/v1/conversations/{conversation_id}/restore", response_model=Conversation, tags=['conversations']) +def restore_conversation(conversation_id: str, uid: str = Depends(auth.get_current_user_uid)): + conversation = conversations_db.restore_conversation(uid, conversation_id) + if conversation is None: + raise HTTPException(status_code=404, detail="Conversation not found") + return conversation + + 
@router.patch("/v1/conversations/{conversation_id}/title", tags=['conversations']) def patch_conversation_title(conversation_id: str, title: str, uid: str = Depends(auth.get_current_user_uid)): _get_valid_conversation_by_id(uid, conversation_id) diff --git a/backend/tests/integration/conftest.py b/backend/tests/integration/conftest.py index 54c26c1404d..9c161f1673a 100644 --- a/backend/tests/integration/conftest.py +++ b/backend/tests/integration/conftest.py @@ -5,10 +5,19 @@ import os import sys -import firebase_admin import pytest -from dotenv import load_dotenv -from firebase_admin import credentials + +try: + from dotenv import load_dotenv +except ModuleNotFoundError: + load_dotenv = None + +try: + import firebase_admin + from firebase_admin import credentials +except ModuleNotFoundError: + firebase_admin = None + credentials = None # Add project root to path (go up from integration -> tests -> backend -> root) project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')) @@ -20,7 +29,7 @@ sys.path.insert(0, backend_dir) env_file = os.path.join(backend_dir, '.env') -if os.path.exists(env_file): +if load_dotenv is not None and os.path.exists(env_file): load_dotenv(env_file) os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = backend_dir + "/" + os.getenv('GOOGLE_APPLICATION_CREDENTIALS') @@ -28,6 +37,10 @@ @pytest.fixture(scope="session", autouse=True) def initialize_firebase(): """Initialize Firebase Admin SDK before running tests""" + if firebase_admin is None: + yield + return + try: cred = credentials.ApplicationDefault() firebase_admin.initialize_app(cred) diff --git a/backend/tests/integration/test_conversation_trash.py b/backend/tests/integration/test_conversation_trash.py new file mode 100644 index 00000000000..1abe8ce5863 --- /dev/null +++ b/backend/tests/integration/test_conversation_trash.py @@ -0,0 +1,502 @@ +import os +import sys +import types +import importlib +from copy import deepcopy +from datetime import datetime, timezone 
+from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +os.environ.setdefault( + "ENCRYPTION_SECRET", + "omi_ZwB2ZNqB2HHpMK6wStk7sTpavJiPTFg7gXUHnc4tFABPU6pZ2c2DKgehtfgi4RZv", +) + +BACKEND_DIR = Path(__file__).resolve().parent.parent.parent +if str(BACKEND_DIR) not in sys.path: + sys.path.insert(0, str(BACKEND_DIR)) + + +def _prefer_real_module(module_name): + previous_module = sys.modules.get(module_name) + sys.modules.pop(module_name, None) + try: + importlib.import_module(module_name) + except Exception: + if previous_module is not None: + sys.modules[module_name] = previous_module + + +if "google" not in sys.modules: + google_stub = types.ModuleType("google") + sys.modules["google"] = google_stub +else: + google_stub = sys.modules["google"] + +api_core_stub = types.ModuleType("google.api_core") +api_core_exceptions_stub = types.ModuleType("google.api_core.exceptions") + + +class NotFound(Exception): + pass + + +api_core_exceptions_stub.NotFound = NotFound +sys.modules.setdefault("google.api_core", api_core_stub) +sys.modules.setdefault("google.api_core.exceptions", api_core_exceptions_stub) + +cloud_stub = types.ModuleType("google.cloud") +firestore_stub = types.ModuleType("google.cloud.firestore") +firestore_v1_stub = types.ModuleType("google.cloud.firestore_v1") + + +class Query: + DESCENDING = "DESCENDING" + + +class FieldFilter: + def __init__(self, field_path, op_string, value): + self.field_path = field_path + self.op_string = op_string + self.value = value + + +class DeleteField: + def __repr__(self): + return "DELETE_FIELD" + + +firestore_stub.Query = Query +firestore_stub.DELETE_FIELD = DeleteField() +firestore_v1_stub.FieldFilter = FieldFilter +cloud_stub.firestore = firestore_stub +google_stub.cloud = cloud_stub +sys.modules.setdefault("google.cloud", cloud_stub) +sys.modules.setdefault("google.cloud.firestore", firestore_stub) +sys.modules.setdefault("google.cloud.firestore_v1", firestore_v1_stub) + +audio_file_stub = 
types.ModuleType("models.audio_file") +audio_file_stub.AudioFile = type("AudioFile", (), {}) +sys.modules["models.audio_file"] = audio_file_stub + +conversation_enums_stub = types.ModuleType("models.conversation_enums") +conversation_enums_stub.ConversationStatus = type("ConversationStatus", (), {}) +conversation_enums_stub.PostProcessingModel = type("PostProcessingModel", (), {"fal_whisperx": "fal_whisperx"}) +conversation_enums_stub.PostProcessingStatus = type("PostProcessingStatus", (), {}) +sys.modules["models.conversation_enums"] = conversation_enums_stub + +conversation_photo_stub = types.ModuleType("models.conversation_photo") +conversation_photo_stub.ConversationPhoto = type("ConversationPhoto", (), {}) +sys.modules["models.conversation_photo"] = conversation_photo_stub + +transcript_segment_stub = types.ModuleType("models.transcript_segment") +transcript_segment_stub.TranscriptSegment = type("TranscriptSegment", (), {}) +sys.modules["models.transcript_segment"] = transcript_segment_stub + +for model_module_name in [ + "models.audio_file", + "models.conversation_enums", + "models.conversation_photo", + "models.transcript_segment", +]: + _prefer_real_module(model_module_name) + + +class FakeDocumentSnapshot: + def __init__(self, doc_id, data, reference): + self.id = doc_id + self.exists = data is not None + self._data = deepcopy(data) if data is not None else None + self.reference = reference + + def to_dict(self): + return deepcopy(self._data) if self._data is not None else None + + +class FakePhotosCollection: + def stream(self): + return [] + + +class FakeConversationDocument: + def __init__(self, db, uid, conversation_id): + self._db = db + self.uid = uid + self.id = conversation_id + + def set(self, data): + self._db.conversations.setdefault(self.uid, {})[self.id] = deepcopy(data) + + def get(self): + data = self._db.conversations.get(self.uid, {}).get(self.id) + return FakeDocumentSnapshot(self.id, data, self) + + def update(self, data): + existing = 
self._db.conversations.setdefault(self.uid, {}).setdefault(self.id, {}) + for key, value in data.items(): + if value is firestore_stub.DELETE_FIELD or "DELETE_FIELD" in repr(value): + existing.pop(key, None) + else: + existing[key] = value + + def delete(self): + self._db.conversations.get(self.uid, {}).pop(self.id, None) + + def collection(self, name): + if name != "photos": + raise AssertionError(f"Unexpected subcollection {name}") + return FakePhotosCollection() + + +class FakeConversationQuery: + def __init__(self, db, uid, filters=None, order=None, limit_value=None, offset_value=0): + self._db = db + self.uid = uid + self._filters = filters or [] + self._order = order + self._limit = limit_value + self._offset = offset_value + + def document(self, conversation_id): + return FakeConversationDocument(self._db, self.uid, conversation_id) + + def where(self, filter=None): + return FakeConversationQuery( + self._db, + self.uid, + self._filters + [filter], + self._order, + self._limit, + self._offset, + ) + + def order_by(self, field_path, direction=None): + return FakeConversationQuery( + self._db, + self.uid, + self._filters, + (field_path, direction), + self._limit, + self._offset, + ) + + def limit(self, limit_value): + return FakeConversationQuery(self._db, self.uid, self._filters, self._order, limit_value, self._offset) + + def offset(self, offset_value): + return FakeConversationQuery(self._db, self.uid, self._filters, self._order, self._limit, offset_value) + + def stream(self): + items = list(self._db.conversations.get(self.uid, {}).items()) + items = [(doc_id, data) for doc_id, data in items if self._matches(data)] + + if self._order: + field_path, direction = self._order + reverse = str(direction).upper().endswith("DESCENDING") + items.sort(key=lambda item: self._field_value(item[1], field_path), reverse=reverse) + + if self._offset: + items = items[self._offset :] + if self._limit is not None: + items = items[: self._limit] + + return 
[FakeDocumentSnapshot(doc_id, data, self.document(doc_id)) for doc_id, data in items] + + def _matches(self, data): + for field_filter in self._filters: + field_path = field_filter.field_path + op = field_filter.op_string + expected = field_filter.value + actual = self._field_value(data, field_path) + if op == "==" and actual != expected: + return False + if op == "!=" and actual == expected: + return False + if op == "in" and actual not in expected: + return False + if op == ">=" and actual < expected: + return False + if op == "<=" and actual > expected: + return False + if op == "<" and actual >= expected: + return False + return True + + @staticmethod + def _field_value(data, field_path): + value = data + for part in field_path.split("."): + if not isinstance(value, dict): + return None + value = value.get(part) + return value + + +class FakeUserDocument: + def __init__(self, db, uid): + self._db = db + self.id = uid + + def collection(self, name): + if name != "conversations": + raise AssertionError(f"Unexpected collection {name}") + return FakeConversationQuery(self._db, self.id) + + +class FakeUsersCollection: + def __init__(self, db): + self._db = db + + def document(self, uid): + return FakeUserDocument(self._db, uid) + + +class FakeFirestore: + def __init__(self): + self.conversations = {} + + def reset(self): + self.conversations = {} + + def collection(self, name): + if name != "users": + raise AssertionError(f"Unexpected collection {name}") + return FakeUsersCollection(self) + + def get_all(self, doc_refs): + return [doc_ref.get() for doc_ref in doc_refs] + + +fake_db = FakeFirestore() + +database_client_stub = types.ModuleType("database._client") +database_client_stub.db = fake_db +database_client_stub.document_id_from_seed = MagicMock(return_value="seed-id") +database_stub = sys.modules.setdefault("database", types.ModuleType("database")) +database_stub.__path__ = [str(BACKEND_DIR / "database")] +sys.modules["database._client"] = database_client_stub 
+ +redis_db_stub = sys.modules.get("database.redis_db") or types.ModuleType("database.redis_db") +if "r" not in redis_db_stub.__dict__: + redis_db_stub.r = MagicMock() +redis_db_stub.get_user_data_protection_level = MagicMock(return_value="standard") +redis_db_stub.set_user_data_protection_level = MagicMock() +redis_db_stub.remove_conversation_to_uid = MagicMock() +redis_db_stub.remove_public_conversation = MagicMock() +sys.modules["database.redis_db"] = redis_db_stub + +users_db_stub = sys.modules.get("database.users") or types.ModuleType("database.users") +users_db_stub.get_user_profile = MagicMock(return_value={"data_protection_level": "standard"}) +sys.modules["database.users"] = users_db_stub + +utils_other_stub = types.ModuleType("utils.other") +utils_other_stub.__path__ = [str(BACKEND_DIR / "utils" / "other")] +utils_stub = sys.modules.setdefault("utils", types.ModuleType("utils")) +utils_stub.__path__ = [str(BACKEND_DIR / "utils")] +utils_conversations_stub = sys.modules.setdefault("utils.conversations", types.ModuleType("utils.conversations")) +utils_conversations_stub.__path__ = [str(BACKEND_DIR / "utils" / "conversations")] +sys.modules["utils.other"] = utils_other_stub + +hume_stub = sys.modules.get("utils.other.hume") or types.ModuleType("utils.other.hume") +if "HumeJobModelPredictionResponseModel" not in hume_stub.__dict__: + hume_stub.HumeJobModelPredictionResponseModel = type("HumeJobModelPredictionResponseModel", (), {}) +sys.modules["utils.other.hume"] = hume_stub +utils_other_stub.hume = hume_stub + +storage_stub = sys.modules.get("utils.other.storage") or types.ModuleType("utils.other.storage") +if "list_audio_chunks" not in storage_stub.__dict__: + storage_stub.list_audio_chunks = MagicMock(return_value=[]) +if "delete_conversation_audio_files" not in storage_stub.__dict__: + storage_stub.delete_conversation_audio_files = MagicMock() +sys.modules["utils.other.storage"] = storage_stub +utils_other_stub.storage = storage_stub + +typesense_stub = 
types.ModuleType("typesense") +typesense_stub.Client = MagicMock(return_value=MagicMock()) +sys.modules.setdefault("typesense", typesense_stub) + +existing_conversations_module = sys.modules.get("database.conversations") +if existing_conversations_module and "upsert_conversation" not in existing_conversations_module.__dict__: + del sys.modules["database.conversations"] + +import database.conversations as conversations_db +import utils.conversations.search as search + +REAL_CONVERSATION_DB_FUNCS = { + name: getattr(conversations_db, name) + for name in [ + "filter_visible_conversation_ids", + "get_conversations", + "get_conversations_without_photos", + "get_trashed_conversations", + "restore_conversation", + "set_conversation_visibility", + "trash_conversation", + "upsert_conversation", + ] +} + + +@pytest.fixture(scope="session", autouse=True) +def initialize_firebase(): + yield + + +@pytest.fixture(autouse=True) +def reset_fake_db(): + for name, func in REAL_CONVERSATION_DB_FUNCS.items(): + setattr(conversations_db, name, func) + fake_db.reset() + redis_db_stub.remove_conversation_to_uid.reset_mock() + redis_db_stub.remove_public_conversation.reset_mock() + yield + fake_db.reset() + + +def _conversation(conversation_id, created_at=None, discarded=False, title="Conversation"): + created_at = created_at or datetime.now(timezone.utc) + return { + "id": conversation_id, + "created_at": created_at, + "started_at": created_at, + "finished_at": created_at, + "discarded": discarded, + "status": "completed", + "structured": {"title": title, "overview": title}, + "data_protection_level": "standard", + } + + +def _upsert(uid, conversation_id, **overrides): + data = _conversation(conversation_id, **overrides) + conversations_db.upsert_conversation(uid, data) + return data + + +def test_trash_then_restore(): + uid = "uid-trash-restore" + _upsert(uid, "conv-1") + + trashed = conversations_db.trash_conversation(uid, "conv-1") + assert trashed["id"] == "conv-1" + assert 
isinstance(trashed["trashed_at"], datetime) + + restored = conversations_db.restore_conversation(uid, "conv-1") + assert restored["id"] == "conv-1" + assert "trashed_at" not in restored + + +@pytest.mark.parametrize("visibility", ["shared", "public"]) +def test_trash_revokes_shared_access(visibility): + uid = "uid-trash-shared" + _upsert(uid, "conv-1") + conversations_db.set_conversation_visibility(uid, "conv-1", visibility) + + trashed = conversations_db.trash_conversation(uid, "conv-1") + + assert trashed["visibility"] == "private" + assert isinstance(trashed["trashed_at"], datetime) + redis_db_stub.remove_conversation_to_uid.assert_called_once_with("conv-1") + redis_db_stub.remove_public_conversation.assert_called_once_with("conv-1") + + +def test_get_trashed_conversations(): + uid = "uid-trashed-list" + _upsert(uid, "older") + _upsert(uid, "newer") + + conversations_db.trash_conversation(uid, "older") + conversations_db.trash_conversation(uid, "newer") + + trashed = conversations_db.get_trashed_conversations(uid) + assert [conversation["id"] for conversation in trashed] == ["newer", "older"] + assert trashed[0]["trashed_at"] >= trashed[1]["trashed_at"] + + +def test_default_list_excludes_trashed(): + uid = "uid-list" + _upsert(uid, "visible") + _upsert(uid, "trashed") + conversations_db.trash_conversation(uid, "trashed") + + visible = conversations_db.get_conversations(uid) + with_trashed = conversations_db.get_conversations(uid, include_trashed=True) + + assert [conversation["id"] for conversation in visible] == ["visible"] + assert {conversation["id"] for conversation in with_trashed} == {"visible", "trashed"} + + +def test_default_list_fetches_past_recent_trashed_conversations(): + uid = "uid-list-pagination" + base = datetime(2026, 1, 1, tzinfo=timezone.utc) + for index in range(4): + _upsert(uid, f"visible-{index}", created_at=base.replace(day=index + 1)) + for index in range(2): + _upsert(uid, f"trashed-{index}", created_at=base.replace(day=index + 10)) + 
conversations_db.trash_conversation(uid, f"trashed-{index}") + + visible = conversations_db.get_conversations(uid, limit=3) + visible_without_photos = conversations_db.get_conversations_without_photos(uid, limit=3) + + assert [conversation["id"] for conversation in visible] == ["visible-3", "visible-2", "visible-1"] + assert [conversation["id"] for conversation in visible_without_photos] == [ + "visible-3", + "visible-2", + "visible-1", + ] + + +def test_trash_404_when_missing(): + assert conversations_db.trash_conversation("uid-missing", "does-not-exist") is None + + +def test_filter_visible_conversation_ids(): + uid = "uid-visible" + _upsert(uid, "trashed") + _upsert(uid, "discarded", discarded=True) + _upsert(uid, "visible") + conversations_db.trash_conversation(uid, "trashed") + + visible_ids = conversations_db.filter_visible_conversation_ids(uid, ["trashed", "discarded", "visible"]) + + assert visible_ids == ["visible"] + + +def test_search_excludes_trashed(): + uid = "uid-search" + now_timestamp = int(datetime.now(timezone.utc).timestamp()) + _upsert(uid, "visible") + _upsert(uid, "trashed") + conversations_db.trash_conversation(uid, "trashed") + + search.client = MagicMock() + search.client.collections.__getitem__.return_value.documents.search.return_value = { + "hits": [ + { + "document": { + "id": "trashed", + "created_at": now_timestamp, + "started_at": now_timestamp, + "finished_at": now_timestamp, + "structured": {"title": "match", "overview": "match"}, + "userId": uid, + } + }, + { + "document": { + "id": "visible", + "created_at": now_timestamp, + "started_at": now_timestamp, + "finished_at": now_timestamp, + "structured": {"title": "match", "overview": "match"}, + "userId": uid, + } + }, + ] + } + + results = search.search_conversations(uid, "match", per_page=10) + + assert [item["id"] for item in results["items"]] == ["visible"] diff --git a/backend/tests/unit/test_lock_bypass_fixes.py b/backend/tests/unit/test_lock_bypass_fixes.py index 
04449e2f8c2..bf97ae785d7 100644 --- a/backend/tests/unit/test_lock_bypass_fixes.py +++ b/backend/tests/unit/test_lock_bypass_fixes.py @@ -132,6 +132,10 @@ def _make_memory(locked=False, memory_id='mem-1'): } +def _pass_through_visible_ids(uid, conversation_ids, **kwargs): + return conversation_ids + + # ============================================================================= # Test sync.py audio endpoints — call the real router functions # ============================================================================= @@ -274,7 +278,14 @@ def test_search_excludes_locked_results(self): 'found': 2, } - with patch('utils.conversations.search.client', mock_client): + with ( + patch('utils.conversations.search.client', mock_client), + patch( + 'utils.conversations.search.conversations_db.filter_visible_conversation_ids', + side_effect=_pass_through_visible_ids, + create=True, + ), + ): from utils.conversations.search import search_conversations result = search_conversations(uid='test-uid', query='test') @@ -317,7 +328,14 @@ def test_search_total_pages_does_not_leak_locked_count(self): 'hits': hits, 'found': 6, } - with patch('utils.conversations.search.client', mock_client): + with ( + patch('utils.conversations.search.client', mock_client), + patch( + 'utils.conversations.search.conversations_db.filter_visible_conversation_ids', + side_effect=_pass_through_visible_ids, + create=True, + ), + ): from utils.conversations.search import search_conversations result = search_conversations(uid='test-uid', query='test', per_page=5) @@ -345,7 +363,14 @@ def test_search_total_pages_last_page_no_leak(self): 'hits': hits, 'found': 2, } - with patch('utils.conversations.search.client', mock_client): + with ( + patch('utils.conversations.search.client', mock_client), + patch( + 'utils.conversations.search.conversations_db.filter_visible_conversation_ids', + side_effect=_pass_through_visible_ids, + create=True, + ), + ): from utils.conversations.search import search_conversations 
result = search_conversations(uid='test-uid', query='test', per_page=5) diff --git a/backend/tests/unit/test_purge_trashed_cron.py b/backend/tests/unit/test_purge_trashed_cron.py new file mode 100644 index 00000000000..741382808e9 --- /dev/null +++ b/backend/tests/unit/test_purge_trashed_cron.py @@ -0,0 +1,139 @@ +import os +import sys +import types +from datetime import datetime, timedelta, timezone +from pathlib import Path +from unittest.mock import MagicMock, call + +import pytest + +os.environ.setdefault( + "ENCRYPTION_SECRET", + "omi_ZwB2ZNqB2HHpMK6wStk7sTpavJiPTFg7gXUHnc4tFABPU6pZ2c2DKgehtfgi4RZv", +) + +BACKEND_DIR = Path(__file__).resolve().parent.parent.parent +if str(BACKEND_DIR) not in sys.path: + sys.path.insert(0, str(BACKEND_DIR)) + + +def _stub_module(name): + mod = types.ModuleType(name) + sys.modules[name] = mod + if "." in name: + parent_name, attr_name = name.rsplit(".", 1) + parent = sys.modules.get(parent_name) + if parent is None: + parent = types.ModuleType(parent_name) + parent.__path__ = [] + sys.modules[parent_name] = parent + setattr(parent, attr_name, mod) + return mod + + +conversations_db_stub = _stub_module("database.conversations") +conversations_db_stub.list_expired_trashed = MagicMock(return_value=[]) +conversations_db_stub.delete_conversation = MagicMock() + +memories_db_stub = _stub_module("database.memories") +memories_db_stub.get_memory_ids_for_conversation = MagicMock(return_value=[]) +memories_db_stub.delete_memories_for_conversation = MagicMock() + +action_items_db_stub = _stub_module("database.action_items") +action_items_db_stub.delete_action_items_for_conversation = MagicMock() + +vector_db_stub = _stub_module("database.vector_db") +vector_db_stub.delete_vector = MagicMock() +vector_db_stub.delete_memory_vector = MagicMock() + +log_sanitizer_stub = _stub_module("utils.log_sanitizer") +log_sanitizer_stub.sanitize_pii = MagicMock(side_effect=lambda value: value) + +utils_other_stub = _stub_module("utils.other") 
+utils_other_stub.__path__ = [str(BACKEND_DIR / "utils" / "other")] + +storage_stub = _stub_module("utils.other.storage") +storage_stub.delete_conversation_audio_files = MagicMock() +utils_other_stub.storage = storage_stub + +sys.modules.pop("utils.other.purge_trashed", None) +import utils.other.purge_trashed as purge_trashed + + +@pytest.fixture(autouse=True) +def reset_mocks(): + conversations_db_stub.list_expired_trashed.reset_mock(return_value=True, side_effect=True) + conversations_db_stub.list_expired_trashed.return_value = [] + conversations_db_stub.delete_conversation.reset_mock(side_effect=True) + memories_db_stub.get_memory_ids_for_conversation.reset_mock(return_value=True, side_effect=True) + memories_db_stub.get_memory_ids_for_conversation.return_value = [] + memories_db_stub.delete_memories_for_conversation.reset_mock(side_effect=True) + action_items_db_stub.delete_action_items_for_conversation.reset_mock(side_effect=True) + vector_db_stub.delete_vector.reset_mock(side_effect=True) + vector_db_stub.delete_memory_vector.reset_mock(side_effect=True) + log_sanitizer_stub.sanitize_pii.reset_mock(side_effect=True) + log_sanitizer_stub.sanitize_pii.side_effect = lambda value: value + storage_stub.delete_conversation_audio_files.reset_mock(side_effect=True) + purge_trashed.logger = MagicMock() + yield + + +def test_should_run_purge_trashed_job_at_3am(): + assert purge_trashed.should_run_purge_trashed_job(datetime(2026, 5, 6, 3, tzinfo=timezone.utc)) is True + + for hour in [0, 1, 2, 4, 12, 23]: + assert purge_trashed.should_run_purge_trashed_job(datetime(2026, 5, 6, hour, tzinfo=timezone.utc)) is False + + +def test_purge_does_not_touch_recent(): + now = datetime(2026, 5, 6, 12, tzinfo=timezone.utc) + recent_trashed_at = now - timedelta(days=29) + + def list_expired(cutoff): + assert recent_trashed_at >= cutoff + return [] + + conversations_db_stub.list_expired_trashed.side_effect = list_expired + + purged_count = 
purge_trashed.purge_expired_trashed_conversations(now) + + assert purged_count == 0 + conversations_db_stub.delete_conversation.assert_not_called() + vector_db_stub.delete_vector.assert_not_called() + storage_stub.delete_conversation_audio_files.assert_not_called() + memories_db_stub.delete_memories_for_conversation.assert_not_called() + action_items_db_stub.delete_action_items_for_conversation.assert_not_called() + + +def test_purge_removes_expired(): + now = datetime(2026, 5, 6, 12, tzinfo=timezone.utc) + conversations_db_stub.list_expired_trashed.return_value = [("uid-1", "conv-1")] + memories_db_stub.get_memory_ids_for_conversation.return_value = ["mem-1", "mem-2"] + + purged_count = purge_trashed.purge_expired_trashed_conversations(now) + + assert purged_count == 1 + conversations_db_stub.list_expired_trashed.assert_called_once_with(now - timedelta(days=30)) + conversations_db_stub.delete_conversation.assert_called_once_with("uid-1", "conv-1") + vector_db_stub.delete_vector.assert_called_once_with("uid-1", "conv-1") + storage_stub.delete_conversation_audio_files.assert_called_once_with("uid-1", "conv-1") + memories_db_stub.get_memory_ids_for_conversation.assert_called_once_with("uid-1", "conv-1") + memories_db_stub.delete_memories_for_conversation.assert_called_once_with("uid-1", "conv-1") + vector_db_stub.delete_memory_vector.assert_has_calls([call("uid-1", "mem-1"), call("uid-1", "mem-2")]) + action_items_db_stub.delete_action_items_for_conversation.assert_called_once_with("uid-1", "conv-1") + + +def test_purge_continues_on_error(): + now = datetime(2026, 5, 6, 12, tzinfo=timezone.utc) + conversations_db_stub.list_expired_trashed.return_value = [("uid-bad", "conv-bad"), ("uid-good", "conv-good")] + conversations_db_stub.delete_conversation.side_effect = [RuntimeError("delete failed"), None] + + purged_count = purge_trashed.purge_expired_trashed_conversations(now) + + assert purged_count == 1 + conversations_db_stub.delete_conversation.assert_has_calls( + 
[call("uid-bad", "conv-bad"), call("uid-good", "conv-good")] + ) + vector_db_stub.delete_vector.assert_called_once_with("uid-good", "conv-good") + purge_trashed.logger.exception.assert_called_once() + purge_trashed.logger.info.assert_called_once() diff --git a/backend/utils/conversations/search.py b/backend/utils/conversations/search.py index 2543684b97c..7bbd7cb14ba 100644 --- a/backend/utils/conversations/search.py +++ b/backend/utils/conversations/search.py @@ -5,6 +5,8 @@ import typesense +import database.conversations as conversations_db + client = typesense.Client( { 'nodes': [{'host': os.getenv('TYPESENSE_HOST'), 'port': os.getenv('TYPESENSE_HOST_PORT'), 'protocol': 'https'}], @@ -20,6 +22,7 @@ def search_conversations( page: int = 1, per_page: int = 10, include_discarded: bool = True, + include_trashed: bool = False, start_date: int = None, end_date: int = None, ) -> Dict: @@ -46,8 +49,17 @@ def search_conversations( results = client.collections['conversations'].documents.search(search_parameters) memories = [] + visible_ids = conversations_db.filter_visible_conversation_ids( + uid, + [item['document']['id'] for item in results['hits']], + include_discarded=include_discarded, + include_trashed=include_trashed, + ) + visible_id_set = set(visible_ids) for item in results['hits']: doc = item['document'] + if doc['id'] not in visible_id_set: + continue # Exclude locked conversations entirely to prevent inference leaks if doc.get('is_locked', False): continue diff --git a/backend/utils/other/jobs.py b/backend/utils/other/jobs.py index b0ace666133..7e83aa58adb 100644 --- a/backend/utils/other/jobs.py +++ b/backend/utils/other/jobs.py @@ -1,8 +1,12 @@ from utils.other.notifications import should_run_job as should_run_daily_notification_job from utils.other.notifications import start_cron_job as start_cron_notification_job +from utils.other.purge_trashed import purge_expired_trashed_conversations, should_run_purge_trashed_job async def start_job(): # Notification 
if should_run_daily_notification_job(): await start_cron_notification_job() + + if should_run_purge_trashed_job(): + purge_expired_trashed_conversations() diff --git a/backend/utils/other/purge_trashed.py b/backend/utils/other/purge_trashed.py new file mode 100644 index 00000000000..8ee6bbe7f12 --- /dev/null +++ b/backend/utils/other/purge_trashed.py @@ -0,0 +1,54 @@ +import logging +import uuid +from datetime import datetime, timedelta, timezone + +import database.action_items as action_items_db +import database.conversations as conversations_db +import database.memories as memories_db +from database.vector_db import delete_memory_vector, delete_vector +from utils.log_sanitizer import sanitize_pii +from utils.other.storage import delete_conversation_audio_files + +logger = logging.getLogger(__name__) + +RETENTION_DAYS = 30 +PURGE_HOUR_UTC = 3 + + +def should_run_purge_trashed_job(now: datetime | None = None) -> bool: + now = now or datetime.now(timezone.utc) + return now.hour == PURGE_HOUR_UTC + + +def purge_expired_trashed_conversations(now: datetime | None = None) -> int: + now = now or datetime.now(timezone.utc) + cutoff = now - timedelta(days=RETENTION_DAYS) + batch_id = str(uuid.uuid4()) + purged_count = 0 + + for uid, conversation_id in conversations_db.list_expired_trashed(cutoff): + safe_uid = sanitize_pii(uid) + safe_conversation_id = sanitize_pii(conversation_id) + try: + conversations_db.delete_conversation(uid, conversation_id) + delete_vector(uid, conversation_id) + delete_conversation_audio_files(uid, conversation_id) + + memory_ids = memories_db.get_memory_ids_for_conversation(uid, conversation_id) + memories_db.delete_memories_for_conversation(uid, conversation_id) + for memory_id in memory_ids: + delete_memory_vector(uid, memory_id) + + action_items_db.delete_action_items_for_conversation(uid, conversation_id) + purged_count += 1 + except Exception as e: + logger.exception( + 'purge_trashed failed batch_id=%s uid=%s conversation_id=%s error=%s', + 
batch_id, + safe_uid, + safe_conversation_id, + sanitize_pii(str(e)), + ) + + logger.info('purge_trashed completed count=%s batch_id=%s', purged_count, batch_id) + return purged_count