diff --git a/package.json b/package.json index 75a033665a..f231e44ed6 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "fs-extra": "11.3.0", "glob": "10.4.5", "image-type": "^4.1.0", - "libsession_util_nodejs": "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz", + "libsession_util_nodejs": "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz", "libsodium-wrappers-sumo": "^0.7.15", "linkify-it": "^5.0.0", "lodash": "^4.17.21", diff --git a/preload.js b/preload.js index 68a925d9c0..829de8dbb9 100644 --- a/preload.js +++ b/preload.js @@ -57,6 +57,7 @@ window.sessionFeatureFlags = { replaceLocalizedStringsWithKeys: false, // Hooks useClosedGroupV2QAButtons: false, // TODO DO NOT MERGE + useDeterministicEncryption: !isEmpty(process.env.SESSION_ATTACH_DETERMINISTIC_ENCRYPTION), useOnionRequests: true, useTestNet: isTestNet() || isTestIntegration(), useLocalDevNet: !isEmpty(process.env.LOCAL_DEVNET_SEED_URL) diff --git a/protos/SignalService.proto b/protos/SignalService.proto index 14df6e5db9..976b8f34cf 100644 --- a/protos/SignalService.proto +++ b/protos/SignalService.proto @@ -178,17 +178,11 @@ message DataMessage { } message Quote { - - message QuotedAttachment { - optional string contentType = 1; - optional string fileName = 2; - optional AttachmentPointer thumbnail = 3; - } + reserved 3, 4; + reserved "text", "attachments"; required uint64 id = 1; required string author = 2; - optional string text = 3; - repeated QuotedAttachment attachments = 4; } message Preview { @@ -268,16 +262,22 @@ message AttachmentPointer { // @required required fixed64 deprecated_id = 1; - optional string contentType = 2; - optional bytes key = 3; - optional uint32 size = 4; - optional bytes digest = 6; - optional string fileName = 7; - optional uint32 flags = 8; - optional uint32 width = 9; - optional uint32 height = 10; - optional string caption = 11; - optional string url = 101; + optional string contentType = 2; + optional bytes key = 3; + optional uint32 size = 4; + optional bytes digest = 6; + optional string fileName = 7; + optional uint32 flags = 8; + optional uint32 width = 9; + optional uint32 height = 10; + optional string caption = 11; + /** + * This field can be just a URL to the file, or have a fragment appended to it that can contain: + * - `p=` // hex-encoded pubkey of the file server + * - `d=` // present if the file is deterministically encrypted, absent otherwise + * If needed, the fields are joined with `&` and can be parsed/built with the usual URLSearchParams logic + */ + optional string url = 101; } diff --git a/ts/components/conversation/composition/CompositionBox.tsx b/ts/components/conversation/composition/CompositionBox.tsx index b3dcc7c8d9..b7b78082c9 100644 --- a/ts/components/conversation/composition/CompositionBox.tsx +++ b/ts/components/conversation/composition/CompositionBox.tsx @@ -83,10 +83,10 @@ export interface StagedLinkPreviewData { scaledDown: ProcessedLinkPreviewThumbnailType | null; } -export interface StagedAttachmentType extends AttachmentType { +export type StagedAttachmentType = AttachmentType & { file: File; path?: string; // a bit hacky, but this is the only way to make our sending audio message be playable, this must be used only for those message -} +}; export type SendMessageType = { conversationId: string; diff --git 
a/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx b/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx index 885520ce3f..db20a39d0f 100644 --- a/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx +++ b/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx @@ -28,7 +28,8 @@ function formatAttachmentUrl(attachment: PropsForAttachment) { return tr('attachmentsNa'); } - const fileId = attachment.url.split('/').pop() || ''; + const fileUrl = URL.canParse(attachment.url) && new URL(attachment.url); + const fileId = fileUrl ? fileUrl?.pathname.split('/').pop() || '' : ''; if (!fileId) { return tr('attachmentsNa'); diff --git a/ts/components/leftpane/ActionsPanel.tsx b/ts/components/leftpane/ActionsPanel.tsx index ca66eb466b..883be17efa 100644 --- a/ts/components/leftpane/ActionsPanel.tsx +++ b/ts/components/leftpane/ActionsPanel.tsx @@ -22,7 +22,6 @@ import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttac import { DURATION } from '../../session/constants'; -import { reuploadCurrentAvatarUs } from '../../interactions/avatar-interactions/nts-avatar-interactions'; import { onionPathModal, updateDebugMenuModal, @@ -52,14 +51,13 @@ import { useDebugMode } from '../../state/selectors/debug'; import { networkDataActions } from '../../state/ducks/networkData'; import { LUCIDE_ICONS_UNICODE } from '../icon/lucide'; import { AvatarMigrate } from '../../session/utils/job_runners/jobs/AvatarMigrateJob'; -import { NetworkTime } from '../../util/NetworkTime'; import { Storage } from '../../util/storage'; -import { getFileInfoFromFileServer } from '../../session/apis/file_server_api/FileServerApi'; import { themesArray } from '../../themes/constants/colors'; import { isDebugMode, isDevProd } from '../../shared/env_vars'; import { GearAvatarButton } from '../buttons/avatar/GearAvatarButton'; import { useZoomShortcuts } from '../../hooks/useZoomingShortcut'; import { OnionStatusLight } from '../dialog/OnionStatusPathDialog'; +import { AvatarReupload } from '../../session/utils/job_runners/jobs/AvatarReuploadJob'; const StyledContainerAvatar = styled.div` padding: var(--margins-lg); @@ -98,17 +96,6 @@ const triggerSyncIfNeeded = async () => { } }; -const triggerAvatarReUploadIfNeeded = async () => { - const lastAvatarUploadExpiryMs = - (await Data.getItemById(SettingsKey.ntsAvatarExpiryMs))?.value || Number.MAX_SAFE_INTEGER; - - if (NetworkTime.now() > lastAvatarUploadExpiryMs) { - window.log.info('Reuploading avatar...'); - // reupload the avatar - await reuploadCurrentAvatarUs(); - } -}; - /** * This function is called only once: on app startup with a logged in user */ @@ -127,9 +114,8 @@ const doAppStartUp = async () => { }); // refresh our swarm on start to speed up the first message fetching event void Data.cleanupOrphanedAttachments(); - // TODOLATER make this a job of the JobRunner // Note: do not make this a debounce call (as for some reason it doesn't work with promises) - void triggerAvatarReUploadIfNeeded(); + await AvatarReupload.addAvatarReuploadJob(); /* Postpone a little bit of the polling of sogs messages to let the swarm messages come in first. 
*/ global.setTimeout(() => { @@ -147,17 +133,6 @@ const doAppStartUp = async () => { // Schedule a confSyncJob in some time to let anything incoming from the network be applied and see if there is a push needed // Note: this also starts periodic jobs, so we don't need to keep doing it await UserSync.queueNewJobIfNeeded(); - - // on app startup, check that the avatar expiry on the file server - const avatarPointer = ConvoHub.use() - .get(UserUtils.getOurPubKeyStrFromCache()) - .getAvatarPointer(); - if (avatarPointer) { - const details = await getFileInfoFromFileServer(avatarPointer); - if (details?.expiryMs) { - await Storage.put(SettingsKey.ntsAvatarExpiryMs, details.expiryMs); - } - } }, 20000); global.setTimeout(() => { @@ -283,8 +258,7 @@ export const ActionsPanel = () => { if (!ourPrimaryConversation) { return; } - // this won't be run every days, but if the app stays open for more than 10 days - void triggerAvatarReUploadIfNeeded(); + void AvatarReupload.addAvatarReuploadJob(); }, window.sessionFeatureFlags.fsTTL30s ? DURATION.SECONDS * 1 : DURATION.DAYS * 1 ); diff --git a/ts/data/settings-key.ts b/ts/data/settings-key.ts index ee5707eb85..57644e5de4 100644 --- a/ts/data/settings-key.ts +++ b/ts/data/settings-key.ts @@ -11,7 +11,6 @@ const settingsOpengroupPruning = 'prune-setting'; const settingsNotification = 'notification-setting'; const settingsAudioNotification = 'audio-notification-setting'; const hasSyncedInitialConfigurationItem = 'hasSyncedInitialConfigurationItem'; -const ntsAvatarExpiryMs = 'ntsAvatarExpiryMs'; const hasLinkPreviewPopupBeenDisplayed = 'hasLinkPreviewPopupBeenDisplayed'; const hasFollowSystemThemeEnabled = 'hasFollowSystemThemeEnabled'; const hideRecoveryPassword = 'hideRecoveryPassword'; @@ -44,7 +43,6 @@ export const SettingsKey = { settingsNotification, settingsAudioNotification, hasSyncedInitialConfigurationItem, - ntsAvatarExpiryMs, hasLinkPreviewPopupBeenDisplayed, latestUserProfileEnvelopeTimestamp, latestUserGroupEnvelopeTimestamp, diff --git a/ts/interactions/avatar-interactions/nts-avatar-interactions.ts b/ts/interactions/avatar-interactions/nts-avatar-interactions.ts index 3ae3182179..c035827f5d 100644 --- a/ts/interactions/avatar-interactions/nts-avatar-interactions.ts +++ b/ts/interactions/avatar-interactions/nts-avatar-interactions.ts @@ -1,99 +1,68 @@ -import { isEmpty } from 'lodash'; -import { SettingsKey } from '../../data/settings-key'; +import { randombytes_buf } from 'libsodium-wrappers-sumo'; + import { uploadFileToFsWithOnionV4 } from '../../session/apis/file_server_api/FileServerApi'; -import { ConvoHub } from '../../session/conversations'; -import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttachmentsManager'; -import { UserUtils } from '../../session/utils'; -import { fromHexToArray } from '../../session/utils/String'; -import { urlToBlob } from '../../types/attachments/VisualAttachment'; import { processNewAttachment } from '../../types/MessageAttachment'; -import { IMAGE_JPEG } from '../../types/MIME'; import { encryptProfile } from '../../util/crypto/profileEncrypter'; -import { Storage } from '../../util/storage'; import type { ConversationModel } from '../../models/conversation'; import { processAvatarData } from '../../util/avatar/processAvatarData'; -import { UserConfigWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; - -/** - * This function can be used for reupload our avatar to the file server. 
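The startup expiry check against `ntsAvatarExpiryMs` is replaced by a scheduled `AvatarReupload` job; the job body itself is not part of this diff. Below is a minimal sketch of the renew-or-reupload flow the surrounding comments describe, assuming the `extendFileExpiry`, `FS` and `fileServerUrlToFileId` helpers added later in this diff (import paths and the orchestration are illustrative, not the actual job implementation):

```ts
import { extendFileExpiry } from '../../apis/file_server_api/FileServerApi';
import { FS } from '../../apis/file_server_api/FileServerTarget';
import { fileServerUrlToFileId } from '../../apis/file_server_api/types';

// Sketch only: first try to extend the expiry of the already-uploaded avatar file,
// and fall back to a full reupload when extending is unsupported or fails.
async function renewOrReuploadAvatar(avatarPointer: string) {
  const { fileId } = fileServerUrlToFileId(avatarPointer);
  const target = FS.fileUrlToFileTarget(avatarPointer);

  if (fileId && FS.supportsFsExtend(target)) {
    const extended = await extendFileExpiry(fileId, target);
    if (extended) {
      return; // expiry renewed, nothing to reupload
    }
  }

  // otherwise decrypt the local avatar and call uploadAndSetOurAvatarShared({ context: 'reuploadAvatar', ... })
}
```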
- * It will reuse the same profileKey and avatarContent if we have some, or do nothing if one of those is missing. - */ -export async function reuploadCurrentAvatarUs() { - const ourConvo = ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache()); - if (!ourConvo) { - window.log.warn('ourConvo not found... This is not a valid case'); - return null; - } - - // this is a reupload. no need to generate a new profileKey - const ourConvoProfileKey = - ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache())?.getProfileKey() || null; - - const profileKey = ourConvoProfileKey ? fromHexToArray(ourConvoProfileKey) : null; - if (!profileKey || isEmpty(profileKey)) { - window.log.info('reuploadCurrentAvatarUs: our profileKey empty'); - - return null; - } - // Note: we do want to grab the current non-static avatar path here - // to reupload it, no matter if we are a pro user or not. - const currentNonStaticAvatarPath = ourConvo.getAvatarInProfilePath(); - - if (!currentNonStaticAvatarPath) { - window.log.info('No attachment currently set for our convo.. Nothing to do.'); - return null; - } - - const decryptedAvatarUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( - currentNonStaticAvatarPath, - IMAGE_JPEG, - true - ); - - if (!decryptedAvatarUrl) { - window.log.warn('Could not decrypt avatar stored locally..'); - return null; - } - const blob = await urlToBlob(decryptedAvatarUrl); - - const decryptedAvatarData = await blob.arrayBuffer(); - - return uploadAndSetOurAvatarShared({ - decryptedAvatarData, - ourConvo, - profileKey, - context: 'reuploadAvatar', - }); -} +import { + MultiEncryptWrapperActions, + UserConfigWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; +import { UserUtils } from '../../session/utils'; +import { fromBase64ToArray } from '../../session/utils/String'; export async function uploadAndSetOurAvatarShared({ decryptedAvatarData, ourConvo, - profileKey, context, }: { ourConvo: ConversationModel; decryptedAvatarData: ArrayBuffer; - profileKey: Uint8Array; context: 'uploadNewAvatar' | 'reuploadAvatar'; }) { if (!decryptedAvatarData?.byteLength) { window.log.warn('uploadAndSetOurAvatarShared: avatar content is empty'); return null; } + // Note: we want to encrypt & upload the **processed** avatar + // below (resized & converted), not the original one. + const { avatarFallback, mainAvatarDetails } = await processAvatarData(decryptedAvatarData, true); + + let encryptedData: ArrayBuffer; + let encryptionKey: Uint8Array; + const deterministicEncryption = window.sessionFeatureFlags?.useDeterministicEncryption; + if (deterministicEncryption) { + const encryptedContent = await MultiEncryptWrapperActions.attachmentEncrypt({ + allowLarge: false, + seed: await UserUtils.getUserEd25519Seed(), + data: new Uint8Array(mainAvatarDetails.outputBuffer), + domain: 'profilePic', + }); + encryptedData = encryptedContent.encryptedData; + encryptionKey = encryptedContent.encryptionKey; + } else { + // if this is a reupload, reuse the current profile key. Otherwise generate a new one + const existingProfileKey = ourConvo.getProfileKey(); + const profileKey = + context === 'reuploadAvatar' && existingProfileKey + ? 
fromBase64ToArray(existingProfileKey) + : randombytes_buf(32); + encryptedData = await encryptProfile(mainAvatarDetails.outputBuffer, profileKey); + encryptionKey = profileKey; + } - const encryptedData = await encryptProfile(decryptedAvatarData, profileKey); - - const avatarPointer = await uploadFileToFsWithOnionV4(encryptedData); + const avatarPointer = await uploadFileToFsWithOnionV4(encryptedData, deterministicEncryption); if (!avatarPointer) { window.log.warn('failed to upload avatar to file server'); return null; } - const { fileUrl, expiresMs } = avatarPointer; + // Note: we don't care about the expiry of the file anymore. + // This is because we renew the expiry of the file itself, and only when that fails we reupload the avatar content. + const { fileUrl } = avatarPointer; // Note: processing the avatar here doesn't change the buffer (unless the first one was uploaded as an image too big for an avatar.) // so, once we have deterministic encryption of avatars, the uploaded should always have the same hash - const { avatarFallback, mainAvatarDetails } = await processAvatarData(decryptedAvatarData); // this encrypts and save the new avatar and returns a new attachment path const savedMainAvatar = await processNewAttachment({ @@ -106,7 +75,7 @@ export async function uploadAndSetOurAvatarShared({ ? await processNewAttachment({ isRaw: true, data: avatarFallback.outputBuffer, - contentType: avatarFallback.contentType, // contentType is mostly used to generate previews and screenshot. We do not care for those in this case. + contentType: avatarFallback.contentType, }) : null; @@ -118,17 +87,16 @@ export async function uploadAndSetOurAvatarShared({ displayName: null, avatarPointer: fileUrl, type: 'setAvatarDownloadedPrivate', - profileKey, + profileKey: encryptionKey, }); - await Storage.put(SettingsKey.ntsAvatarExpiryMs, expiresMs); if (context === 'uploadNewAvatar') { await UserConfigWrapperActions.setNewProfilePic({ - key: profileKey, + key: encryptionKey, url: fileUrl, }); } else if (context === 'reuploadAvatar') { await UserConfigWrapperActions.setReuploadProfilePic({ - key: profileKey, + key: encryptionKey, url: fileUrl, }); } diff --git a/ts/models/conversation.ts b/ts/models/conversation.ts index 942d563faf..4f9cd85a35 100644 --- a/ts/models/conversation.ts +++ b/ts/models/conversation.ts @@ -1772,9 +1772,8 @@ export class ConversationModel extends Model { const updatedAtSeconds = this.getProfileUpdatedSeconds(); return new OutgoingUserProfile({ - avatarPointer, + profilePic: { url: avatarPointer, key: profileKey ? 
from_hex(profileKey) : null }, displayName, - profileKey, updatedAtSeconds, }); } diff --git a/ts/models/message.ts b/ts/models/message.ts index 5837dc7e7a..ca6b7106a7 100644 --- a/ts/models/message.ts +++ b/ts/models/message.ts @@ -13,7 +13,6 @@ import { attachmentIdAsStrFromUrl, uploadAttachmentsToFileServer, uploadLinkPreviewToFileServer, - uploadQuoteThumbnailsToFileServer, } from '../session/utils'; import { MessageAttributes, @@ -39,11 +38,7 @@ import { VisibleMessage, VisibleMessageParams, } from '../session/messages/outgoing/visibleMessage/VisibleMessage'; -import { - uploadAttachmentsV3, - uploadLinkPreviewsV3, - uploadQuoteThumbnailsV3, -} from '../session/utils/AttachmentsV2'; +import { uploadAttachmentsV3, uploadLinkPreviewsV3 } from '../session/utils/AttachmentsV2'; import { isUsFromCache } from '../session/utils/User'; import { buildSyncMessage } from '../session/utils/sync/syncUtils'; import { @@ -69,7 +64,6 @@ import { getAbsoluteAttachmentPath, loadAttachmentData, loadPreviewData, - loadQuoteData, } from '../types/MessageAttachment'; import { ReactionList } from '../types/Reaction'; import { getAttachmentMetadata } from '../types/message/initializeAttachmentMetadata'; @@ -787,7 +781,7 @@ export class MessageModel extends Model { (this.get('attachments') || []).map(loadAttachmentData) ); const body = this.get('body'); - const quoteWithData = await loadQuoteData(this.get('quote')); + const previewWithData = await loadPreviewData(this.get('preview')); const { hasAttachments, hasVisualMediaAttachments, hasFileAttachments } = @@ -799,7 +793,6 @@ export class MessageModel extends Model { let attachmentPromise; let linkPreviewPromise; - let quotePromise; const fileIdsToLink: Array = []; // we can only send a single preview @@ -810,33 +803,18 @@ export class MessageModel extends Model { const openGroupV2 = conversation.toOpenGroupV2(); attachmentPromise = uploadAttachmentsV3(finalAttachments, openGroupV2); linkPreviewPromise = uploadLinkPreviewsV3(firstPreviewWithData, openGroupV2); - quotePromise = uploadQuoteThumbnailsV3(openGroupV2, quoteWithData); } else { // if that's not an sogs, the file is uploaded to the file server instead attachmentPromise = uploadAttachmentsToFileServer(finalAttachments); linkPreviewPromise = uploadLinkPreviewToFileServer(firstPreviewWithData); - quotePromise = uploadQuoteThumbnailsToFileServer(quoteWithData); } - const [attachments, preview, quote] = await Promise.all([ - attachmentPromise, - linkPreviewPromise, - quotePromise, - ]); + const [attachments, preview] = await Promise.all([attachmentPromise, linkPreviewPromise]); fileIdsToLink.push(...attachments.map(m => attachmentIdAsStrFromUrl(m.url))); if (preview && preview.image?.url) { fileIdsToLink.push(attachmentIdAsStrFromUrl(preview.image.url)); } - if (quote && quote.attachments?.length && quote.attachments[0].thumbnail) { - // typing for all of this Attachment + quote + preview + send or unsend is pretty bad - const firstQuoteAttachmentUrl = - 'url' in quote.attachments[0].thumbnail ? 
quote.attachments[0].thumbnail.url : undefined; - if (firstQuoteAttachmentUrl && attachmentIdAsStrFromUrl(firstQuoteAttachmentUrl)) { - fileIdsToLink.push(attachmentIdAsStrFromUrl(firstQuoteAttachmentUrl)); - } - } - const isFirstAttachmentVoiceMessage = finalAttachments?.[0]?.isVoiceMessage; if (isFirstAttachmentVoiceMessage) { attachments[0].flags = SignalService.AttachmentPointer.Flags.VOICE_MESSAGE; @@ -845,13 +823,14 @@ export class MessageModel extends Model { window.log.info( `Upload of message data for message ${this.idForLogging()} is finished in ${ Date.now() - start - }ms.` + }ms. Attachments: ${attachments.map(m => m.url)}` ); + return { body, attachments, preview, - quote, + quote: this.get('quote'), fileIdsToLink: uniq(fileIdsToLink), }; } diff --git a/ts/receiver/attachments.ts b/ts/receiver/attachments.ts index dc8fe383cb..2b5a479b59 100644 --- a/ts/receiver/attachments.ts +++ b/ts/receiver/attachments.ts @@ -1,4 +1,4 @@ -import { omit, startsWith } from 'lodash'; +import { omit } from 'lodash'; import { MessageModel } from '../models/message'; import { Data } from '../data/data'; @@ -10,12 +10,14 @@ import { callUtilsWorker } from '../webworker/workers/browser/util_worker_interf import { sogsV3FetchFileByFileID } from '../session/apis/open_group_api/sogsv3/sogsV3FetchFile'; import { OpenGroupData } from '../data/opengroups'; import { OpenGroupRequestCommonType } from '../data/types'; -import { - downloadFileFromFileServer, - fileServerURL, -} from '../session/apis/file_server_api/FileServerApi'; +import { downloadFileFromFileServer } from '../session/apis/file_server_api/FileServerApi'; +import { FileFromFileServerDetails } from '../session/apis/file_server_api/types'; +import { MultiEncryptWrapperActions } from '../webworker/workers/browser/libsession_worker_interface'; -export async function downloadAttachment(attachment: { +/** + * Note: the url must have the serverPubkey as a query parameter + */ +export async function downloadAttachmentFs(attachment: { url: string; id?: string; isRaw?: boolean; @@ -23,24 +25,16 @@ export async function downloadAttachment(attachment: { digest?: string; size?: number; }) { - const asURL = new URL(attachment.url); - const serverUrl = asURL.origin; - - // is it an attachment hosted on the file server - const defaultFileServer = startsWith(serverUrl, fileServerURL); + const toDownload = new FileFromFileServerDetails(attachment.url); let res: ArrayBuffer | null = null; - // try to get the fileId from the end of the URL - const attachmentId = attachmentIdAsStrFromUrl(attachment.url); - if (!defaultFileServer) { - window.log.warn( - `downloadAttachment attachment is neither opengroup attachment nor fileserver... Dropping it ${asURL.href}` - ); - throw new Error('Attachment url is not opengroupv2 nor fileserver. Unsupported'); - } - window?.log?.info('Download v2 file server attachment', attachmentId); - res = await downloadFileFromFileServer(attachmentId); + window?.log?.info( + 'Download v2 file server attachment', + toDownload.fullUrl.toString(), + toDownload.serverEd25519Pk + ); + res = await downloadFileFromFileServer(toDownload); if (!res?.byteLength) { window?.log?.error('Failed to download attachment. 
Length is 0'); @@ -52,18 +46,36 @@ export async function downloadAttachment(attachment: { if (!attachment.isRaw) { const { key, digest, size } = attachment; - if (!key || !digest) { + // Note: if key is set but digest is not, it means we have a libsession deterministic encryption + if (!key) { throw new Error('Attachment is not raw but we do not have a key to decode it'); } + if (!size) { throw new Error('Attachment expected size is 0'); } - const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; - const digestBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', digest)) as ArrayBuffer; + if (!toDownload.deterministicEncryption) { + const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; + const digestBuffer = (await callUtilsWorker( + 'fromBase64ToArrayBuffer', + digest + )) as ArrayBuffer; + + data = await decryptAttachment(data, keyBuffer, digestBuffer); + } else { + window.log.debug( + `${attachment.url} attachment has deterministicEncryption flag set, assuming it is deterministic encryption` + ); - data = await decryptAttachment(data, keyBuffer, digestBuffer); + const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; + const decrypted = await MultiEncryptWrapperActions.attachmentDecrypt({ + encryptedData: new Uint8Array(data), + decryptionKey: new Uint8Array(keyBuffer), + }); + data = decrypted.decryptedData.buffer; + } if (size !== data.byteLength) { // we might have padding, check that all the remaining bytes are padding bytes // otherwise we have an error. @@ -148,7 +160,6 @@ async function processNormalAttachments( convo: ConversationModel ): Promise { const isOpenGroupV2 = convo.isOpenGroupV2(); - if (message.isTrustedForAttachmentDownload()) { const openGroupV2Details = (isOpenGroupV2 && convo.toOpenGroupV2()) || undefined; const attachments = await Promise.all( @@ -202,48 +213,6 @@ async function processPreviews(message: MessageModel, convo: ConversationModel): return addedCount; } -async function processQuoteAttachments( - message: MessageModel, - convo: ConversationModel -): Promise { - let addedCount = 0; - - const quote = message.get('quote'); - - if (!quote || !quote.attachments || !quote.attachments.length) { - return 0; - } - const isOpenGroupV2 = convo.isOpenGroupV2(); - const openGroupV2Details = (isOpenGroupV2 && convo.toOpenGroupV2()) || undefined; - - for (let index = 0; index < quote.attachments.length; index++) { - // If we already have a path, then we copied this image from the quoted - // message and we don't need to download the attachment. 
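To make the `deterministicEncryption` branch of `downloadAttachmentFs` above concrete, here is what a received pointer can look like and how `FileFromFileServerDetails` exposes it (the file id is a placeholder; the pubkey shown is the default file server's ed25519 key from this diff):

```ts
// Pointer for a deterministically encrypted file on the default file server:
// the fragment carries `p` (server ed25519 pubkey, hex) and `d` (deterministic-encryption marker).
const details = new FileFromFileServerDetails(
  'http://filev2.getsession.org/file/abc123#p=b8eef9821445ae16e2e97ef8aa6fe782fd11ad5253cd6723b281341dba22e371&d='
);

details.fileId; // 'abc123'
details.deterministicEncryption; // true -> decrypt with MultiEncryptWrapperActions.attachmentDecrypt
// without `d` in the fragment it is false -> legacy decryptAttachment(data, key, digest)
```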
- const attachment = quote.attachments[index]; - - if (!attachment.thumbnail || attachment.thumbnail.path) { - continue; - } - - addedCount += 1; - - // eslint-disable-next-line no-await-in-loop - const thumbnail = await AttachmentDownloads.addJob(attachment.thumbnail, { - messageId: message.id, - type: 'quote', - index, - isOpenGroupV2, - openGroupV2Details, - }); - - quote.attachments[index] = { ...attachment, thumbnail }; - } - - message.setQuote(quote); - - return addedCount; -} - export async function queueAttachmentDownloads( message: MessageModel, conversation: ConversationModel @@ -251,11 +220,8 @@ export async function queueAttachmentDownloads( let count = 0; count += await processNormalAttachments(message, message.get('attachments') || [], conversation); - count += await processPreviews(message, conversation); - count += await processQuoteAttachments(message, conversation); - if (count > 0) { await Data.saveMessage(message.cloneAttributes()); } diff --git a/ts/receiver/contentMessage.ts b/ts/receiver/contentMessage.ts index eb0382ca7c..876bd03b9f 100644 --- a/ts/receiver/contentMessage.ts +++ b/ts/receiver/contentMessage.ts @@ -744,7 +744,7 @@ async function handleMessageRequestResponse( const srcProfileDetails = srcConvo.getPrivateProfileDetails(); const srcAvatarPath = srcConvo.getAvatarInProfilePath(); const srcFallbackAvatarPath = srcConvo.getFallbackAvatarInProfilePath(); - const srcProfilePic = srcProfileDetails.toProfilePicture(); + const srcProfilePic = srcProfileDetails.toHexProfilePicture(); const avatarChanges = srcAvatarPath && srcFallbackAvatarPath && srcProfilePic.url && srcProfilePic.key diff --git a/ts/receiver/dataMessage.ts b/ts/receiver/dataMessage.ts index d84acd2fa1..ddf68de507 100644 --- a/ts/receiver/dataMessage.ts +++ b/ts/receiver/dataMessage.ts @@ -69,19 +69,6 @@ function cleanAttachments(decryptedDataMessage: SignalService.DataMessage) { if (quote.id) { quote.id = toNumber(quote.id); } - - quote.attachments = (quote.attachments || []).map((item: any) => { - const { thumbnail } = item; - - if (!thumbnail || thumbnail.length === 0) { - return item; - } - - return { - ...item, - thumbnail: cleanAttachment(item.thumbnail), - }; - }); } } diff --git a/ts/receiver/queuedJob.ts b/ts/receiver/queuedJob.ts index 4f43c89989..d9e981a843 100644 --- a/ts/receiver/queuedJob.ts +++ b/ts/receiver/queuedJob.ts @@ -22,17 +22,11 @@ import { import { showMessageRequestBannerOutsideRedux } from '../state/ducks/userConfig'; import { selectMemberInviteSentOutsideRedux } from '../state/selectors/groups'; import { getHideMessageRequestBannerOutsideRedux } from '../state/selectors/userConfig'; -import { GoogleChrome } from '../util'; import { LinkPreviews } from '../util/linkPreviews'; import { GroupV2Receiver } from './groupv2/handleGroupV2Message'; import { Constants } from '../session'; import { Timestamp } from '../types/timestamp/timestamp'; -function contentTypeSupported(type: string): boolean { - const Chrome = GoogleChrome; - return Chrome.isImageTypeSupported(type) || Chrome.isVideoTypeSupported(type); -} - function isMessageModel( msg: MessageModel | MessageModelPropsWithoutConvoProps ): msg is MessageModel { @@ -50,18 +44,16 @@ async function copyFromQuotedMessage( if (!quote) { return; } - const { attachments, id: quoteId, author } = quote; + const { id: quoteId, author } = quote; const quoteLocal: Quote = { - attachments: attachments || null, + attachments: null, author, id: _.toNumber(quoteId), text: null, referencedMessageNotFound: false, }; - const 
firstAttachment = attachments?.[0] || undefined; - const id = _.toNumber(quoteId); // First we try to look for the quote in memory @@ -99,11 +91,6 @@ async function copyFromQuotedMessage( window?.log?.info(`Found quoted message id: ${id}`); quoteLocal.referencedMessageNotFound = false; - // NOTE we send the entire body to be consistent with the other platforms - quoteLocal.text = - (isMessageModel(quotedMessage) - ? quotedMessage.get('body') - : quotedMessage.propsForMessage.text) || ''; if (isMessageModel(quotedMessage)) { window.inboxStore?.dispatch(pushQuotedMessageDetails(quotedMessage.getMessageModelProps())); @@ -111,52 +98,6 @@ async function copyFromQuotedMessage( window.inboxStore?.dispatch(pushQuotedMessageDetails(quotedMessage)); } - // no attachments, just save the quote with the body - if ( - !firstAttachment || - !firstAttachment.contentType || - !contentTypeSupported(firstAttachment.contentType) - ) { - msg.setQuote(quoteLocal); - return; - } - - firstAttachment.thumbnail = null; - - const queryAttachments = - (isMessageModel(quotedMessage) - ? quotedMessage.get('attachments') - : quotedMessage.propsForMessage.attachments) || []; - - if (queryAttachments.length > 0) { - const queryFirst = queryAttachments[0]; - const { thumbnail } = queryFirst; - - if (thumbnail && thumbnail.path) { - firstAttachment.thumbnail = { - ...thumbnail, - copied: true, - }; - } - } - - const queryPreview = - (isMessageModel(quotedMessage) - ? quotedMessage.get('preview') - : quotedMessage.propsForMessage.previews) || []; - if (queryPreview.length > 0) { - const queryFirst = queryPreview[0]; - const { image } = queryFirst; - - if (image && image.path) { - firstAttachment.thumbnail = { - ...image, - copied: true, - }; - } - } - quoteLocal.attachments = [firstAttachment]; - msg.setQuote(quoteLocal); } @@ -166,9 +107,12 @@ async function copyFromQuotedMessage( function handleLinkPreviews(messageBody: string, messagePreview: any, message: MessageModel) { const urls = LinkPreviews.findLinks(messageBody); const incomingPreview = messagePreview || []; - const preview = incomingPreview.filter( - (item: any) => (item.image || item.title) && urls.includes(item.url) - ); + const preview = incomingPreview + .filter((item: any) => (item.image || item.title) && urls.includes(item.url)) + .map((p: any) => ({ + ...p, + pending: true, + })); if (preview.length < incomingPreview.length) { window?.log?.info( `${message.idForLogging()}: Eliminated ${ @@ -307,7 +251,10 @@ async function handleRegularMessage( message.set({ // quote: rawDataMessage.quote, // do not do this copy here, it must be done only in copyFromQuotedMessage() - attachments: rawDataMessage.attachments?.map(m => ({ ...m, pending: true })), + attachments: rawDataMessage.attachments?.map(m => ({ + ...m, + pending: true, + })), body, conversationId: conversation.id, messageHash, diff --git a/ts/receiver/receiver.ts b/ts/receiver/receiver.ts index 473ee5374c..ba5a7e6df3 100644 --- a/ts/receiver/receiver.ts +++ b/ts/receiver/receiver.ts @@ -20,8 +20,6 @@ import { createTaskWithTimeout } from '../session/utils/TaskWithTimeout'; import { UnprocessedParameter } from '../types/sqlSharedTypes'; import { getEnvelopeId } from './common'; -export { downloadAttachment } from './attachments'; - const incomingMessagePromises: Array> = []; export async function handleSwarmContentDecryptedWithTimeout({ diff --git a/ts/session/apis/file_server_api/FileServerApi.ts b/ts/session/apis/file_server_api/FileServerApi.ts index b5bb3c9cdb..2b4467fdb5 100644 --- 
a/ts/session/apis/file_server_api/FileServerApi.ts +++ b/ts/session/apis/file_server_api/FileServerApi.ts @@ -1,5 +1,5 @@ import AbortController from 'abort-controller'; -import { isEmpty, isFinite, isNumber, isString } from 'lodash'; +import { isEmpty, isFinite, isNumber, isString, toNumber } from 'lodash'; import { BlindingActions } from '../../../webworker/workers/browser/libsession_worker_interface'; import { OnionSending } from '../../onions/onionSend'; @@ -13,44 +13,35 @@ import { DURATION } from '../../constants'; import { isReleaseChannel, type ReleaseChannels } from '../../../updater/types'; import { Storage } from '../../../util/storage'; import { OnionV4 } from '../../onions/onionv4'; -import { SERVER_HOSTS } from '..'; +import { FileFromFileServerDetails } from './types'; +import { queryParamDeterministicEncryption, queryParamServerEd25519Pubkey } from '../../url'; +import { FS, type FILE_SERVER_TARGET_TYPE } from './FileServerTarget'; -export const fileServerURL = `http://${SERVER_HOSTS.FILE_SERVER}`; - -export const fileServerPubKey = 'da21e1d886c6fbaea313f75298bd64aab03a97ce985b46bb2dad9f2089c8ee59'; const RELEASE_VERSION_ENDPOINT = '/session_version'; - -const POST_GET_FILE_ENDPOINT = '/file'; - -function fileUrlToFileId(fullURL?: string) { - const prefix = `${fileServerURL}${POST_GET_FILE_ENDPOINT}/`; - if (!fullURL || !fullURL.startsWith(prefix)) { - return null; - } - const fileId = fullURL.substring(prefix.length); - - if (!fileId) { - return null; - } - return fileId; -} +const FILE_ENDPOINT = '/file'; /** * Upload a file to the file server v2 using the onion v4 encoding * @param fileContent the data to send + * @param deterministicEncryption whether the file is deterministically encrypted or not * @returns null or the complete URL to share this file */ export const uploadFileToFsWithOnionV4 = async ( - fileContent: ArrayBuffer + fileContent: ArrayBuffer, + deterministicEncryption: boolean ): Promise<{ fileUrl: string; expiresMs: number } | null> => { if (!fileContent || !fileContent.byteLength) { return null; } + // TODO: remove this once QA is done + const target = process.env.POTATO_FS ? 'POTATO' : 'DEFAULT'; + const result = await OnionSending.sendBinaryViaOnionV4ToFileServer({ abortSignal: new AbortController().signal, bodyBinary: new Uint8Array(fileContent), - endpoint: POST_GET_FILE_ENDPOINT, + target, + endpoint: FILE_ENDPOINT, method: 'POST', timeoutMs: 30 * DURATION.SECONDS, // longer time for file upload headers: window.sessionFeatureFlags.fsTTL30s ? { 'X-FS-TTL': '30' } : {}, @@ -72,7 +63,14 @@ export const uploadFileToFsWithOnionV4 = async ( ) { return null; } - const fileUrl = `${fileServerURL}${POST_GET_FILE_ENDPOINT}/${fileId}`; + + // we now have the `fileUrl` provide the `serverPubkey` and the deterministic flag as an url fragment. 
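`uploadFileToFsWithOnionV4` now takes a `deterministicEncryption` flag and returns a pointer whose fragment encodes the server pubkey and that flag; a rough usage sketch (caller and variable names are illustrative):

```ts
// encryptedData comes either from encryptProfile (legacy, random key)
// or from MultiEncryptWrapperActions.attachmentEncrypt (deterministic, seed-derived key).
const uploaded = await uploadFileToFsWithOnionV4(encryptedData, /* deterministicEncryption */ true);
if (uploaded) {
  const { fileUrl, expiresMs } = uploaded;
  // fileUrl looks like http://filev2.getsession.org/file/<id>#p=<server ed25519 pk>&d=
  // expiresMs is the server-side expiry, in milliseconds
}
```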
+ const urlParams = new URLSearchParams(); + urlParams.set(queryParamServerEd25519Pubkey, FS.FILE_SERVERS[target].edPk); + if (deterministicEncryption) { + urlParams.set(queryParamDeterministicEncryption, ''); + } + const fileUrl = `${FS.FILE_SERVERS[target].url}${FILE_ENDPOINT}/${fileId}#${urlParams.toString()}`; const expiresMs = Math.floor(expires * 1000); return { fileUrl, @@ -86,42 +84,21 @@ export const uploadFileToFsWithOnionV4 = async ( * @returns the data as an Uint8Array or null */ export const downloadFileFromFileServer = async ( - fileIdOrCompleteUrl: string + toDownload: FileFromFileServerDetails ): Promise => { - let fileId = fileIdOrCompleteUrl; - if (!fileIdOrCompleteUrl) { - window?.log?.warn('Empty url to download for fileserver'); - return null; - } - - if (fileIdOrCompleteUrl.lastIndexOf('/') >= 0) { - fileId = fileId.substring(fileIdOrCompleteUrl.lastIndexOf('/') + 1); - } - - if (fileId.startsWith('/')) { - fileId = fileId.substring(1); - } - - if (!fileId) { - window.log.info('downloadFileFromFileServer given empty fileId'); - return null; - } - - const urlToGet = `${POST_GET_FILE_ENDPOINT}/${fileId}`; if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`about to try to download fsv2: "${urlToGet}"`); + window.log.info(`about to try to download fsv2: "${toDownload.fullUrl}"`); } // this throws if we get a 404 from the file server const result = await OnionSending.getBinaryViaOnionV4FromFileServer({ abortSignal: new AbortController().signal, - endpoint: urlToGet, - method: 'GET', + fileToGet: toDownload, throwError: true, timeoutMs: 30 * DURATION.SECONDS, // longer time for file download }); if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`download fsv2: "${urlToGet} got result:`, JSON.stringify(result)); + window.log.info(`download fsv2: "${toDownload.fullUrl} got result:`, JSON.stringify(result)); } if (!result) { return null; @@ -190,6 +167,7 @@ export const getLatestReleaseFromFileServer = async ( stringifiedBody: null, headers, timeoutMs: 10 * DURATION.SECONDS, + target: 'DEFAULT' as const, }; const result = await OnionSending.sendJsonViaOnionV4ToFileServer(params); @@ -206,34 +184,49 @@ export const getLatestReleaseFromFileServer = async ( }; /** - * Fetch the expiry in ms of the corresponding file. + * Extend a file expiry from the file server. + * This only works with files that have an alphanumeric id. 
+ * */ -export const getFileInfoFromFileServer = async ( - fileUrl: string -): Promise<{ expiryMs: number } | null> => { - const fileId = fileUrlToFileId(fileUrl); +export const extendFileExpiry = async (fileId: string, fsTarget: FILE_SERVER_TARGET_TYPE) => { + // TODO: remove this once QA is done - if (!fileId) { - throw new Error("getFileInfoFromFileServer: fileUrl doesn't start with the file server url"); + if (!FS.supportsFsExtend(fsTarget)) { + throw new Error('extendFileExpiry: only works with potato for now'); + } + if (window.sessionFeatureFlags?.debugServerRequests) { + window.log.info(`about to renew expiry of file: "${fileId}"`); } - const result = await OnionSending.sendJsonViaOnionV4ToFileServer({ + const method = 'POST'; + const endpoint = `/file/${fileId}/extend`; + const params = { abortSignal: new AbortController().signal, + endpoint, + method, stringifiedBody: null, - endpoint: `${POST_GET_FILE_ENDPOINT}/${fileId}/info`, - method: 'GET', - timeoutMs: 10 * DURATION.SECONDS, headers: {}, - }); + timeoutMs: 10 * DURATION.SECONDS, + target: fsTarget, + }; - const fileExpirySeconds = result?.body?.expires as number | undefined; + const result = await OnionSending.sendJsonViaOnionV4ToFileServer(params); - if (!batchGlobalIsSuccess(result)) { + if (!batchGlobalIsSuccess(result) || OnionV4.parseStatusCodeFromV4Request(result) !== 200) { return null; } - if (!fileExpirySeconds || !isNumber(fileExpirySeconds) || !isFinite(fileExpirySeconds)) { + const { + expires: fileNewExpirySeconds, + uploaded: fileUploadedSeconds, + size, + } = result?.body as any; + if (!fileNewExpirySeconds) { return null; } - return { expiryMs: Math.floor(fileExpirySeconds * 1000) }; + return { + fileNewExpiryMs: Math.floor(fileNewExpirySeconds * 1000), // the expires/uploaded have the ms in the decimals (i.e `1761002358.02229`) + fileUploadedMs: Math.floor(fileUploadedSeconds * 1000), + size: toNumber(size), + }; }; diff --git a/ts/session/apis/file_server_api/FileServerTarget.ts b/ts/session/apis/file_server_api/FileServerTarget.ts new file mode 100644 index 0000000000..b4de3f275e --- /dev/null +++ b/ts/session/apis/file_server_api/FileServerTarget.ts @@ -0,0 +1,77 @@ +import { SERVER_HOSTS } from '..'; +import { assertUnreachable } from '../../../types/sqlSharedTypes'; + +enum FS_FEATURES { + fsExtend = 'fsExtend', +} + +type FileServerConfigType = { + url: string; + xPk: string; + edPk: string; + extraFeatures: Array; +}; + +// not exported/included in the SERVER_HOSTS as this is for testing only +const POTATO_FS_HOST = 'potatofiles.getsession.org'; + +const FILE_SERVERS: Record<'DEFAULT' | 'POTATO', FileServerConfigType> = { + DEFAULT: { + url: `http://${SERVER_HOSTS.DEFAULT_FILE_SERVER}`, + xPk: '09324794aa9c11948189762d198c618148e9136ac9582068180661208927ef34', + edPk: 'b8eef9821445ae16e2e97ef8aa6fe782fd11ad5253cd6723b281341dba22e371', + extraFeatures: [], + }, + POTATO: { + url: `http://${POTATO_FS_HOST}`, + // potato has different keys than the default + edPk: 'ff86dcd4b26d1bfec944c59859494248626d6428efc12168749d65a1b92f5e28', + xPk: 'fc097b06821c98a2db75ce02e521cef5fd9d3446e42e81d843c4c8c4e9260f48', + extraFeatures: [FS_FEATURES.fsExtend], + }, +}; + +const FILE_SERVER_TARGETS = Object.keys(FILE_SERVERS) as Array; + +function isDefaultFileServer(edOrXPk: string) { + return edOrXPk === FILE_SERVERS.DEFAULT.edPk || edOrXPk === FILE_SERVERS.DEFAULT.xPk; +} + +function supportsFsExtend(target: FILE_SERVER_TARGET_TYPE) { + return FILE_SERVERS[target].extraFeatures.includes(FS_FEATURES.fsExtend); +} + 
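The helpers above are consumed through the `FS` export at the bottom of this module; for instance (keys taken from the `FILE_SERVERS` table above):

```ts
FS.supportsFsExtend('POTATO'); // true  -> extendFileExpiry may be attempted
FS.supportsFsExtend('DEFAULT'); // false -> extendFileExpiry would throw, so callers must check first

// isDefaultFileServer matches either the ed25519 or the x25519 key of the DEFAULT entry
FS.isDefaultFileServer('b8eef9821445ae16e2e97ef8aa6fe782fd11ad5253cd6723b281341dba22e371'); // true (DEFAULT edPk)
FS.isDefaultFileServer('ff86dcd4b26d1bfec944c59859494248626d6428efc12168749d65a1b92f5e28'); // false (POTATO edPk)
```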
+function fileUrlToFileTarget(url: string): FILE_SERVER_TARGET_TYPE { + if (!URL.canParse(url)) { + throw new Error(`fileUrlToFileTarget: url can't be parsed: "${url}"`); + } + const parsedUrl = new URL(url); + // this for loop is just here to get a compile error if we ever add a fs target + for (let index = 0; index < FILE_SERVER_TARGETS.length; index++) { + const target = FILE_SERVER_TARGETS[index]; + switch (target) { + case 'POTATO': + if (parsedUrl.host.includes(POTATO_FS_HOST)) { + return 'POTATO'; + } + break; + case 'DEFAULT': + if (parsedUrl.host.includes(SERVER_HOSTS.DEFAULT_FILE_SERVER)) { + return 'DEFAULT'; + } + break; + default: + assertUnreachable(target, 'fileUrlToFileTarget: target is not a valid target'); + } + } + throw new Error(`fileUrlToFileTarget: url host is not a valid file server: "${url}"`); +} + +export const FS = { + isDefaultFileServer, + supportsFsExtend, + FILE_SERVERS, + fileUrlToFileTarget, +}; + +export type FILE_SERVER_TARGET_TYPE = keyof typeof FILE_SERVERS; diff --git a/ts/session/apis/file_server_api/types.ts b/ts/session/apis/file_server_api/types.ts new file mode 100644 index 0000000000..5ee99c969f --- /dev/null +++ b/ts/session/apis/file_server_api/types.ts @@ -0,0 +1,55 @@ +import { + extractDetailsFromUrlFragment, + extractLastPathSegment, + parseFileServerUrl, +} from '../../url'; + +export function fileServerUrlToFileId(fullURL?: string) { + const parsedUrl = parseFileServerUrl(fullURL); + if (!parsedUrl) { + return { fileId: '', fullUrl: null }; + } + const fileId = extractLastPathSegment(parsedUrl); + + if (!fileId) { + return { fileId: '', fullUrl: null }; + } + return { fileId, fullUrl: parsedUrl }; +} + +function getDownloadFileDetails(urlWithFragment: string) { + const { fileId, fullUrl } = fileServerUrlToFileId(urlWithFragment); + if (!fileId || !fullUrl) { + throw new Error('DownloadFromFileServer: fileId is empty or not a file server url'); + } + + const { serverEd25519Pk, deterministicEncryption } = extractDetailsFromUrlFragment(fullUrl); + + return { fileId, fullUrl, serverEd25519Pk, deterministicEncryption }; +} + +/** + * A utility class to store a file that needs to be downloaded from a file server. + * It validates that the url is one of the valid file server urls. + * Throws if the url is not valid or not a file server url. + */ +export class FileFromFileServerDetails { + public readonly fileId: string; + public readonly fullUrl: URL; + public readonly serverEd25519Pk: string; + public readonly deterministicEncryption: boolean; + + /** + * Construct a FileFromFileServer object. + * @param url the url to download from. 
It must have the serverPubkey as a query parameter (serverPubkey) + */ + constructor(url: string) { + const { fileId, fullUrl, serverEd25519Pk, deterministicEncryption } = + getDownloadFileDetails(url); + + this.fileId = fileId; + this.fullUrl = fullUrl; + this.serverEd25519Pk = serverEd25519Pk; + this.deterministicEncryption = deterministicEncryption; + } +} diff --git a/ts/session/apis/index.ts b/ts/session/apis/index.ts index b30f67d560..04562dbf8c 100644 --- a/ts/session/apis/index.ts +++ b/ts/session/apis/index.ts @@ -1,4 +1,4 @@ export const SERVER_HOSTS = { - FILE_SERVER: 'filev2.getsession.org', + DEFAULT_FILE_SERVER: 'filev2.getsession.org', NETWORK_SERVER: 'networkv1.getsession.org', }; diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts index 8ed68bde5f..21e4bc526b 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts @@ -33,7 +33,11 @@ export function fileDetailsToURL({ } function imageUrlToImageId(imageFullUrl?: string) { - const imageId = imageFullUrl?.split('/').pop(); + if (!imageFullUrl) { + return null; + } + const parsedUrl = URL.canParse(imageFullUrl) && new URL(imageFullUrl); + const imageId = parsedUrl && parsedUrl?.pathname.split('/').pop(); if (isNil(imageId) || !isNumber(toNumber(imageId)) || !isFinite(toNumber(imageId))) { return null; } diff --git a/ts/session/apis/push_notification_api/PnServer.ts b/ts/session/apis/push_notification_api/PnServer.ts deleted file mode 100644 index 23195b8623..0000000000 --- a/ts/session/apis/push_notification_api/PnServer.ts +++ /dev/null @@ -1,25 +0,0 @@ -import AbortController from 'abort-controller'; -import { callUtilsWorker } from '../../../webworker/workers/browser/util_worker_interface'; -import { OnionSending } from '../../onions/onionSend'; -import { DURATION } from '../../constants'; - -export const pnServerPubkeyHex = '642a6585919742e5a2d4dc51244964fbcd8bcab2b75612407de58b810740d049'; -export const hrefPnServerProd = 'live.apns.getsession.org'; -export const pnServerUrl = `https://${hrefPnServerProd}`; - -export async function notifyPnServer(wrappedEnvelope: ArrayBuffer, sentTo: string) { - const wrappedEnvelopeBase64 = await callUtilsWorker('arrayBufferToStringBase64', wrappedEnvelope); - - // we actually don't care about the result of this request, and it's better like this - // as it is not a response encoded back for us with a symmetric key - await OnionSending.sendJsonViaOnionV4ToPnServer({ - abortSignal: new AbortController().signal, - endpoint: '/notify', - method: 'POST', - stringifiedBody: JSON.stringify({ - data: wrappedEnvelopeBase64, - send_to: sentTo, - }), - timeoutMs: 10 * DURATION.SECONDS, - }); -} diff --git a/ts/session/apis/push_notification_api/index.ts b/ts/session/apis/push_notification_api/index.ts deleted file mode 100644 index 3c33137e6f..0000000000 --- a/ts/session/apis/push_notification_api/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as PnServer from './PnServer'; - -export { PnServer }; diff --git a/ts/session/apis/snode_api/onions.ts b/ts/session/apis/snode_api/onions.ts index ea6553be47..7cde11b417 100644 --- a/ts/session/apis/snode_api/onions.ts +++ b/ts/session/apis/snode_api/onions.ts @@ -17,7 +17,6 @@ import { Snode } from '../../../data/types'; import { callUtilsWorker } from '../../../webworker/workers/browser/util_worker_interface'; import { encodeV4Request } from '../../onions/onionv4'; import { SnodeResponseError } from 
'../../utils/errors'; -import { hrefPnServerProd } from '../push_notification_api/PnServer'; import { ERROR_CODE_NO_CONNECT } from './SNodeAPI'; import { MergedAbortSignal, WithAbortSignal, WithTimeoutMs } from './requestWith'; import { @@ -185,8 +184,7 @@ async function buildOnionCtxs( const relayingToFinalDestination = i === firstPos; // if last position if (relayingToFinalDestination && finalRelayOptions) { - const isCallToPn = finalRelayOptions?.host === hrefPnServerProd; - const target = !isCallToPn && !useV4 ? '/loki/v3/lsrpc' : '/oxen/v4/lsrpc'; + const target = useV4 ? '/oxen/v4/lsrpc' : '/loki/v3/lsrpc'; dest = { host: finalRelayOptions.host, @@ -389,7 +387,6 @@ async function processAnyOtherErrorOnPath( } processOxenServerError(status, ciphertext); - throw new Error(`Bad Path handled. Retry this request. Status: ${status}`); } } @@ -428,7 +425,6 @@ async function processAnyOtherErrorAtDestination( snodeEd25519: destinationEd25519, associatedWith, }); - throw new Error(`Bad Path handled. Retry this request. Status: ${status}`); } } diff --git a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts index a6d36812c4..8f4f7a4fa2 100644 --- a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts +++ b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts @@ -56,8 +56,6 @@ export interface QuotedAttachmentWithUrl extends QuotedAttachmentCommon { export interface Quote { id: number; author: string; - text?: string; - attachments?: Array; } export type VisibleMessageParams = ExpirableMessageParams & @@ -123,7 +121,7 @@ export class VisibleMessage extends DataMessage { dataMessage.body = this.body; } - dataMessage.attachments = this.attachments || []; + dataMessage.attachments = this.attachments ?? 
[]; if (this.reaction) { dataMessage.reaction = this.reaction; @@ -143,23 +141,6 @@ export class VisibleMessage extends DataMessage { dataMessage.quote.id = this.quote.id; dataMessage.quote.author = this.quote.author; - dataMessage.quote.text = this.quote.text; - if (this.quote.attachments) { - dataMessage.quote.attachments = this.quote.attachments.map(attachment => { - const quotedAttachment = new SignalService.DataMessage.Quote.QuotedAttachment(); - if (attachment.contentType) { - quotedAttachment.contentType = attachment.contentType; - } - if (attachment.fileName) { - quotedAttachment.fileName = attachment.fileName; - } - if (attachment.thumbnail && (attachment.thumbnail as any).id) { - quotedAttachment.thumbnail = attachment.thumbnail as any; // be sure to keep the typescript guard on id above - } - - return quotedAttachment; - }); - } } if (Array.isArray(this.preview)) { diff --git a/ts/session/onions/onionSend.ts b/ts/session/onions/onionSend.ts index 1bf2260d7e..72303c2cb9 100644 --- a/ts/session/onions/onionSend.ts +++ b/ts/session/onions/onionSend.ts @@ -1,6 +1,7 @@ import { AbortSignal } from 'abort-controller'; import { toNumber } from 'lodash'; import pRetry from 'p-retry'; +import { crypto_sign_ed25519_pk_to_curve25519, from_hex, to_hex } from 'libsodium-wrappers-sumo'; import { OnionPaths } from '.'; import { Snode } from '../../data/types'; @@ -10,7 +11,6 @@ import { addBinaryContentTypeToHeaders, addJsonContentTypeToHeaders, } from '../apis/open_group_api/sogsv3/sogsV3SendMessage'; -import { pnServerPubkeyHex, pnServerUrl } from '../apis/push_notification_api/PnServer'; import { FinalDestNonSnodeOptions, FinalRelayOptions, @@ -23,8 +23,9 @@ import { OnionV4 } from './onionv4'; import { MergedAbortSignal, WithAbortSignal, WithTimeoutMs } from '../apis/snode_api/requestWith'; import { OnionPathEmptyError } from '../utils/errors'; import { SnodePool } from '../apis/snode_api/snodePool'; -import { fileServerURL, fileServerPubKey } from '../apis/file_server_api/FileServerApi'; import { SERVER_HOSTS } from '../apis'; +import type { FileFromFileServerDetails } from '../apis/file_server_api/types'; +import { FS, type FILE_SERVER_TARGET_TYPE } from '../apis/file_server_api/FileServerTarget'; export type OnionFetchOptions = { method: string; @@ -374,42 +375,6 @@ async function sendJsonViaOnionV4ToSogs( return res as OnionV4JSONSnodeResponse | null; } -/** - * Send some json to the PushNotification server. - * Desktop only send `/notify` requests. 
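With `text` and `attachments` reserved in the proto, a quote now travels as a bare reference, as in the `contentProto()` changes above; roughly (variable names are illustrative, see `copyFromQuotedMessage` for the local lookup):

```ts
const quote = new SignalService.DataMessage.Quote();
quote.id = quotedMessageId; // required uint64
quote.author = quotedAuthorPubkey; // required string
// fields 3 ("text") and 4 ("attachments") are reserved and never set any more;
// the receiver resolves the quoted text/thumbnail locally from the referenced message
```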
- * - * You should probably not use this function directly but instead rely on the PnServer.notifyPnServer() function - */ -async function sendJsonViaOnionV4ToPnServer( - sendOptions: WithTimeoutMs & { - endpoint: string; - method: string; - stringifiedBody: string | null; - abortSignal: AbortSignal; - } -): Promise { - const { endpoint, method, stringifiedBody, abortSignal, timeoutMs } = sendOptions; - if (!endpoint.startsWith('/')) { - throw new Error('endpoint needs a leading /'); - } - const builtUrl = new URL(`${pnServerUrl}${endpoint}`); - - const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - pnServerPubkeyHex, - builtUrl, - { - method, - headers: {}, - body: stringifiedBody, - useV4: true, - }, - false, - abortSignal, - timeoutMs - ); - return res as OnionV4JSONSnodeResponse; -} - async function sendBinaryViaOnionV4ToSogs( sendOptions: WithTimeoutMs & { serverUrl: string; @@ -481,6 +446,7 @@ async function sendBinaryViaOnionV4ToSogs( * You should probably not use this function directly, but instead rely on the FileServerAPI.uploadFileToFsWithOnionV4() */ async function sendBinaryViaOnionV4ToFileServer({ + target, endpoint, method, bodyBinary, @@ -489,19 +455,15 @@ async function sendBinaryViaOnionV4ToFileServer({ headers = {}, }: WithTimeoutMs & WithAbortSignal & { + target: FILE_SERVER_TARGET_TYPE; endpoint: string; method: string; bodyBinary: Uint8Array; headers?: Record; }): Promise { - if (!endpoint.startsWith('/')) { - throw new Error('endpoint needs a leading /'); - } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); - const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, - builtUrl, + FS.FILE_SERVERS[target].xPk, + new URL(`${FS.FILE_SERVERS[target].url}${endpoint}`), { method, headers, @@ -521,33 +483,34 @@ async function sendBinaryViaOnionV4ToFileServer({ * You should probably not use this function directly, but instead rely on the FileServerAPI.downloadFileFromFileServer() */ async function getBinaryViaOnionV4FromFileServer({ - endpoint, - method, + fileToGet, abortSignal, throwError, timeoutMs, }: WithTimeoutMs & WithAbortSignal & { - endpoint: string; - method: string; + fileToGet: FileFromFileServerDetails; throwError: boolean; }): Promise { - if (!endpoint.startsWith('/')) { - throw new Error('endpoint needs a leading /'); + if (window.sessionFeatureFlags?.debugServerRequests) { + window.log.info(`getBinaryViaOnionV4FromFileServer fsv2: "${fileToGet.fullUrl} `); } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); - if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`getBinaryViaOnionV4FromFileServer fsv2: "${builtUrl} `); + if (!fileToGet.fullUrl) { + throw new Error('getBinaryViaOnionV4FromFileServer: fullUrl is required'); } + const serverX25519Pk = to_hex( + crypto_sign_ed25519_pk_to_curve25519(from_hex(fileToGet.serverEd25519Pk)) + ); + // this throws for a bunch of reasons. // One of them, is if we get a 404 (i.e. 
the file server was reached but reported no such attachments exists) const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, - builtUrl, + serverX25519Pk, + fileToGet.fullUrl, { - method, + method: 'GET', headers: {}, body: null, useV4: true, @@ -558,8 +521,8 @@ async function getBinaryViaOnionV4FromFileServer({ ); if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info( - `getBinaryViaOnionV4FromFileServer fsv2: "${builtUrl}; got:`, + window.log.debug( + `getBinaryViaOnionV4FromFileServer fsv2: "${fileToGet.fullUrl}; got:`, JSON.stringify(res) ); } @@ -572,6 +535,7 @@ async function getBinaryViaOnionV4FromFileServer({ */ async function sendJsonViaOnionV4ToFileServer({ endpoint, + target, method, stringifiedBody, abortSignal, @@ -579,6 +543,7 @@ async function sendJsonViaOnionV4ToFileServer({ timeoutMs, }: WithAbortSignal & WithTimeoutMs & { + target: FILE_SERVER_TARGET_TYPE; endpoint: string; method: string; stringifiedBody: string | null; @@ -587,10 +552,10 @@ async function sendJsonViaOnionV4ToFileServer({ if (!endpoint.startsWith('/')) { throw new Error('endpoint needs a leading /'); } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); + const builtUrl = new URL(`${FS.FILE_SERVERS[target].url}${endpoint}`); const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, + FS.FILE_SERVERS[target].xPk, builtUrl, { method, @@ -668,7 +633,6 @@ export const OnionSending = { sendViaOnionV4ToNonSnodeWithRetries, getOnionPathForSending, sendJsonViaOnionV4ToSogs, - sendJsonViaOnionV4ToPnServer, sendBinaryViaOnionV4ToFileServer, sendBinaryViaOnionV4ToSogs, getBinaryViaOnionV4FromFileServer, diff --git a/ts/session/url/index.ts b/ts/session/url/index.ts new file mode 100644 index 0000000000..7520afdc72 --- /dev/null +++ b/ts/session/url/index.ts @@ -0,0 +1,119 @@ +import { FS } from '../apis/file_server_api/FileServerTarget'; + +export const queryParamServerEd25519Pubkey = 'p'; +export const queryParamDeterministicEncryption = 'd'; +/** + * The encryption key is a hex string, and was used to encrypt the file. + * It is the same as the profileKey for a user profile. + */ +export const queryParamEncryptionKey = 'e'; + +function parseSearchParamsFromFragment(url: URL) { + // slice to remove the leading '#' + const fragment = (url.hash || '').slice(1); + + const searchParams = new URLSearchParams(fragment); + return searchParams; +} + +/** + * Returns the serverPk/deterministicEncryption/profileKey from the provided url fragment + * Note: + * - for the default file server, the serverPk is hardcoded. + * - if no serverPk is provided, the defaultFileServerPubKey is returned. + * - if no profileKey is provided, the profileKey is null + * - if no deterministicEncryption is provided, the deterministicEncryption is false (presence is used, the value is not checked) + * + * Also, the fs serverPk is removed from the url if it is the default one. + */ +export function extractDetailsFromUrlFragment(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + // if the serverPk is not present in the fragment, we assume it is the default file server + const serverEd25519Pk = + searchParams.get(queryParamServerEd25519Pubkey) ?? FS.FILE_SERVERS.DEFAULT.edPk; + const profileKey = searchParams.get(queryParamEncryptionKey); + const deterministicEncryption = searchParams.has(queryParamDeterministicEncryption) ?? 
false; + if (!serverEd25519Pk) { + throw new Error( + 'FileFromFileServer: serverPubkey & other details are required as a fragment-query parameter for non-default file server' + ); + } + + return { + serverEd25519Pk, + deterministicEncryption, + profileKey, + urlWithoutProfileKey: removeDefaultServerPk(removeProfileKey(url)).toString(), + }; +} + +export function addProfileKeyToUrl(url: URL, profileKeyHex: string) { + const searchParams = parseSearchParamsFromFragment(url); + const profileKey = searchParams.get(queryParamEncryptionKey); + if (profileKey) { + // a profile key field is already present + return url; + } + const urlCopy = new URL(url.toString()); + searchParams.set(queryParamEncryptionKey, profileKeyHex); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +function removeProfileKey(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + const profileKey = searchParams.get(queryParamEncryptionKey); + if (!profileKey) { + // a profile key field is not present + return url; + } + const urlCopy = new URL(url.toString()); + searchParams.delete(queryParamEncryptionKey); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +function removeDefaultServerPk(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + const serverPk = searchParams.get(queryParamServerEd25519Pubkey); + if (!serverPk || !FS.isDefaultFileServer(serverPk)) { + // a serverPk is not present, or it is not the default file server + return url; + } + + const urlCopy = new URL(url.toString()); + searchParams.delete(queryParamEncryptionKey); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +export function extractLastPathSegment(url: URL) { + const lastSegment = url.pathname.split('/').filter(Boolean).pop(); + if (!lastSegment) { + return null; + } + return lastSegment; +} + +/** + * Returns the parsed url from the provided string only if that matches one of our file server urls. + */ +export function parseFileServerUrl(fullURL?: string) { + if (!fullURL) { + return null; + } + + const parsedUrl = URL.canParse(fullURL) && new URL(fullURL); + if (!parsedUrl) { + return null; + } + + if (parsedUrl.host.includes('open.getsession.org')) { + // we need to filter out communities we host on getsession.org as they do not have the same api. + return null; + } + return parsedUrl; +} diff --git a/ts/session/utils/Attachments.ts b/ts/session/utils/Attachments.ts index 4f4aa2f6dc..788fd6d483 100644 --- a/ts/session/utils/Attachments.ts +++ b/ts/session/utils/Attachments.ts @@ -1,5 +1,5 @@ import * as crypto from 'crypto'; -import _, { isEmpty, isString } from 'lodash'; +import { isEmpty, isString } from 'lodash'; import Long from 'long'; import { Attachment } from '../../types/Attachment'; @@ -10,17 +10,25 @@ import { AttachmentPointer, AttachmentPointerWithUrl, PreviewWithAttachmentUrl, - Quote, - QuotedAttachmentWithUrl, } from '../messages/outgoing/visibleMessage/VisibleMessage'; import { uploadFileToFsWithOnionV4 } from '../apis/file_server_api/FileServerApi'; +import { MultiEncryptWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; +import { UserUtils } from '.'; +import { extractLastPathSegment } from '../url'; -interface UploadParams { +type UploadParams = { attachment: Attachment; - isAvatar?: boolean; - isRaw?: boolean; + + /** + * Explicit padding is only needed for the legacy encryption, as libsession deterministic encryption already pads the data. 
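// Rough usage sketch (not part of this change set) of the fragment convention implemented by the
// helpers above: the file id lives in the URL path, while `p` (server ed25519 pubkey, hex),
// `d` (deterministic-encryption marker, presence only) and `e` (encryption/profile key, hex) live in
// the fragment and round-trip through the standard URL / URLSearchParams APIs. The file id and hex
// values below are made up for illustration; the constants are the ones exported from
// ts/session/url/index.ts above.
const exampleUrl = new URL('http://filev2.getsession.org/file/abcdefghijklmnop');
const fragment = new URLSearchParams();
fragment.set(queryParamServerEd25519Pubkey, 'aa'.repeat(32)); // hypothetical non-default server pubkey
fragment.set(queryParamDeterministicEncryption, '');          // presence alone flags deterministic encryption
fragment.set(queryParamEncryptionKey, 'bb'.repeat(32));       // hypothetical profile/encryption key
exampleUrl.hash = fragment.toString();

// Reading it back mirrors parseSearchParamsFromFragment(): drop the leading '#'.
const parsed = new URLSearchParams(exampleUrl.hash.slice(1));
parsed.has(queryParamDeterministicEncryption);        // true
parsed.get(queryParamServerEd25519Pubkey);            // 'aaaa…'; falls back to FS.FILE_SERVERS.DEFAULT.edPk when absent
exampleUrl.pathname.split('/').filter(Boolean).pop(); // 'abcdefghijklmnop', i.e. extractLastPathSegment()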
+ */ shouldPad?: boolean; -} + /** + * When using the deterministic encryption, this is the seed used to generate the encryption key (libsession encrypt) + * When not using the deterministic encryption, this is used as the encryption key (legacy encrypt) + */ + encryptionKey: Uint8Array; +}; export interface RawPreview { url: string; @@ -42,7 +50,7 @@ export interface RawQuote { } async function uploadToFileServer(params: UploadParams): Promise { - const { attachment, isRaw = false, shouldPad = false } = params; + const { attachment, shouldPad = false } = params; if (typeof attachment !== 'object' || attachment == null) { throw new Error('Invalid attachment passed.'); } @@ -64,10 +72,25 @@ async function uploadToFileServer(params: UploadParams): Promise ): Promise> { + const encryptionKey = window.sessionFeatureFlags.useDeterministicEncryption + ? await UserUtils.getUserEd25519Seed() + : crypto.randomBytes(32); + const promises = (attachments || []).map(async attachment => uploadToFileServer({ attachment, shouldPad: true, + encryptionKey, }) ); @@ -112,8 +140,13 @@ export async function uploadLinkPreviewToFileServer( } return preview as any; } + const encryptionKey = window.sessionFeatureFlags.useDeterministicEncryption + ? await UserUtils.getUserEd25519Seed() + : crypto.randomBytes(32); + const image = await uploadToFileServer({ attachment: preview.image, + encryptionKey, }); return { ...preview, @@ -121,48 +154,19 @@ export async function uploadLinkPreviewToFileServer( }; } -export async function uploadQuoteThumbnailsToFileServer( - quote?: RawQuote -): Promise { - if (!quote) { - return undefined; - } - - const promises = (quote.attachments ?? []).map(async attachment => { - let thumbnail: AttachmentPointer | undefined; - if (attachment.thumbnail) { - thumbnail = await uploadToFileServer({ - attachment: attachment.thumbnail, - }); - } - if (!thumbnail) { - return attachment; - } - return { - ...attachment, - thumbnail, - url: thumbnail.url, - } as QuotedAttachmentWithUrl; - }); - - const attachments = _.compact(await Promise.all(promises)); - - return { - ...quote, - attachments, - }; -} - -export function attachmentIdAsStrFromUrl(url: string) { - const lastSegment = url?.split('/')?.pop(); +export function attachmentIdAsStrFromUrl(fullUrl: string) { + const url = new URL(fullUrl); + const lastSegment = extractLastPathSegment(url); if (!lastSegment) { - throw new Error('attachmentIdAsStrFromUrl last is not valid'); + throw new Error('attachmentIdAsStrFromUrl last segment is not valid'); } return lastSegment; } export function attachmentIdAsLongFromUrl(url: string) { - const lastSegment = url?.split('/')?.pop(); + const parsedUrl = URL.canParse(url) && new URL(url); + + const lastSegment = parsedUrl && parsedUrl.pathname.split('/').filter(Boolean).pop(); if (!lastSegment) { throw new Error('attachmentIdAsLongFromUrl last is not valid'); } diff --git a/ts/session/utils/AttachmentsDownload.ts b/ts/session/utils/AttachmentsDownload.ts index a2734539a6..801768c13e 100644 --- a/ts/session/utils/AttachmentsDownload.ts +++ b/ts/session/utils/AttachmentsDownload.ts @@ -4,7 +4,7 @@ import { v4 as uuidV4 } from 'uuid'; import { Data } from '../../data/data'; import { MessageModel } from '../../models/message'; -import { downloadAttachment, downloadAttachmentSogsV3 } from '../../receiver/attachments'; +import { downloadAttachmentFs, downloadAttachmentSogsV3 } from '../../receiver/attachments'; import { initializeAttachmentLogic, processNewAttachment } from '../../types/MessageAttachment'; import { 
getAttachmentMetadata } from '../../types/message/initializeAttachmentMetadata'; import { AttachmentDownloadMessageDetails } from '../../types/sqlSharedTypes'; @@ -179,7 +179,7 @@ async function _runJob(job: any) { // those two functions throw if they get a 404 downloaded = isOpenGroupV2 ? await downloadAttachmentSogsV3(attachment, openGroupV2Details) - : await downloadAttachment(attachment); + : await downloadAttachmentFs(attachment); } catch (error) { // Attachments on the server expire after 60 days, then start returning 404 if (error && error.code === 404) { diff --git a/ts/session/utils/AttachmentsV2.ts b/ts/session/utils/AttachmentsV2.ts index 69328e91f9..59432efc41 100644 --- a/ts/session/utils/AttachmentsV2.ts +++ b/ts/session/utils/AttachmentsV2.ts @@ -7,10 +7,8 @@ import { AttachmentPointer, AttachmentPointerWithUrl, PreviewWithAttachmentUrl, - Quote, - QuotedAttachment, } from '../messages/outgoing/visibleMessage/VisibleMessage'; -import { RawPreview, RawQuote } from './Attachments'; +import { RawPreview } from './Attachments'; import { OpenGroupRequestCommonType } from '../../data/types'; interface UploadParamsV2 { @@ -90,33 +88,3 @@ export async function uploadLinkPreviewsV3( url: preview.url || image.url, }; } - -export async function uploadQuoteThumbnailsV3( - openGroup: OpenGroupRequestCommonType, - quote?: RawQuote -): Promise { - if (!quote) { - return undefined; - } - - const promises = (quote.attachments ?? []).map(async attachment => { - let thumbnail: QuotedAttachment | undefined; - if (attachment.thumbnail) { - thumbnail = (await uploadV3({ - attachment: attachment.thumbnail, - openGroup, - })) as any; - } - return { - ...attachment, - thumbnail, - }; - }); - - const attachments = await Promise.all(promises); - - return { - ...quote, - attachments, - }; -} diff --git a/ts/session/utils/User.ts b/ts/session/utils/User.ts index 94bd20d5d4..cb00538381 100644 --- a/ts/session/utils/User.ts +++ b/ts/session/utils/User.ts @@ -99,14 +99,25 @@ export const getUserED25519KeyPairBytes = async (): Promise => { throw new Error('getUserED25519KeyPairBytes: user has no keypair'); }; +/** + * Return the ed25519 seed of the current user. + * This is used to generate deterministic encryption keys for attachments/profile pictures. + * + * This is cached so will only be slow on the first fetch. + */ +export async function getUserEd25519Seed() { + const ed25519KeyPairBytes = await getUserED25519KeyPairBytes(); + return ed25519KeyPairBytes.privKeyBytes.slice(0, 32); +} + export async function getOurProfile() { const displayName = (await UserConfigWrapperActions.getName()) || 'Anonymous'; const updatedAtSeconds = await UserConfigWrapperActions.getProfileUpdatedSeconds(); - const profilePicWithKey = await UserConfigWrapperActions.getProfilePicWithKeyHex(); + const profilePic = await UserConfigWrapperActions.getProfilePic(); return new OutgoingUserProfile({ displayName, updatedAtSeconds, - picUrlWithProfileKey: profilePicWithKey ?? null, + profilePic: profilePic ?? 
null, }); } diff --git a/ts/session/utils/calling/CallManager.ts b/ts/session/utils/calling/CallManager.ts index b5727fbd5a..b35ce1fbae 100644 --- a/ts/session/utils/calling/CallManager.ts +++ b/ts/session/utils/calling/CallManager.ts @@ -24,7 +24,6 @@ import { getCallMediaPermissionsSettings } from '../../../components/settings/Se import { Data } from '../../../data/data'; import { handleAcceptConversationRequest } from '../../../interactions/conversationInteractions'; import { READ_MESSAGE_STATE } from '../../../models/conversationAttributes'; -import { PnServer } from '../../apis/push_notification_api'; import { SnodeNamespaces } from '../../apis/snode_api/namespaces'; import { DURATION } from '../../constants'; import { DisappearingMessages } from '../../disappearing_messages'; @@ -553,12 +552,11 @@ export async function USER_callRecipient(recipient: string) { preOfferMsg, SnodeNamespaces.Default ); - const { wrappedEnvelope } = await MessageSender.sendSingleMessage({ + await MessageSender.sendSingleMessage({ message: rawPreOffer, isSyncMessage: false, abortSignal: null, }); - void PnServer.notifyPnServer(wrappedEnvelope, recipient); await openMediaDevicesAndAddTracks(); // Note CallMessages are very custom, as we moslty don't sync them to ourselves. diff --git a/ts/session/utils/job_runners/JobDeserialization.ts b/ts/session/utils/job_runners/JobDeserialization.ts index 4528d00dba..f43f4777eb 100644 --- a/ts/session/utils/job_runners/JobDeserialization.ts +++ b/ts/session/utils/job_runners/JobDeserialization.ts @@ -13,6 +13,7 @@ import { GroupInvite } from './jobs/GroupInviteJob'; import { GroupPendingRemovals } from './jobs/GroupPendingRemovalsJob'; import { GroupSync } from './jobs/GroupSyncJob'; import { UpdateMsgExpirySwarm } from './jobs/UpdateMsgExpirySwarmJob'; +import { AvatarReupload } from './jobs/AvatarReuploadJob'; export function persistedJobFromData( data: T @@ -27,6 +28,8 @@ export function persistedJobFromData( return new UserSync.UserSyncJob(data) as unknown as PersistedJob; case 'AvatarDownloadJobType': return new AvatarDownload.AvatarDownloadJob(data) as unknown as PersistedJob; + case 'AvatarReuploadJobType': + return new AvatarReupload.AvatarReuploadJob(data) as unknown as PersistedJob; case 'AvatarMigrateJobType': return new AvatarMigrate.AvatarMigrateJob(data) as unknown as PersistedJob; case 'FetchMsgExpirySwarmJobType': diff --git a/ts/session/utils/job_runners/JobRunner.ts b/ts/session/utils/job_runners/JobRunner.ts index e24e4fc0f8..192c78a6e1 100644 --- a/ts/session/utils/job_runners/JobRunner.ts +++ b/ts/session/utils/job_runners/JobRunner.ts @@ -15,6 +15,7 @@ import { UpdateMsgExpirySwarmPersistedData, UserSyncPersistedData, type AvatarMigratePersistedData, + type AvatarReuploadPersistedData, } from './PersistedJob'; import { JobRunnerType } from './jobs/JobRunnerType'; import { DURATION } from '../../constants'; @@ -385,6 +386,9 @@ const groupSyncRunner = new PersistedJobRunner('GroupSyn const avatarDownloadRunner = new PersistedJobRunner( 'AvatarDownloadJob' ); +const avatarReuploadRunner = new PersistedJobRunner( + 'AvatarReuploadJob' +); const avatarMigrateRunner = new PersistedJobRunner('AvatarMigrateJob'); const groupInviteJobRunner = new PersistedJobRunner('GroupInviteJob', 4); @@ -408,6 +412,7 @@ export const runners = { updateMsgExpiryRunner, fetchSwarmMsgExpiryRunner, avatarDownloadRunner, + avatarReuploadRunner, avatarMigrateRunner, groupInviteJobRunner, groupPendingRemovalJobRunner, diff --git a/ts/session/utils/job_runners/PersistedJob.ts 
b/ts/session/utils/job_runners/PersistedJob.ts index 6931c015bf..d33ddee652 100644 --- a/ts/session/utils/job_runners/PersistedJob.ts +++ b/ts/session/utils/job_runners/PersistedJob.ts @@ -5,6 +5,7 @@ export type PersistedJobType = | 'UserSyncJobType' | 'GroupSyncJobType' | 'AvatarDownloadJobType' + | 'AvatarReuploadJobType' | 'AvatarMigrateJobType' | 'GroupInviteJobType' | 'GroupPendingRemovalJobType' @@ -38,6 +39,11 @@ export interface AvatarDownloadPersistedData extends PersistedJobData { conversationId: string; } +export interface AvatarReuploadPersistedData extends PersistedJobData { + jobType: 'AvatarReuploadJobType'; + conversationId: string; +} + export interface AvatarMigratePersistedData extends PersistedJobData { jobType: 'AvatarMigrateJobType'; conversationId: string; @@ -61,15 +67,15 @@ export interface UserSyncPersistedData extends PersistedJobData { export interface GroupSyncPersistedData extends PersistedJobData { jobType: 'GroupSyncJobType'; } -interface PersitedDataWithMsgIds extends PersistedJobData { +interface PersistedDataWithMsgIds extends PersistedJobData { msgIds: Array; } -export interface FetchMsgExpirySwarmPersistedData extends PersitedDataWithMsgIds { +export interface FetchMsgExpirySwarmPersistedData extends PersistedDataWithMsgIds { jobType: 'FetchMsgExpirySwarmJobType'; } -export interface UpdateMsgExpirySwarmPersistedData extends PersitedDataWithMsgIds { +export interface UpdateMsgExpirySwarmPersistedData extends PersistedDataWithMsgIds { jobType: 'UpdateMsgExpirySwarmJobType'; } @@ -77,6 +83,7 @@ export type TypeOfPersistedData = | UserSyncPersistedData | AvatarDownloadPersistedData | AvatarMigratePersistedData + | AvatarReuploadPersistedData | FetchMsgExpirySwarmPersistedData | UpdateMsgExpirySwarmPersistedData | FakeSleepJobData @@ -92,8 +99,8 @@ export enum RunJobResult { RetryJobIfPossible = 2, PermanentFailure = 3, } -function isDataWithMsgIds(data: PersistedJobData): data is PersitedDataWithMsgIds { - return !isNil((data as PersitedDataWithMsgIds)?.msgIds); +function isDataWithMsgIds(data: PersistedJobData): data is PersistedDataWithMsgIds { + return !isNil((data as PersistedDataWithMsgIds)?.msgIds); } /** diff --git a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts index b76d7c92e0..099894b078 100644 --- a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts +++ b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts @@ -1,11 +1,10 @@ import { isEmpty, isNumber, isString } from 'lodash'; import { v4 } from 'uuid'; import { UserUtils } from '../..'; -import { downloadAttachment } from '../../../../receiver/attachments'; import { processNewAttachment } from '../../../../types/MessageAttachment'; import { decryptProfile } from '../../../../util/crypto/profileEncrypter'; import { ConvoHub } from '../../../conversations'; -import { fromHexToArray } from '../../String'; +import { ed25519Str, fromHexToArray } from '../../String'; import { runners } from '../JobRunner'; import { AddJobCheckReturn, @@ -14,6 +13,9 @@ import { RunJobResult, } from '../PersistedJob'; import { processAvatarData } from '../../../../util/avatar/processAvatarData'; +import { downloadAttachmentFs } from '../../../../receiver/attachments'; +import { extractDetailsFromUrlFragment } from '../../../url'; +import { MultiEncryptWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; const defaultMsBetweenRetries = 10000; const defaultMaxAttempts = 3; @@ -117,10 +119,13 @@ class 
AvatarDownloadJob extends PersistedJob { window.log.debug(`[profileupdate] starting downloading task for ${conversation.id}`); // This is an avatar download, we are free to resize/compress/convert what is downloaded as we wish. // Desktop will generate a normal avatar and a forced static one. Both resized and converted if required. - const downloaded = await downloadAttachment({ + const downloaded = await downloadAttachmentFs({ url: toDownloadPointer, isRaw: true, }); + const { deterministicEncryption } = extractDetailsFromUrlFragment( + new URL(toDownloadPointer) + ); conversation = ConvoHub.use().getOrThrow(convoId); if (!downloaded.data.byteLength) { @@ -136,10 +141,19 @@ class AvatarDownloadJob extends PersistedJob { const profileKeyArrayBuffer = fromHexToArray(toDownloadProfileKey); let decryptedData: ArrayBuffer; try { - decryptedData = await decryptProfile(downloaded.data, profileKeyArrayBuffer); + if (deterministicEncryption) { + const { decryptedData: decryptedData2 } = + await MultiEncryptWrapperActions.attachmentDecrypt({ + encryptedData: new Uint8Array(downloaded.data), + decryptionKey: profileKeyArrayBuffer, + }); + decryptedData = decryptedData2.buffer; + } else { + decryptedData = await decryptProfile(downloaded.data, profileKeyArrayBuffer); + } } catch (decryptError) { window.log.info( - `[profileupdate] failed to decrypt downloaded data ${conversation.id} with provided profileKey` + `[profileupdate] failed to decrypt downloaded data for ${ed25519Str(conversation.id)} with provided profileKey` ); // if we got content, but cannot decrypt it with the provided profileKey, there is no need to keep retrying. return RunJobResult.PermanentFailure; @@ -150,7 +164,7 @@ class AvatarDownloadJob extends PersistedJob { ); // we autoscale incoming avatars because our app keeps decrypted avatars in memory and some platforms allows large avatars to be uploaded. - const processed = await processAvatarData(decryptedData); + const processed = await processAvatarData(decryptedData, conversation.isMe()); const upgradedMainAvatar = await processNewAttachment({ data: processed.mainAvatarDetails.outputBuffer, diff --git a/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts b/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts index ee681caab9..00f676433d 100644 --- a/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts +++ b/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts @@ -141,7 +141,7 @@ class AvatarMigrateJob extends PersistedJob { } // we autoscale incoming avatars because our app keeps decrypted avatars in memory and some platforms allows large avatars to be uploaded. 
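// Condensed sketch (hypothetical helper, not in the diff) of the decryption branch the avatar
// download takes above: the `d` fragment flag on the pointer URL decides between libsession's
// deterministic attachment decryption and the legacy profile decryption. The calls and imports are
// the ones already used by AvatarDownloadJob.ts; only the wrapper function is invented here.
async function decryptDownloadedAvatar(pointerUrl: string, data: ArrayBuffer, profileKey: Uint8Array) {
  const { deterministicEncryption } = extractDetailsFromUrlFragment(new URL(pointerUrl));
  if (deterministicEncryption) {
    const { decryptedData } = await MultiEncryptWrapperActions.attachmentDecrypt({
      encryptedData: new Uint8Array(data),
      decryptionKey: profileKey,
    });
    return decryptedData.buffer;
  }
  // legacy path: the random 32-byte profileKey is the decryption key itself
  return decryptProfile(data, profileKey);
}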
- const processed = await processAvatarData(decryptedData); + const processed = await processAvatarData(decryptedData, conversation.isMe()); const upgradedMainAvatar = await processNewAttachment({ data: processed.mainAvatarDetails.outputBuffer, diff --git a/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts b/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts new file mode 100644 index 0000000000..4acdc8754c --- /dev/null +++ b/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts @@ -0,0 +1,207 @@ +import { isNumber } from 'lodash'; +import { v4 } from 'uuid'; +import { UserUtils } from '../..'; +import { ConvoHub } from '../../../conversations'; +import { ed25519Str } from '../../String'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + AvatarDownloadPersistedData, + PersistedJob, + RunJobResult, + type AvatarReuploadPersistedData, +} from '../PersistedJob'; +import { DecryptedAttachmentsManager } from '../../../crypto/DecryptedAttachmentsManager'; +import { IMAGE_JPEG } from '../../../../types/MIME'; +import { urlToBlob } from '../../../../types/attachments/VisualAttachment'; +import { ImageProcessor } from '../../../../webworker/workers/browser/image_processor_interface'; +import { maxAvatarDetails } from '../../../../util/attachment/attachmentSizes'; +import { UserConfigWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { extendFileExpiry } from '../../../apis/file_server_api/FileServerApi'; +import { fileServerUrlToFileId } from '../../../apis/file_server_api/types'; +import { NetworkTime } from '../../../../util/NetworkTime'; +import { DURATION, DURATION_SECONDS } from '../../../constants'; +import { uploadAndSetOurAvatarShared } from '../../../../interactions/avatar-interactions/nts-avatar-interactions'; +import { FS } from '../../../apis/file_server_api/FileServerTarget'; + +const defaultMsBetweenRetries = 10000; +const defaultMaxAttempts = 3; + +async function addAvatarReuploadJob() { + const avatarReuploadJob = new AvatarReuploadJob({ + // postpone this job for 30 seconds, so we don't reupload right on start (we need an onion path to be valid) + nextAttemptTimestamp: Date.now() + DURATION.SECONDS * 30, + conversationId: UserUtils.getOurPubKeyStrFromCache(), + }); + window.log.debug(`addAvatarReuploadJob: adding job reupload `); + await runners.avatarReuploadRunner.addJob(avatarReuploadJob); +} + +async function fetchLocalAvatarDetails(currentMainPath: string) { + try { + const decryptedAvatarLocalUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + currentMainPath, + IMAGE_JPEG, // not needed + true + ); + + if (!decryptedAvatarLocalUrl) { + window.log.warn('Could not decrypt avatar stored locally..'); + return null; + } + const blob = await urlToBlob(decryptedAvatarLocalUrl); + const decryptedAvatarData = await blob.arrayBuffer(); + const metadata = await ImageProcessor.imageMetadata(decryptedAvatarData); + if (!metadata) { + window.log.warn('Failed to get metadata from avatar'); + return null; + } + return { decryptedAvatarData, metadata }; + } catch (e) { + window.log.warn('[avatarReupload] Failed to get metadata from avatar'); + return null; + } +} + +class AvatarReuploadJob extends PersistedJob { + constructor({ + conversationId, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + identifier, + }: Pick & + Partial< + Pick< + AvatarDownloadPersistedData, + 'nextAttemptTimestamp' | 'identifier' | 'maxAttempts' | 'currentRetry' + > + >) { + super({ + jobType: 
'AvatarReuploadJobType', + identifier: identifier || v4(), + conversationId, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now() + defaultMsBetweenRetries, + currentRetry: isNumber(currentRetry) ? currentRetry : 0, + }); + } + + public async run(): Promise { + const convoId = this.persistedData.conversationId; + window.log.debug( + `running job ${this.persistedData.jobType} id:"${this.persistedData.identifier}" ` + ); + + if (!this.persistedData.identifier) { + return RunJobResult.PermanentFailure; + } + if (!convoId) { + return RunJobResult.PermanentFailure; + } + + let conversation = ConvoHub.use().get(convoId); + if (!conversation || !conversation.isMe()) { + // Note: if we add the groupv2 case here, we'd need to add a profile_updated timestamp to the metagroup wrapper + window.log.warn('AvatarReuploadJob did not find corresponding conversation, or not us'); + + return RunJobResult.PermanentFailure; + } + const ourProfileLastUpdatedSeconds = await UserConfigWrapperActions.getProfileUpdatedSeconds(); + const currentMainPath = conversation.getAvatarInProfilePath(); + const avatarPointer = conversation.getAvatarPointer(); + const profileKey = conversation.getProfileKey(); + const { fileId, fullUrl } = fileServerUrlToFileId(avatarPointer); + if (!currentMainPath || !avatarPointer || !profileKey || !fullUrl) { + // we do not have an avatar to reupload, nothing to do. + return RunJobResult.Success; + } + + try { + const currentAvatarDetails = await fetchLocalAvatarDetails(currentMainPath); + if (!currentAvatarDetails) { + return RunJobResult.RetryJobIfPossible; + } + const { decryptedAvatarData, metadata } = currentAvatarDetails; + + window.log.debug(`[avatarReupload] starting for ${ed25519Str(conversation.id)}`); + + if ( + ourProfileLastUpdatedSeconds !== 0 && + metadata.width <= maxAvatarDetails.maxSidePlanReupload && + metadata.height <= maxAvatarDetails.maxSidePlanReupload && + metadata.format === 'webp' + ) { + const target = FS.fileUrlToFileTarget(fullUrl?.toString()); + window.log.debug( + `[avatarReupload] main avatar is already the right size and format for ${ed25519Str(conversation.id)}, just renewing it on fs: ${target}` + ); + const expiryRenewResult = await extendFileExpiry(fileId, target); + + if (expiryRenewResult) { + window.log.debug( + `[avatarReupload] expiry renew for ${ed25519Str(conversation.id)} of file ${fileId} was successful` + ); + return RunJobResult.Success; + } + + if (ourProfileLastUpdatedSeconds > NetworkTime.nowSeconds() - 12 * DURATION_SECONDS.DAYS) { + // `renew` failed but our last reupload was less than 12 days ago, so we don't want to retry + window.log.debug( + `[avatarReupload] expiry renew for ${ed25519Str(conversation.id)} of file ${fileId} failed but our last reupload was less than 12 days ago, so we don't want to retry` + ); + // considering this to be a success + return RunJobResult.Success; + } + // renew failed, but our last reupload was more than 12 days ago, so we want to reprocess and + // reupload our current avatar, see below + } + + // here, + // - either the format or the size is wrong + // - or we do not have a ourProfileLastUpdatedSeconds yet + // - or the expiry renew failed and our last reupload was more than 12 days ago + // In all those cases, we want to reprocess our current avatar, and reupload it. 
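// Hypothetical restatement (not part of the diff) of the decision made above: when the local avatar
// already matches what we would upload (webp, at most 600px a side, and we have a profile-updated
// timestamp), the job only tries to extend the file's expiry; a full re-encode and reupload happens
// when the shape is wrong, the timestamp is missing, or the renewal failed and the last upload is
// older than 12 days.
async function planAvatarReupload(opts: {
  metadata: { width: number; height: number; format: string }; // from ImageProcessor.imageMetadata()
  lastUpdatedSeconds: number; // UserConfigWrapperActions.getProfileUpdatedSeconds()
  nowSeconds: number; // NetworkTime.nowSeconds()
  tryRenew: () => Promise<boolean>; // wraps extendFileExpiry(fileId, target)
}): Promise<'nothing-to-do' | 'full-reupload'> {
  const { metadata, lastUpdatedSeconds, nowSeconds, tryRenew } = opts;
  const alreadyRightShape =
    lastUpdatedSeconds !== 0 &&
    metadata.width <= 600 && // maxAvatarDetails.maxSidePlanReupload
    metadata.height <= 600 &&
    metadata.format === 'webp';
  if (alreadyRightShape) {
    if (await tryRenew()) {
      return 'nothing-to-do'; // expiry extended on the file server
    }
    if (lastUpdatedSeconds > nowSeconds - 12 * 24 * 60 * 60) {
      return 'nothing-to-do'; // renewal failed, but we reuploaded less than 12 days ago
    }
  }
  return 'full-reupload'; // reprocess with processAvatarData() and uploadAndSetOurAvatarShared()
}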
+ + window.log.info(`[profileupdate] about to auto scale avatar for convo ${conversation.id}`); + + conversation = ConvoHub.use().getOrThrow(convoId); + + // Reprocess the avatar content, and reupload it + // This will pick the correct file server depending on the env variables set. + await uploadAndSetOurAvatarShared({ + decryptedAvatarData, + ourConvo: conversation, + context: 'reuploadAvatar', + }); + } catch (e) { + window.log.warn(`[profileReupload] failed with ${e.message}`); + return RunJobResult.RetryJobIfPossible; + } + + // return true so this job is marked as a success + return RunJobResult.Success; + } + + public serializeJob(): AvatarReuploadPersistedData { + return super.serializeBase(); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public addJobCheck(_jobs: Array): AddJobCheckReturn { + return null; + } + + public getJobTimeoutMs(): number { + return 10000; + } +} + +export const AvatarReupload = { + AvatarReuploadJob, + addAvatarReuploadJob, +}; diff --git a/ts/session/utils/job_runners/jobs/JobRunnerType.ts b/ts/session/utils/job_runners/jobs/JobRunnerType.ts index 46b9d0c0b1..58dd135748 100644 --- a/ts/session/utils/job_runners/jobs/JobRunnerType.ts +++ b/ts/session/utils/job_runners/jobs/JobRunnerType.ts @@ -6,6 +6,7 @@ export type JobRunnerType = | 'FakeSleepForJob' | 'FakeSleepForMultiJob' | 'AvatarDownloadJob' + | 'AvatarReuploadJob' | 'AvatarMigrateJob' | 'GroupInviteJob' | 'GroupPromoteJob' diff --git a/ts/state/ducks/metaGroups.ts b/ts/state/ducks/metaGroups.ts index 694843afc1..d2fa96bd77 100644 --- a/ts/state/ducks/metaGroups.ts +++ b/ts/state/ducks/metaGroups.ts @@ -1000,7 +1000,7 @@ async function handleAvatarChangeFromUI({ const dataUnencrypted = await blobAvatarAlreadyScaled.arrayBuffer(); - const processed = await processAvatarData(dataUnencrypted); + const processed = await processAvatarData(dataUnencrypted, true); if (!processed) { throw new Error('Failed to process avatar'); @@ -1011,8 +1011,13 @@ async function handleAvatarChangeFromUI({ // encrypt the avatar data with the profile key const encryptedData = await encryptProfile(processed.mainAvatarDetails.outputBuffer, profileKey); - // TODO: we should store the expiries of the attachment somewhere in libsession I assume, and reupload as needed - const uploadedFileDetails = await uploadFileToFsWithOnionV4(encryptedData); + // Note: currently deterministic encryption is not supported for group's avatars + const deterministicEncryption = false; + + const uploadedFileDetails = await uploadFileToFsWithOnionV4( + encryptedData, + deterministicEncryption + ); if (!uploadedFileDetails || !uploadedFileDetails.fileUrl) { window?.log?.warn('File upload for groupv2 to file server failed'); throw new Error('File upload for groupv2 to file server failed'); diff --git a/ts/state/ducks/types/releasedFeaturesReduxTypes.ts b/ts/state/ducks/types/releasedFeaturesReduxTypes.ts index 65b54acadd..bb9a05d9d4 100644 --- a/ts/state/ducks/types/releasedFeaturesReduxTypes.ts +++ b/ts/state/ducks/types/releasedFeaturesReduxTypes.ts @@ -5,6 +5,7 @@ export type SessionFeatureFlags = { replaceLocalizedStringsWithKeys: boolean; // Hooks useOnionRequests: boolean; + useDeterministicEncryption: boolean; useTestNet: boolean; useLocalDevNet: string; useClosedGroupV2QAButtons: boolean; diff --git a/ts/state/ducks/user.ts b/ts/state/ducks/user.ts index 96ba1a71da..e559dd6869 100644 --- a/ts/state/ducks/user.ts +++ b/ts/state/ducks/user.ts @@ -2,7 +2,6 @@ import { isNil } from 'lodash'; import { createAsyncThunk, 
createSlice, type PayloadAction } from '@reduxjs/toolkit'; import { ConvoHub } from '../../session/conversations'; import { SyncUtils, UserUtils } from '../../session/utils'; -import { getSodiumRenderer } from '../../session/crypto'; import { uploadAndSetOurAvatarShared } from '../../interactions/avatar-interactions/nts-avatar-interactions'; import { ed25519Str } from '../../session/utils/String'; import { userSettingsModal, updateEditProfilePictureModal } from './modalDialog'; @@ -37,14 +36,9 @@ const updateOurAvatar = createAsyncThunk( return null; } - const sodium = await getSodiumRenderer(); - // Uploading a new avatar, we want to encrypt its data with a new key. - const profileKey = sodium.randombytes_buf(32); - const res = await uploadAndSetOurAvatarShared({ decryptedAvatarData: mainAvatarDecrypted, ourConvo, - profileKey, context: 'uploadNewAvatar', }); diff --git a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts index a421517374..7bde96ea0c 100644 --- a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts +++ b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts @@ -61,10 +61,10 @@ describe('DecryptedAttachmentsManager', () => { 'decryptAttachmentBufferNode', new Uint8Array(5) ); - TestUtils.stubCreateObjectUrl(); }); it('url starts with attachment path but is not already decrypted', async () => { + TestUtils.stubUrlCreateObjectURL(); expect( DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') ).to.be.eq(null); @@ -89,6 +89,7 @@ describe('DecryptedAttachmentsManager', () => { }); it('url starts with attachment path and is already decrypted', async () => { + TestUtils.stubUrlCreateObjectURL(); expect( DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') ).to.be.eq(null); diff --git a/ts/test/session/unit/messages/ChatMessage_test.ts b/ts/test/session/unit/messages/ChatMessage_test.ts index 9744a8dbbd..ae5436c556 100644 --- a/ts/test/session/unit/messages/ChatMessage_test.ts +++ b/ts/test/session/unit/messages/ChatMessage_test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai'; +import Sinon from 'sinon'; // eslint-disable-next-line import/order -import { TextEncoder } from 'util'; import { toNumber } from 'lodash'; import { SignalService } from '../../../../protobuf'; @@ -12,7 +12,7 @@ import { VisibleMessage, } from '../../../../session/messages/outgoing/visibleMessage/VisibleMessage'; import { DisappearingMessageMode } from '../../../../session/disappearing_messages/types'; -import { OutgoingUserProfile } from '../../../../types/message'; +import { TestUtils } from '../../../test-utils'; const sharedNoExpire = { expirationType: DisappearingMessageMode[0], @@ -20,6 +20,9 @@ const sharedNoExpire = { }; describe('VisibleMessage', () => { + afterEach(() => { + Sinon.restore(); + }); it('can create empty message with just a timestamp', () => { const message = new VisibleMessage({ createAtNetworkTimestamp: Date.now(), @@ -84,35 +87,8 @@ describe('VisibleMessage', () => { ); }); - it('can create message with a full loki profile', () => { - const profileKey = new TextEncoder().encode('profileKey'); - - const lokiProfile = { - displayName: 'displayName', - avatarPointer: 'avatarPointer', - profileKey, - updatedAtSeconds: 1, - }; - const message = new VisibleMessage({ - createAtNetworkTimestamp: Date.now(), - userProfile: new OutgoingUserProfile(lokiProfile), 
- ...sharedNoExpire, - }); - const plainText = message.plainTextBuffer(); - const decoded = SignalService.Content.decode(plainText); - expect(decoded.dataMessage).to.have.deep.property('profile'); - - expect(decoded.dataMessage) - .to.have.property('profile') - .to.have.deep.property('displayName', 'displayName'); - expect(decoded.dataMessage) - .to.have.property('profile') - .to.have.deep.property('profilePicture', 'avatarPointer'); - expect(decoded.dataMessage).to.have.deep.property('profileKey', profileKey); - }); - it('can create message with a quote without attachments', () => { - const quote: Quote = { id: 1234, author: 'author', text: 'text' }; + const quote: Quote = { id: 1234, author: 'author' }; const message = new VisibleMessage({ createAtNetworkTimestamp: Date.now(), quote, @@ -124,7 +100,6 @@ describe('VisibleMessage', () => { const decodedID = toNumber(decoded.dataMessage?.quote?.id); expect(decodedID).to.be.equal(1234); expect(decoded.dataMessage?.quote).to.have.deep.property('author', 'author'); - expect(decoded.dataMessage?.quote).to.have.deep.property('text', 'text'); }); it('can create message with a preview', () => { @@ -148,6 +123,9 @@ describe('VisibleMessage', () => { }); it('can create message with an AttachmentPointer', () => { + TestUtils.stubWindowFeatureFlags(); + TestUtils.stubURLCanParse(); + const attachment: AttachmentPointerWithUrl = { url: 'http://thisisaareal/url/1234', contentType: 'contentType', diff --git a/ts/test/session/unit/messages/MessageRequestResponse_test.ts b/ts/test/session/unit/messages/MessageRequestResponse_test.ts index 184e879d85..ac81ed0407 100644 --- a/ts/test/session/unit/messages/MessageRequestResponse_test.ts +++ b/ts/test/session/unit/messages/MessageRequestResponse_test.ts @@ -1,13 +1,19 @@ import { expect } from 'chai'; +import { from_hex } from 'libsodium-wrappers-sumo'; import { v4 } from 'uuid'; +import Sinon from 'sinon'; import { SignalService } from '../../../../protobuf'; import { Constants } from '../../../../session'; import { MessageRequestResponse } from '../../../../session/messages/outgoing/controlMessage/MessageRequestResponse'; import { OutgoingUserProfile } from '../../../../types/message'; +import { TestUtils } from '../../../test-utils'; describe('MessageRequestResponse', () => { let message: MessageRequestResponse | undefined; + afterEach(() => { + Sinon.restore(); + }); it('correct ttl', () => { message = new MessageRequestResponse({ createAtNetworkTimestamp: Date.now(), @@ -67,8 +73,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: null, + profilePic: { url: null, key: null }, updatedAtSeconds: 1, }), }); @@ -85,8 +90,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: new Uint8Array(), + profilePic: null, updatedAtSeconds: 1, }), }); @@ -100,36 +104,32 @@ describe('MessageRequestResponse', () => { }); it('can create response with display name and profileKey and profileImage', () => { + TestUtils.stubURLCanParse(); + + const userProfile = new OutgoingUserProfile({ + displayName: 'Jane', + profilePic: { + url: 'http://filev2.getsession.org/file/abcdefghijklmnop', + key: from_hex('0102030405060102030405060102030401020304050601020304050601020304'), + }, + updatedAtSeconds: 1, + }); message = new MessageRequestResponse({ createAtNetworkTimestamp: 
Date.now(), - userProfile: new OutgoingUserProfile({ - displayName: 'Jane', - avatarPointer: 'https://somevalidurl.com', - profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]), - updatedAtSeconds: 1, - }), + userProfile, }); const plainText = message.plainTextBuffer(); const decoded = SignalService.Content.decode(plainText); expect(decoded.messageRequestResponse?.profile?.displayName).to.be.deep.eq('Jane'); - expect(decoded.messageRequestResponse?.profileKey).to.be.not.empty; - - if (!decoded.messageRequestResponse?.profileKey?.buffer) { - throw new Error('decoded.messageRequestResponse?.profileKey?.buffer should be set'); - } expect(decoded.messageRequestResponse?.profile?.profilePicture).to.be.eq( - 'https://somevalidurl.com' + 'http://filev2.getsession.org/file/abcdefghijklmnop' ); // don't ask me why deep.eq ([1,2,3, ...]) gives nothing interesting but a 8192 buffer not matching - expect(decoded.messageRequestResponse?.profileKey.length).to.be.eq(6); - expect(decoded.messageRequestResponse?.profileKey[0]).to.be.eq(1); - expect(decoded.messageRequestResponse?.profileKey[1]).to.be.eq(2); - expect(decoded.messageRequestResponse?.profileKey[2]).to.be.eq(3); - expect(decoded.messageRequestResponse?.profileKey[3]).to.be.eq(4); - expect(decoded.messageRequestResponse?.profileKey[4]).to.be.eq(5); - expect(decoded.messageRequestResponse?.profileKey[5]).to.be.eq(6); + expect(decoded.messageRequestResponse?.profileKey).to.be.deep.eq( + from_hex('0102030405060102030405060102030401020304050601020304050601020304') + ); }); it('profileKey not included if profileUrl not set', () => { @@ -137,8 +137,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]), + profilePic: { url: null, key: new Uint8Array([1, 2, 3, 4, 5, 6]) }, updatedAtSeconds: 1, }), }); @@ -160,8 +159,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: 'https://somevalidurl.com', - profileKey: null, + profilePic: { url: 'https://somevalidurl.com', key: null }, updatedAtSeconds: 1, }), }); diff --git a/ts/test/session/unit/sending/MessageSender_test.ts b/ts/test/session/unit/sending/MessageSender_test.ts index 37f0371bc6..0047cb5796 100644 --- a/ts/test/session/unit/sending/MessageSender_test.ts +++ b/ts/test/session/unit/sending/MessageSender_test.ts @@ -21,7 +21,6 @@ import { TestUtils } from '../../../test-utils'; import { TypedStub, expectAsyncToThrow, - stubCreateObjectUrl, stubData, stubUtilWorker, stubValidSnodeSwarm, @@ -319,7 +318,6 @@ describe('MessageSender', () => { 'X-SOGS-Signature': 'gYqpWZX6fnF4Gb2xQM3xaXs0WIYEI49+B8q4mUUEg8Rw0ObaHUWfoWjMHMArAtP9QlORfiydsKWz1o6zdPVeCQ==', }); - stubCreateObjectUrl(); Sinon.stub(OpenGroupMessageV2, 'fromJson').resolves(); }); diff --git a/ts/test/test-utils/utils/stubbing.ts b/ts/test/test-utils/utils/stubbing.ts index 1f583e116e..aa72517c41 100644 --- a/ts/test/test-utils/utils/stubbing.ts +++ b/ts/test/test-utils/utils/stubbing.ts @@ -62,11 +62,30 @@ export function stubUserGroupWrapper { +export function stubURLCanParse() { + if (!URL.canParse) { + URL.canParse = () => true; + } + Sinon.stub(URL, 'canParse').callsFake((url, base) => { + try { + // eslint-disable-next-line no-new + new URL(url, base); + return true; + } catch { + return false; + } + }); +} + +export function stubUrlCreateObjectURL() { + if 
(!URL.createObjectURL) { + URL.createObjectURL = () => { + return `${Date.now()}:${Math.floor(Math.random() * 1000)}`; + }; + } + Sinon.stub(URL, 'createObjectURL').callsFake(() => { return `${Date.now()}:${Math.floor(Math.random() * 1000)}`; - }; + }); } type WindowValue = Partial | undefined; diff --git a/ts/types/Attachment.ts b/ts/types/Attachment.ts index 3b11f14f1f..0aa7101336 100644 --- a/ts/types/Attachment.ts +++ b/ts/types/Attachment.ts @@ -33,7 +33,7 @@ export type AttachmentThumbnail = { path?: string; }; -export interface AttachmentType { +export type AttachmentType = { contentType: MIME.MIMEType; fileName: string; /** For messages not already on disk, this will be a data url */ @@ -50,13 +50,13 @@ export interface AttachmentType { videoUrl?: string; /** Not included in protobuf, needs to be pulled from flags */ isVoiceMessage?: boolean; -} +}; -export interface AttachmentTypeWithPath extends AttachmentType { +export type AttachmentTypeWithPath = AttachmentType & { path: string; flags?: number; error?: any; -} +}; // UI-focused functions @@ -116,7 +116,7 @@ export function getThumbnailUrl(attachment: AttachmentType): string { return attachment?.thumbnail?.url || getUrl(attachment); } -export function getUrl(attachment: AttachmentType): string { +export function getUrl(attachment: Pick): string { return attachment?.screenshot?.url || attachment.url; } @@ -129,7 +129,7 @@ export function isImage(attachments?: Array) { ); } -export function isImageAttachment(attachment: AttachmentType): boolean { +export function isImageAttachment(attachment: Pick): boolean { return Boolean( attachment && attachment.contentType && isImageTypeSupported(attachment.contentType) ); @@ -143,7 +143,7 @@ export function isVideo(attachments?: Array): boolean { return Boolean(attachments && isVideoAttachment(attachments[0])); } -export function isVideoAttachment(attachment?: AttachmentType): boolean { +export function isVideoAttachment(attachment?: Pick): boolean { return Boolean( !!attachment && !!attachment.contentType && isVideoTypeSupported(attachment.contentType) ); @@ -190,7 +190,9 @@ export function getImageDimensionsInAttachment(attachment: AttachmentType): Dime }; } -export function areAllAttachmentsVisual(attachments?: Array): boolean { +export function areAllAttachmentsVisual( + attachments?: Array> +): boolean { if (!attachments) { return false; } diff --git a/ts/types/MessageAttachment.ts b/ts/types/MessageAttachment.ts index 8df234c107..ad757445b9 100644 --- a/ts/types/MessageAttachment.ts +++ b/ts/types/MessageAttachment.ts @@ -118,35 +118,6 @@ export const loadPreviewData = async (preview: any): Promise> => { ]; }; -export const loadQuoteData = async (quote: any) => { - if (!quote) { - return null; - } - if (!quote.attachments?.length || isEmpty(quote.attachments[0])) { - return quote; - } - - const quotedFirstAttachment = await quote.attachments[0]; - - const { thumbnail } = quotedFirstAttachment; - - if (!thumbnail || !thumbnail.path) { - return { - ...quote, - attachments: [quotedFirstAttachment], - }; - } - const quotedAttachmentWithThumbnail = { - ...quotedFirstAttachment, - thumbnail: await loadAttachmentData(thumbnail), - }; - - return { - ...quote, - attachments: [quotedAttachmentWithThumbnail], - }; -}; - /** * Any `data: ArrayBuffer` provided here must first have been oriented to the * right orientation using one of the ImageProcessor functions. 
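// Small sketch (not part of the diff) of what the Pick<> narrowing of the Attachment.ts helpers
// above buys: callers can pass a minimal object holding only the field the helper actually reads,
// instead of a full AttachmentType. The content type is an illustrative literal; MIME is the module
// already imported by Attachment.ts.
isImageAttachment({ contentType: 'image/jpeg' as MIME.MIMEType });
isVideoAttachment({ contentType: 'image/jpeg' as MIME.MIMEType }); // false: not a video content type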
@@ -216,7 +187,9 @@ const migrateDataToFileSystem = async (data?: ArrayBuffer) => { const isValidData = isArrayBuffer(data); if (!isValidData) { - throw new TypeError(`Expected ${data} to be an array buffer got: ${typeof data}`); + throw new TypeError( + `migrateDataToFileSystem: Expected 'data' to be an array buffer got: ${typeof data}` + ); } const path = await writeNewAttachmentData(data); diff --git a/ts/types/attachments/VisualAttachment.ts b/ts/types/attachments/VisualAttachment.ts index 04d12c7cea..24ad51637c 100644 --- a/ts/types/attachments/VisualAttachment.ts +++ b/ts/types/attachments/VisualAttachment.ts @@ -14,9 +14,9 @@ import { getFeatureFlag } from '../../state/ducks/types/releasedFeaturesReduxTyp import { processAvatarData } from '../../util/avatar/processAvatarData'; import type { ProcessedAvatarDataType } from '../../webworker/workers/node/image_processor/image_processor'; import { ImageProcessor } from '../../webworker/workers/browser/image_processor_interface'; -import { maxThumbnailDetails } from '../../util/attachment/attachmentSizes'; +import { maxAvatarDetails, maxThumbnailDetails } from '../../util/attachment/attachmentSizes'; -export const THUMBNAIL_CONTENT_TYPE = 'image/png'; +export const THUMBNAIL_CONTENT_TYPE = 'image/webp'; export const urlToBlob = async (dataUrl: string) => { return (await fetch(dataUrl)).blob(); @@ -186,21 +186,6 @@ export const revokeObjectUrl = (objectUrl: string) => { URL.revokeObjectURL(objectUrl); }; -async function autoScaleAvatarBlob(file: File): Promise { - try { - const arrayBuffer = await file.arrayBuffer(); - const processed = await processAvatarData(arrayBuffer); - return processed; - } catch (e) { - ToastUtils.pushToastError( - 'pickFileForAvatar', - `An error happened while picking/resizing the image: "${e.message?.slice(200) || ''}"` - ); - window.log.error(e); - return null; - } -} - async function pickFileForReal() { const acceptedImages = ['.png', '.gif', '.jpeg', '.jpg']; if (getFeatureFlag('proAvailable')) { @@ -225,11 +210,14 @@ async function pickFileForReal() { } async function pickFileForTestIntegration() { - const blueAvatarDetails = await ImageProcessor.testIntegrationFakeAvatar(500, { - r: 0, - g: 0, - b: 255, - }); + const blueAvatarDetails = await ImageProcessor.testIntegrationFakeAvatar( + maxAvatarDetails.maxSidePlanReupload, + { + r: 0, + g: 0, + b: 255, + } + ); const file = new File([blueAvatarDetails.outputBuffer], 'testIntegrationFakeAvatar.jpeg', { type: blueAvatarDetails.format, }); @@ -242,5 +230,17 @@ async function pickFileForTestIntegration() { export async function pickFileForAvatar(): Promise { const file = isTestIntegration() ? 
await pickFileForTestIntegration() : await pickFileForReal(); - return autoScaleAvatarBlob(file); + try { + const arrayBuffer = await file.arrayBuffer(); + // pickFileForAvatar is only used for avatars we want to be able to reupload (ourselves or 03-groups) + const processed = await processAvatarData(arrayBuffer, true); + return processed; + } catch (e) { + ToastUtils.pushToastError( + 'pickFileForAvatar', + `An error happened while picking/resizing the image: "${e.message?.slice(200) || ''}"` + ); + window.log.error(e); + return null; + } } diff --git a/ts/types/message/index.ts b/ts/types/message/index.ts index c082a76ca1..ace10637be 100644 --- a/ts/types/message/index.ts +++ b/ts/types/message/index.ts @@ -5,31 +5,29 @@ import { isEmpty, isString, isTypedArray } from 'lodash'; import { MessageAttributes } from '../../models/messageType'; import { SignalService } from '../../protobuf'; import { Timestamp } from '../timestamp/timestamp'; +import { addProfileKeyToUrl, extractDetailsFromUrlFragment } from '../../session/url'; +// Note: this removes the profile key from the url on purpose, +// as we do not want to share this design with the other platforms +// (and we do not want to send it as part of the URL). function extractPicDetailsFromUrl(src: string | null): ProfilePicture { - if (!src) { + if (!src || !URL.canParse(src)) { return { url: null, key: null }; } - const urlParts = src.split('#'); - if (urlParts.length !== 2) { - throw new Error('extractPicDetailsFromUrl url does not contain a profileKey'); - } - const url = urlParts[0]; - const key = urlParts[1]; - + const { profileKey: key, urlWithoutProfileKey } = extractDetailsFromUrlFragment(new URL(src)); // throwing here, as if src is not empty we expect a key to be set if (!isEmpty(key) && !isString(key)) { throw new Error('extractPicDetailsFromUrl: profileKey is set but not a string'); } // throwing here, as if src is not empty we expect an url to be set - if (!isEmpty(url) && !isString(url)) { + if (!isEmpty(urlWithoutProfileKey) && !isString(urlWithoutProfileKey)) { throw new Error('extractPicDetailsFromUrl: avatarPointer is set but not a string'); } - if (!url || !key) { + if (!urlWithoutProfileKey || !key) { // this shouldn't happen, but we check it anyway return { url: null, key: null }; } - return { url, key: from_hex(key) }; + return { url: urlWithoutProfileKey, key: from_hex(key) }; } class OutgoingUserProfile { @@ -48,10 +46,8 @@ class OutgoingUserProfile { }: { displayName: string; updatedAtSeconds: number; - } & ( - | { picUrlWithProfileKey: string | null } - | { profileKey: Uint8Array | string | null; avatarPointer: string | null } - )) { + profilePic: ProfilePicture | null; + }) { if (!isString(displayName)) { throw new Error('displayName is not a string'); } @@ -61,33 +57,16 @@ class OutgoingUserProfile { value: updatedAtSeconds, expectedUnit: 'seconds', }); - if ('picUrlWithProfileKey' in args) { - this.initFromPicWithUrl(args.picUrlWithProfileKey); - } else { - this.initFromPicDetails(args); - } + this.initFromPicDetails(args.profilePic); } - private initFromPicWithUrl(picUrlWithProfileKey: string | null) { - if (!picUrlWithProfileKey) { + private initFromPicDetails(args: ProfilePicture | null) { + if (!args) { this.picUrlWithProfileKey = null; return; } - // this throws if the url is not valid - // or if the fields cannot be extracted - extractPicDetailsFromUrl(picUrlWithProfileKey); - - this.picUrlWithProfileKey = picUrlWithProfileKey; - } - - private initFromPicDetails({ - profileKey: profileKeyIn, - 
avatarPointer, - }: { - profileKey: Uint8Array | string | null; - avatarPointer: string | null; - }) { - if (!profileKeyIn && !avatarPointer) { + const { key: profileKeyIn, url: avatarPointer } = args; + if (!profileKeyIn || !avatarPointer) { this.picUrlWithProfileKey = null; return; } @@ -107,39 +86,39 @@ class OutgoingUserProfile { this.picUrlWithProfileKey = null; return; } + if (profileKey) { - this.picUrlWithProfileKey = `${avatarPointer}#${to_hex(profileKey)}`; + const withProfileKey = addProfileKeyToUrl(new URL(avatarPointer), to_hex(profileKey)); + this.picUrlWithProfileKey = withProfileKey.toString(); } else { this.picUrlWithProfileKey = avatarPointer; } } + /** + * @returns the profile picture with the key in a Uint8Array format. + * Note: this removes the profile key from the url on purpose, + * as we do not want to share this design with the other platforms. + */ public toProfilePicture(): ProfilePicture { return extractPicDetailsFromUrl(this.picUrlWithProfileKey); } + /** + * This function is used to get the profile picture with the key in hex format. + * Note: this removes the profile key from the url on purpose, + * as we do not want to share this design with the other platforms. + */ public toHexProfilePicture() { - const details = extractPicDetailsFromUrl(this.picUrlWithProfileKey); + const details = this.toProfilePicture(); return { url: details.url, key: details.key ? to_hex(details.key) : null }; } - public isEmpty(): boolean { - return !this.displayName && !this.picUrlWithProfileKey; - } - - private emptyProtobufDetails() { - // Note: profileKey: undefined is not allowed by protobuf - return { profile: undefined }; - } - - public getUpdatedAtSeconds(): number { - return this.lastProfileUpdateTs.seconds(); - } - - public getUpdatedAtMs(): number { - return this.lastProfileUpdateTs.ms(); - } - + /** + * This function is used to get the protobuf details that needs to be added to an outgoing message. + * The URL of the profile picture will be stripped of the profile key if it was set on purpose, see `toProfilePicture()`. + * The URL of the profile picture will also be stripped of the serverPk, if the fileServer used is the default one. 
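// Rough round-trip sketch (not part of the diff) of how OutgoingUserProfile carries the profile key:
// internally the key rides in the pointer's fragment (via addProfileKeyToUrl) and is stripped again
// before anything goes on the wire, so other platforms only ever see the bare pointer plus the
// separate profileKey field. The pointer and key below are made up for illustration; the helpers are
// the ones added in ts/session/url/index.ts.
import { to_hex } from 'libsodium-wrappers-sumo';
import { addProfileKeyToUrl, extractDetailsFromUrlFragment } from '../../session/url';

const avatarPointer = 'http://filev2.getsession.org/file/abcdefghijklmnop';
const profileKey = new Uint8Array(32).fill(1);

const picUrlWithProfileKey = addProfileKeyToUrl(new URL(avatarPointer), to_hex(profileKey)).toString();
// e.g. 'http://filev2.getsession.org/file/abcdefghijklmnop#e=0101…'

const { profileKey: keyHex, urlWithoutProfileKey } = extractDetailsFromUrlFragment(
  new URL(picUrlWithProfileKey)
);
// keyHex === to_hex(profileKey), and urlWithoutProfileKey is the bare pointer again.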
+ */ public toProtobufDetails(): Partial> { if (this.isEmpty()) { return this.emptyProtobufDetails(); @@ -160,6 +139,23 @@ class OutgoingUserProfile { // no profileKey provided here return { profile }; } + + public isEmpty(): boolean { + return !this.displayName && !this.picUrlWithProfileKey; + } + + private emptyProtobufDetails() { + // Note: profileKey: undefined is not allowed by protobuf + return { profile: undefined }; + } + + public getUpdatedAtSeconds(): number { + return this.lastProfileUpdateTs.seconds(); + } + + public getUpdatedAtMs(): number { + return this.lastProfileUpdateTs.ms(); + } } export function longOrNumberToNumber(value: number | Long): number { diff --git a/ts/util/attachment/attachmentSizes.ts b/ts/util/attachment/attachmentSizes.ts index 21548a2454..7934f624a4 100644 --- a/ts/util/attachment/attachmentSizes.ts +++ b/ts/util/attachment/attachmentSizes.ts @@ -1,13 +1,19 @@ -import { FILESIZE } from '../../session/constants'; - export const ATTACHMENT_DEFAULT_MAX_SIDE = 4096; export const maxAvatarDetails = { - maxSide: 200, - maxSize: 5 * FILESIZE.MB, + /** + * 600 px + */ + maxSidePlanReupload: 600, + /** + * 200 px + */ + maxSideNoReuploadRequired: 200, }; export const maxThumbnailDetails = { + /** + * 200 px + */ maxSide: 200, - maxSize: 200 * 1000, // 200 ko }; diff --git a/ts/util/avatar/processAvatarData.ts b/ts/util/avatar/processAvatarData.ts index 021b14e3ce..5d54e580a2 100644 --- a/ts/util/avatar/processAvatarData.ts +++ b/ts/util/avatar/processAvatarData.ts @@ -1,6 +1,5 @@ import { isArrayBuffer } from 'lodash'; import { ImageProcessor } from '../../webworker/workers/browser/image_processor_interface'; -import { maxAvatarDetails } from '../attachment/attachmentSizes'; import { MAX_ATTACHMENT_FILESIZE_BYTES } from '../../session/constants'; /** @@ -11,8 +10,13 @@ import { MAX_ATTACHMENT_FILESIZE_BYTES } from '../../session/constants'; * - mainAvatarDetails will be animated (webp enforced) if the source was animated, or a jpeg of the original image * - avatarFallback will be an image (jpeg enforced) of the first frame of `mainAvatarDetails` if it was animated, or null * + * There is a specific case for the avatars that we need to be able to reupload, + * as we do want to keep a resolution of 600 x 600 instead of the usual 200 x 200. + * + * This is because we need to be able to reupload our full avatar to the file server, and mobile pixel density can be 3x. + * We still want to reduce incoming avatars to 200 x 200 for performance reasons. */ -export async function processAvatarData(arrayBuffer: ArrayBuffer) { +export async function processAvatarData(arrayBuffer: ArrayBuffer, planForReupload: boolean) { if (!arrayBuffer || arrayBuffer.byteLength === 0 || !isArrayBuffer(arrayBuffer)) { throw new Error('processAvatarData: arrayBuffer is empty'); } @@ -23,7 +27,7 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { * 2. 
a fallback avatar in case the user looses its pro (static image, even if the main avatar is animated) */ // this is step 1, we generate a scaled down avatar, but keep its nature (animated or not) - const processed = await ImageProcessor.processAvatarData(arrayBuffer, maxAvatarDetails.maxSide); + const processed = await ImageProcessor.processAvatarData(arrayBuffer, planForReupload); if (!processed) { throw new Error('processLocalAvatarChange: failed to process avatar'); @@ -32,17 +36,12 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { const { mainAvatarDetails, avatarFallback } = processed; // sanity check the returned data - if (mainAvatarDetails.isAnimated && mainAvatarDetails.format !== 'webp') { + if (mainAvatarDetails.format !== 'webp' && mainAvatarDetails.format !== 'gif') { throw new Error( 'processLocalAvatarChange: we only support animated mainAvatarDetails in webp after conversion' ); } - // sanity check the returned data - if (!mainAvatarDetails.isAnimated && mainAvatarDetails.format !== 'jpeg') { - throw new Error( - 'processLocalAvatarChange: we only support non animated mainAvatarDetails in jpeg after conversion' - ); - } + if (mainAvatarDetails.isAnimated && !avatarFallback) { throw new Error( 'processLocalAvatarChange: we only support animated mainAvatarDetails with fallback after conversion' @@ -50,7 +49,7 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { } // sanity check the returned data - if (avatarFallback && avatarFallback.format !== 'jpeg') { + if (avatarFallback && avatarFallback.format !== 'webp') { throw new Error( 'processLocalAvatarChange: we only support avatarFallback in jpeg after conversion' ); diff --git a/ts/webworker/workers/browser/image_processor_interface.ts b/ts/webworker/workers/browser/image_processor_interface.ts index fab226af69..82f06cd5b8 100644 --- a/ts/webworker/workers/browser/image_processor_interface.ts +++ b/ts/webworker/workers/browser/image_processor_interface.ts @@ -35,10 +35,6 @@ async function callImageProcessorWorker( } export const ImageProcessor: ImageProcessorWorkerActions = { - extractFirstFrameJpeg: ( - ...args: Parameters - ) => callImageProcessorWorker('extractFirstFrameJpeg', ...args), - imageMetadata: (...args: Parameters) => callImageProcessorWorker('imageMetadata', ...args), diff --git a/ts/webworker/workers/browser/libsession_worker_interface.ts b/ts/webworker/workers/browser/libsession_worker_interface.ts index e61ebb4d4a..2177f3717d 100644 --- a/ts/webworker/workers/browser/libsession_worker_interface.ts +++ b/ts/webworker/workers/browser/libsession_worker_interface.ts @@ -185,10 +185,6 @@ export const UserConfigWrapperActions: UserConfigWrapperActionsCalls = { callLibSessionWorker(['UserConfig', 'getProfileUpdatedSeconds']) as Promise< ReturnType >, - getProfilePicWithKeyHex: async () => - callLibSessionWorker(['UserConfig', 'getProfilePicWithKeyHex']) as Promise< - ReturnType - >, getEnableBlindedMsgRequest: async () => callLibSessionWorker(['UserConfig', 'getEnableBlindedMsgRequest']) as Promise< ReturnType @@ -818,6 +814,16 @@ export const MultiEncryptWrapperActions: MultiEncryptActionsCalls = { callLibSessionWorker(['MultiEncrypt', 'multiDecryptEd25519', args]) as Promise< ReturnType >, + + attachmentDecrypt: async args => + callLibSessionWorker(['MultiEncrypt', 'attachmentDecrypt', args]) as Promise< + ReturnType + >, + + attachmentEncrypt: async args => + callLibSessionWorker(['MultiEncrypt', 'attachmentEncrypt', args]) as Promise< + ReturnType + >, }; export const 
allKnownEncryptionDomains: Array = ['SessionGroupKickedMessage']; diff --git a/ts/webworker/workers/node/image_processor/image_processor.d.ts b/ts/webworker/workers/node/image_processor/image_processor.d.ts index fff25ebea9..1f2cddddfc 100644 --- a/ts/webworker/workers/node/image_processor/image_processor.d.ts +++ b/ts/webworker/workers/node/image_processor/image_processor.d.ts @@ -12,18 +12,21 @@ type WithOutputBuffer = { outputBuffer: ArrayBufferLike }; type WithCustomSharpFormat = { format: T }; -type WithImageFormat = WithCustomSharpFormat & { +type WithImageFormat = WithCustomSharpFormat & { contentType: `image/${T}`; }; -type WithJpegFormat = WithImageFormat<'jpeg'>; -type WithPngFormat = WithImageFormat<'png'>; type WithWebpFormat = WithImageFormat<'webp'>; +type WithGifFormat = WithImageFormat<'gif'>; /** * The output of a always static output image. */ -type StaticOutputType = WithOutputBuffer & WithSharpSize & WithSharpWidth & WithSharpHeight; +type StaticOutputType = WithOutputBuffer & + WithSharpSize & + WithSharpWidth & + WithSharpHeight & + WithSharpFormat; /** * Can be animated or not. Another `With` will be needed to express the type of the content. @@ -39,23 +42,28 @@ export type ProcessedLinkPreviewThumbnailType = NonNullable< >; export type ImageProcessorWorkerActions = { - extractFirstFrameJpeg: ( - input: ArrayBufferLike - ) => Promise<(StaticOutputType & WithJpegFormat) | null>; - /** - * Process an avatar change. + * Process an avatar. Whether gif is allowed as a return format depends on whether we plan to reupload it. + * The reason is that when we plan for reupload, we don't convert gif to webp, as we want to keep the original gif. + * When the change is not planned for reupload, we convert everything to a webp. * This function will generate a mainAvatar, and a fallbackAvatar if needed. * - * The mainAvatar can be animated or not. If animated it is a webp, if not it is a jpeg. - * The fallbackAvatar, if set, is a always a jpeg. + * The mainAvatar can be animated or not. + * - If animated it is an animated gif or webp, + * - If not, it is a static webp (always). + * The fallbackAvatar, if set, is always a static webp. + * + * planForReupload must be true for + * - our own avatar (changed by the current user, locally or not) + * - our own avatar (automatic reupload) + * - (later) a groupv2 avatar: changed locally or not, and on reupload, even if we are not an admin (as we might become one) */ processAvatarData: ( input: ArrayBufferLike, - maxSidePx: number + planForReupload: boolean ) => Promise<{ - mainAvatarDetails: MaybeAnimatedOutputType & (WithWebpFormat | WithJpegFormat); - avatarFallback: (StaticOutputType & WithJpegFormat) | null; + mainAvatarDetails: Omit & WithImageFormat<'gif' | 'webp'>; + avatarFallback: (StaticOutputType & WithWebpFormat) | null; } | null>; /** @@ -65,21 +73,19 @@ export type ImageProcessorWorkerActions = { processForLinkPreviewThumbnail: ( input: ArrayBufferLike, maxSidePx: number - ) => Promise<(StaticOutputType & WithPngFormat) | null>; + ) => Promise<(StaticOutputType & WithWebpFormat) | null>; /** * Process an image to get a thumbnail matching our required details for in conversation thumbnails * This is about the thumbnail in the conversation list (for attachments in messages). We generate a preview to avoid loading huge files until we show them in fullscreen. * - * Note: animated or not, an image will always be returned as a png. - * Note: eventually we want to support animated images as previews too.
When we do, we will need to - convert them to webp and resize their preview heavily for performance reasons. - A 'in conversation thumbnail' is always resized to "cover". + * Note: animated or not, a thumbnail will always be returned as a static webp currently. + * An 'in conversation thumbnail' is always resized to "cover" and enlarged if it was smaller than maxSidePx. */ processForInConversationThumbnail: ( input: ArrayBufferLike, maxSidePx: number - ) => Promise<(MaybeAnimatedOutputType & WithPngFormat) | null>; + ) => Promise<(Omit & WithWebpFormat) | null>; /** * Process an image to get something that we can upload to the file server. @@ -114,7 +120,7 @@ input: ArrayBufferLike, maxSidePx: number, maxSizeBytes: number - ) => Promise; + ) => Promise; /** * Utility function to generate a fake avatar for testing purposes. @@ -123,7 +129,7 @@ testIntegrationFakeAvatar: ( maxSidePx: number, background: { r: number; g: number; b: number } // { r: 0, g: 0, b: 255 } for fully blue - ) => Promise; + ) => Promise & WithWebpFormat>; /** * Extract the metadata retrieved from the image. diff --git a/ts/webworker/workers/node/image_processor/image_processor.worker.ts b/ts/webworker/workers/node/image_processor/image_processor.worker.ts index 1926675ed1..49eda98fa4 100644 --- a/ts/webworker/workers/node/image_processor/image_processor.worker.ts +++ b/ts/webworker/workers/node/image_processor/image_processor.worker.ts @@ -1,6 +1,10 @@ import { isEmpty, isFinite, isNumber } from 'lodash'; import sharp from 'sharp'; -import type { ImageProcessorWorkerActions } from './image_processor'; +import type { + ImageProcessorWorkerActions, + StaticOutputType, + WithWebpFormat, +} from './image_processor'; /* eslint-disable no-console */ /* eslint-disable strict */ @@ -12,6 +16,22 @@ function logIfOn(...args: Array) { } } +const defaultTimeoutProcessingSeconds = 5; + +/** + * Duplicated to be used in the worker environment + */ +const maxAvatarDetails = { + /** + * 600 px + */ + maxSidePlanReupload: 600, + /** + * 200 px + */ + maxSideNoReuploadRequired: 200, +}; + onmessage = async (e: any) => { const [jobId, fnName, ...args] = e.data; @@ -54,11 +74,18 @@ function isAnimated(metadata: sharp.Metadata) { return (metadata.pages || 0) > 1; // more than 1 frame means that the image is animated } -function centerCoverOpts(maxSidePx: number) { +function centerCoverOpts({ + maxSidePx, + withoutEnlargement, +}: { + maxSidePx: number; + withoutEnlargement: boolean; +}) { return { height: maxSidePx, width: maxSidePx, fit: 'cover' as const, // a thumbnail we generate should contain the source image + withoutEnlargement, }; } @@ -108,46 +135,48 @@ async function metadataFromBuffer( } } -const workerActions: ImageProcessorWorkerActions = { - extractFirstFrameJpeg: async inputBuffer => { - if (!inputBuffer?.byteLength) { - throw new Error('inputBuffer is required'); - } - const inputMetadata = await metadataFromBuffer(inputBuffer); - if (!inputMetadata) { - return null; - } +async function extractFirstFrameWebp( + inputBuffer: ArrayBufferLike ): Promise<(StaticOutputType & WithWebpFormat) | null> { + if (!inputBuffer?.byteLength) { + throw new Error('inputBuffer is required'); + } + const inputMetadata = await metadataFromBuffer(inputBuffer); + if (!inputMetadata) { + return null; + } - metadataSizeIsSetOrThrow(inputMetadata, 'extractFirstFrameJpeg'); + metadataSizeIsSetOrThrow(inputMetadata, 'extractFirstFrameWebp'); - if
(!isAnimated(inputMetadata)) { - throw new Error('extractFirstFrameJpeg: input is not animated'); - } + if (!isAnimated(inputMetadata)) { + throw new Error('extractFirstFrameWebp: input is not animated'); + } - const parsed = sharpFrom(inputBuffer, { pages: 1 }); - const jpeg = parsed.jpeg(); - const outputBuffer = await jpeg.toBuffer(); - const outputMetadata = await metadataFromBuffer(outputBuffer); - if (!outputMetadata) { - return null; - } + const parsed = sharpFrom(inputBuffer, { pages: 1 }); + const webp = parsed.webp(); + const outputBuffer = await webp.toBuffer(); + const outputMetadata = await metadataFromBuffer(outputBuffer); + if (!outputMetadata) { + return null; + } - const outputMetadataSize = metadataSizeIsSetOrThrow(outputMetadata, 'extractFirstFrameJpeg'); + const outputMetadataSize = metadataSizeIsSetOrThrow(outputMetadata, 'extractFirstFrameWebp'); - if (isAnimated(outputMetadata)) { - throw new Error('extractFirstFrameJpeg: outputMetadata cannot be animated'); - } + if (isAnimated(outputMetadata)) { + throw new Error('extractFirstFrameWebp: outputMetadata cannot be animated'); + } - return { - outputBuffer: outputBuffer.buffer, - width: outputMetadata.width, - height: outputMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` - size: outputMetadataSize, - format: 'jpeg' as const, - contentType: 'image/jpeg' as const, - }; - }, + return { + outputBuffer: outputBuffer.buffer, + width: outputMetadata.width, + height: outputMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` + size: outputMetadataSize, + format: 'webp' as const, + contentType: 'image/webp' as const, + }; +} +const workerActions: ImageProcessorWorkerActions = { imageMetadata: async inputBuffer => { if (!inputBuffer?.byteLength) { throw new Error('imageMetadata: inputBuffer is required'); @@ -170,7 +199,7 @@ }; }, - processAvatarData: async (inputBuffer: ArrayBufferLike, maxSidePx: number) => { + processAvatarData: async (inputBuffer: ArrayBufferLike, planForReupload: boolean) => { if (!inputBuffer?.byteLength) { throw new Error('processAvatarData: inputBuffer is required'); } @@ -189,16 +218,34 @@ // generate a square image of the avatar, scaled down or up to `maxSide` - const resized = sharpFrom(inputBuffer, { animated: true }).resize(centerCoverOpts(maxSidePx)); + const resized = sharpFrom(inputBuffer, { animated: true }).resize( + centerCoverOpts({ + maxSidePx: planForReupload + ? maxAvatarDetails.maxSidePlanReupload + : maxAvatarDetails.maxSideNoReuploadRequired, + withoutEnlargement: true, + }) + ); - // we know the avatar is animated and gif or webp, force it to webp for performance reasons - if (avatarIsAnimated) { - resized.webp(); + const isSourceGif = metadata.format === 'gif'; + // when planning for reupload, a gif source is kept as a gif so it can be reuploaded as-is; + // otherwise the output is always a webp (animated or static, matching the source).
+ if (planForReupload) { + // see the comment in image_processor.d.ts: + // we don't want to convert gif to webp when planning for reupload + if (isSourceGif) { + resized.gif(); + } else { + resized.webp(); + } } else { - resized.jpeg(); + // when not planning for reupload, we always want a webp + resized.webp(); } - const resizedBuffer = await resized.toBuffer(); + const resizedBuffer = await resized + .timeout({ seconds: defaultTimeoutProcessingSeconds }) + .toBuffer(); // Note: we need to use the resized buffer here, not the original one, // as metadata is always linked to the source buffer (even if a resize() is done before the metadata call) @@ -216,44 +263,42 @@ const workerActions: ImageProcessorWorkerActions = { const resizedIsAnimated = isAnimated(resizedMetadata); - const formatDetails = avatarIsAnimated - ? { format: 'webp' as const, contentType: 'image/webp' as const } - : { format: 'jpeg' as const, contentType: 'image/jpeg' as const }; - - const mainAvatarDetails = { - outputBuffer: resizedBuffer.buffer, - height: resizedMetadata.height, - width: resizedMetadata.width, - isAnimated: resizedIsAnimated, - ...formatDetails, - size: resizedMetadataSize, - }; - let avatarFallback = null; if (resizedIsAnimated) { // also extract the first frame of the resized (animated) avatar - const firstFrameJpeg = await workerActions.extractFirstFrameJpeg(resizedBuffer.buffer); - if (!firstFrameJpeg) { - throw new Error('processAvatarData: failed to extract first frame as jpeg'); + const firstFrameWebp = await extractFirstFrameWebp(resizedBuffer.buffer); + if (!firstFrameWebp) { + throw new Error('processAvatarData: failed to extract first frame as webp'); } - const fallbackFormat = 'jpeg' as const; + const fallbackFormat = 'webp' as const; avatarFallback = { - outputBuffer: firstFrameJpeg.outputBuffer, - height: firstFrameJpeg.height, // this one is only the frame height already. No need for `metadataToFrameHeight` - width: firstFrameJpeg.width, + outputBuffer: firstFrameWebp.outputBuffer, + height: firstFrameWebp.height, // this one is only the frame height already. No need for `metadataToFrameHeight` + width: firstFrameWebp.width, format: fallbackFormat, contentType: `image/${fallbackFormat}` as const, - size: firstFrameJpeg.size, + size: firstFrameWebp.size, }; } logIfOn( - `[imageProcessorWorker] processAvatarData sizes: main: ${mainAvatarDetails.size} bytes, fallback: ${avatarFallback ? avatarFallback.size : 0} bytes` + `[imageProcessorWorker] processAvatarData sizes: main: ${resizedMetadataSize} bytes, fallback: ${avatarFallback ? avatarFallback.size : 0} bytes` ); - return { mainAvatarDetails, avatarFallback }; + return { + mainAvatarDetails: { + outputBuffer: resizedBuffer.buffer, + height: resizedMetadata.height, + width: resizedMetadata.width, + isAnimated: resizedIsAnimated, + format: planForReupload && isSourceGif ? 'gif' : 'webp', + contentType: planForReupload && isSourceGif ? 
'image/gif' : 'image/webp', + size: resizedMetadataSize, + }, + avatarFallback, + }; }, testIntegrationFakeAvatar: async ( @@ -267,7 +312,7 @@ const workerActions: ImageProcessorWorkerActions = { channels: 3, // RGB background, }, - }).jpeg({ quality: 90 }); + }).webp({ quality: 90 }); const createdBuffer = await created.toBuffer(); const createdMetadata = await metadataFromBuffer(createdBuffer); @@ -279,7 +324,7 @@ const workerActions: ImageProcessorWorkerActions = { const size = metadataSizeIsSetOrThrow(createdMetadata, 'testIntegrationFakeAvatar'); - const format = 'jpeg' as const; + const format = 'webp' as const; return { outputBuffer: createdBuffer.buffer, height: createdMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` @@ -305,9 +350,10 @@ const workerActions: ImageProcessorWorkerActions = { metadataSizeIsSetOrThrow(metadata, 'processForLinkPreviewThumbnail'); - const resized = parsed.resize(centerCoverOpts(maxSidePx)); + // for thumbnail, we actually want to enlarge the image if required + const resized = parsed.resize(centerCoverOpts({ maxSidePx, withoutEnlargement: false })); - const resizedBuffer = await resized.png().toBuffer(); + const resizedBuffer = await resized.webp().toBuffer(); const resizedMetadata = await metadataFromBuffer(resizedBuffer); if (!resizedMetadata) { @@ -316,7 +362,7 @@ const workerActions: ImageProcessorWorkerActions = { const resizedSize = metadataSizeIsSetOrThrow(resizedMetadata, 'processForLinkPreviewThumbnail'); - const format = 'png' as const; + const format = 'webp' as const; return { outputBuffer: resizedBuffer.buffer, @@ -328,13 +374,15 @@ const workerActions: ImageProcessorWorkerActions = { }; }, - processForInConversationThumbnail: async (inputBuffer, maxSidePx) => { + processForInConversationThumbnail: async (inputBuffer: ArrayBufferLike, maxSidePx: number) => { if (!inputBuffer?.byteLength) { throw new Error('processForInConversationThumbnail: inputBuffer is required'); } - // Note: this is false here because we want to force a static image (so no need to extract all the frames) - const parsed = sharpFrom(inputBuffer, { animated: false }).resize(centerCoverOpts(maxSidePx)); + // Note: this `animated` is false here because we want to force a static image (so no need to extract all the frames) + const parsed = sharpFrom(inputBuffer, { animated: false }).resize( + centerCoverOpts({ maxSidePx, withoutEnlargement: false }) // We actually want to enlarge the image if required for a thumbnail in conversation + ); const metadata = await metadataFromBuffer(inputBuffer, { animated: false }); if (!metadata) { @@ -342,7 +390,10 @@ const workerActions: ImageProcessorWorkerActions = { } const animated = isAnimated(metadata); - const resizedBuffer = await parsed.png().toBuffer(); // animated ? 
await parsed.webp().toBuffer() : ; + const resizedBuffer = await parsed + .webp() + .timeout({ seconds: defaultTimeoutProcessingSeconds }) + .toBuffer(); const resizedMetadata = await metadataFromBuffer(resizedBuffer); if (!resizedMetadata) { @@ -351,7 +402,7 @@ const workerActions: ImageProcessorWorkerActions = { const size = metadataSizeIsSetOrThrow(resizedMetadata, 'processForInConversationThumbnail'); - const formatDetails = { format: 'png' as const, contentType: 'image/png' as const }; + const formatDetails = { format: 'webp' as const, contentType: 'image/webp' as const }; return { outputBuffer: resizedBuffer.buffer, @@ -363,7 +414,11 @@ const workerActions: ImageProcessorWorkerActions = { }; }, - processForFileServerUpload: async (inputBuffer, maxSidePx, maxSizeBytes) => { + processForFileServerUpload: async ( + inputBuffer: ArrayBufferLike, + maxSidePx: number, + maxSizeBytes: number + ) => { if (!inputBuffer?.byteLength) { throw new Error('processForFileServerUpload: inputBuffer is required'); } @@ -469,7 +524,7 @@ const workerActions: ImageProcessorWorkerActions = { } // eslint-disable-next-line no-await-in-loop - const buffer = await pipeline.toBuffer(); + const buffer = await pipeline.toBuffer(); // no timeout here for now if (buffer.length < maxSizeBytes) { // eslint-disable-next-line no-await-in-loop diff --git a/yarn.lock b/yarn.lock index 984617a9ad..cf8dfae051 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5121,9 +5121,9 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -"libsession_util_nodejs@https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz": - version "0.5.8" - resolved "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz#5bd0bd24782813bb5f67eee767f7840d42bce93b" +"libsession_util_nodejs@https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz": + version "0.5.9" + resolved "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz#33248ecab095c8b1223c3582890d9708731a3ab8" dependencies: cmake-js "7.3.1" node-addon-api "^8.3.1" @@ -5720,9 +5720,9 @@ node-addon-api@^8.3.1: integrity sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A== node-api-headers@^1.1.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.5.0.tgz#73a0bab642c77e39b815b6d24ad4c6b56f695912" - integrity sha512-Yi/FgnN8IU/Cd6KeLxyHkylBUvDTsSScT0Tna2zTrz8klmc8qF2ppj6Q1LHsmOueJWhigQwR4cO2p0XBGW5IaQ== + version "1.6.0" + resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.6.0.tgz#88ad7fb817b44e33baba81f92ae1566bda462d0c" + integrity sha512-81T99+mWLZnxX0LlZPYuafyFlxVVaWKQ0BDAbSrOqLO+v+gzCzu0GTAVNeVK8lucqjqo9L/1UcK9cpkem8Py4Q== node-api-version@^0.2.0: version "0.2.0"
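For reference, a minimal sketch of how the reworked avatar pipeline above is expected to be driven from a call site such as ts/util/avatar/processAvatarData.ts. Only ImageProcessor.processAvatarData, its (input, planForReupload) signature and its { mainAvatarDetails, avatarFallback } return shape come from the changes in this diff; the function name and the surrounding handling below are illustrative, not part of the patch.

// Hypothetical usage sketch (not part of the diff); import path assumes a caller under ts/util/avatar/.
import { ImageProcessor } from '../../webworker/workers/browser/image_processor_interface';

async function sketchProcessOwnAvatar(avatarBytes: ArrayBuffer) {
  // planForReupload = true: keep up to 600 x 600 (never enlarged) and keep a gif source as gif,
  // so the same file can later be reuploaded to the file server. Incoming avatars would pass
  // false here and be reduced to a 200 x 200 webp instead.
  const processed = await ImageProcessor.processAvatarData(avatarBytes, true);
  if (!processed) {
    throw new Error('avatar processing failed');
  }
  const { mainAvatarDetails, avatarFallback } = processed;
  // mainAvatarDetails.format is 'gif' only for a gif source processed for reupload, otherwise 'webp'.
  // avatarFallback is a static webp and is only set when mainAvatarDetails is animated.
  return { main: mainAvatarDetails, fallback: avatarFallback ?? mainAvatarDetails };
}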