diff --git a/package.json b/package.json index 020945e5a8..cb6fa4a04b 100644 --- a/package.json +++ b/package.json @@ -76,7 +76,7 @@ "fs-extra": "11.3.0", "glob": "10.4.5", "image-type": "^4.1.0", - "libsession_util_nodejs": "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz", + "libsession_util_nodejs": "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz", "libsodium-wrappers-sumo": "^0.7.15", "linkify-it": "^5.0.0", "lodash": "^4.17.21", diff --git a/preload.js b/preload.js index 68a925d9c0..829de8dbb9 100644 --- a/preload.js +++ b/preload.js @@ -57,6 +57,7 @@ window.sessionFeatureFlags = { replaceLocalizedStringsWithKeys: false, // Hooks useClosedGroupV2QAButtons: false, // TODO DO NOT MERGE + useDeterministicEncryption: !isEmpty(process.env.SESSION_ATTACH_DETERMINISTIC_ENCRYPTION), useOnionRequests: true, useTestNet: isTestNet() || isTestIntegration(), useLocalDevNet: !isEmpty(process.env.LOCAL_DEVNET_SEED_URL) diff --git a/protos/SignalService.proto b/protos/SignalService.proto index 14df6e5db9..976b8f34cf 100644 --- a/protos/SignalService.proto +++ b/protos/SignalService.proto @@ -178,17 +178,11 @@ message DataMessage { } message Quote { - - message QuotedAttachment { - optional string contentType = 1; - optional string fileName = 2; - optional AttachmentPointer thumbnail = 3; - } + reserved 3, 4; + reserved "text", "attachments"; required uint64 id = 1; required string author = 2; - optional string text = 3; - repeated QuotedAttachment attachments = 4; } message Preview { @@ -268,16 +262,22 @@ message AttachmentPointer { // @required required fixed64 deprecated_id = 1; - optional string contentType = 2; - optional bytes key = 3; - optional uint32 size = 4; - optional bytes digest = 6; - optional string fileName = 7; - optional uint32 flags = 8; - optional uint32 width = 9; - optional uint32 height = 10; - optional 
string caption = 11; - optional string url = 101; + optional string contentType = 2; + optional bytes key = 3; + optional uint32 size = 4; + optional bytes digest = 6; + optional string fileName = 7; + optional uint32 flags = 8; + optional uint32 width = 9; + optional uint32 height = 10; + optional string caption = 11; + /** + * This field can be just an url to the file, or have a fragment appended to it that can contain: + * - `p=` // hex encoded pubkey of the file server + * - `d=` // if the file is deterministically encrypted, this field is present, otherwise it is not + * If needed, those fields are a &, and can be parsed/built with the usual URLSearchParams logic + */ + optional string url = 101; } diff --git a/ts/components/conversation/composition/CompositionBox.tsx b/ts/components/conversation/composition/CompositionBox.tsx index b3dcc7c8d9..b7b78082c9 100644 --- a/ts/components/conversation/composition/CompositionBox.tsx +++ b/ts/components/conversation/composition/CompositionBox.tsx @@ -83,10 +83,10 @@ export interface StagedLinkPreviewData { scaledDown: ProcessedLinkPreviewThumbnailType | null; } -export interface StagedAttachmentType extends AttachmentType { +export type StagedAttachmentType = AttachmentType & { file: File; path?: string; // a bit hacky, but this is the only way to make our sending audio message be playable, this must be used only for those message -} +}; export type SendMessageType = { conversationId: string; diff --git a/ts/components/conversation/message/message-content/MessageContextMenu.tsx b/ts/components/conversation/message/message-content/MessageContextMenu.tsx index ce14864ee8..315621110f 100644 --- a/ts/components/conversation/message/message-content/MessageContextMenu.tsx +++ b/ts/components/conversation/message/message-content/MessageContextMenu.tsx @@ -216,7 +216,10 @@ export const MessageContextMenu = (props: Props) => { }, [isSelectedBlocked, messageId]); const copyText = useCallback(() => { - 
MessageInteraction.copyBodyToClipboard(text); + const selection = window.getSelection(); + const selectedText = selection?.toString().trim(); + // Note: we want to allow to copy through the "Copy" menu item the currently selected text, if any. + MessageInteraction.copyBodyToClipboard(selectedText || text); }, [text]); const onSelect = useCallback(() => { diff --git a/ts/components/conversation/right-panel/overlay/message-info/OverlayMessageInfo.tsx b/ts/components/conversation/right-panel/overlay/message-info/OverlayMessageInfo.tsx index 2f1dc6ff10..4fb4f74c70 100644 --- a/ts/components/conversation/right-panel/overlay/message-info/OverlayMessageInfo.tsx +++ b/ts/components/conversation/right-panel/overlay/message-info/OverlayMessageInfo.tsx @@ -117,9 +117,12 @@ async function getPropsForMessageInfo( const found = await Data.getMessageById(messageId); const attachmentsWithMediaDetails: Array = []; if (found) { + const attachmentsInMsg = found.get('attachments') || []; + // process attachments so we have the fileSize, url and screenshots for (let i = 0; i < attachments.length; i++) { const props = found.getPropsForAttachment(attachments[i]); + const fsUrl = attachmentsInMsg?.[i].url; if ( props?.contentType && GoogleChrome.isVideoTypeSupported(props?.contentType) && @@ -134,6 +137,7 @@ async function getPropsForMessageInfo( attachmentsWithMediaDetails.push({ ...props, duration, + url: fsUrl, }); } else if (props?.contentType && isAudio(props.contentType) && !props.duration && props.url) { // eslint-disable-next-line no-await-in-loop @@ -145,9 +149,10 @@ async function getPropsForMessageInfo( attachmentsWithMediaDetails.push({ ...props, duration, + url: fsUrl, }); } else if (props) { - attachmentsWithMediaDetails.push(props); + attachmentsWithMediaDetails.push({ ...props, url: fsUrl }); } } diff --git a/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx 
b/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx index 885520ce3f..f2ce68addf 100644 --- a/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx +++ b/ts/components/conversation/right-panel/overlay/message-info/components/AttachmentInfo.tsx @@ -4,6 +4,8 @@ import { PropsForAttachment } from '../../../../../../state/ducks/conversations' import { Flex } from '../../../../../basic/Flex'; import { tr } from '../../../../../../localization/localeTools'; import { saveLogToDesktop } from '../../../../../../util/logger/renderer_process_logging'; +import { extractDetailsFromUrlFragment } from '../../../../../../session/url'; +import { isDevProd } from '../../../../../../shared/env_vars'; type Props = { attachment: PropsForAttachment; @@ -16,7 +18,7 @@ const StyledLabelContainer = styled(Flex)` } `; -function formatAttachmentUrl(attachment: PropsForAttachment) { +function formatAttachmentUrl(attachment: Pick) { // Note: desktop overwrites the url with the local path once the file is downloaded, // and I think this is how we know the file was downloaded. @@ -28,7 +30,8 @@ function formatAttachmentUrl(attachment: PropsForAttachment) { return tr('attachmentsNa'); } - const fileId = attachment.url.split('/').pop() || ''; + const fileUrl = URL.canParse(attachment.url) && new URL(attachment.url); + const fileId = fileUrl ? fileUrl?.pathname.split('/').pop() || '' : ''; if (!fileId) { return tr('attachmentsNa'); @@ -37,12 +40,23 @@ function formatAttachmentUrl(attachment: PropsForAttachment) { return fileId; } +function extractAttachmentDetails(attachment: Pick) { + const fileUrl = URL.canParse(attachment?.url) && new URL(attachment.url); + return { + deterministicEncryption: + (fileUrl && extractDetailsFromUrlFragment(fileUrl)?.deterministicEncryption) || false, + fsHost: fileUrl ? 
fileUrl.hostname : tr('attachmentsNa'), + }; +} + export const AttachmentInfo = (props: Props) => { const { attachment } = props; // NOTE the attachment.url will be an empty string if the attachment is broken const hasError = attachment.error || attachment.url === ''; + const { deterministicEncryption, fsHost } = extractAttachmentDetails(attachment); + return ( @@ -78,6 +92,15 @@ export const AttachmentInfo = (props: Props) => { }} /> ) : null} + {isDevProd() ? ( + <> + + + + ) : null} ); diff --git a/ts/components/dialog/EditProfilePictureModal.tsx b/ts/components/dialog/EditProfilePictureModal.tsx index 02146e0313..a649833778 100644 --- a/ts/components/dialog/EditProfilePictureModal.tsx +++ b/ts/components/dialog/EditProfilePictureModal.tsx @@ -141,6 +141,7 @@ export const EditProfilePictureModal = ({ conversationId }: EditProfilePictureMo const ourAvatarIsUploading = useOurAvatarIsUploading(); const ourAvatarUploadFailed = useOurAvatarUploadFailed(); const sogsAvatarIsUploading = useAvatarOfRoomIsUploading(conversationId); + const [isProcessing, setIsProcessing] = useState(false); const [newAvatarObjectUrl, setNewAvatarObjectUrl] = useState(avatarPath); const [isNewAvatarAnimated, setIsNewAvatarAnimated] = useState(false); @@ -169,7 +170,7 @@ export const EditProfilePictureModal = ({ conversationId }: EditProfilePictureMo const isPublic = useIsPublic(conversationId); const handleAvatarClick = async () => { - const res = await pickFileForAvatar(); + const res = await pickFileForAvatar(setIsProcessing); if (!res) { window.log.error('Failed to pick avatar'); @@ -210,7 +211,8 @@ export const EditProfilePictureModal = ({ conversationId }: EditProfilePictureMo await triggerUploadProfileAvatar(newAvatarObjectUrl, conversationId, dispatch); }; - const loading = ourAvatarIsUploading || groupAvatarChangePending || sogsAvatarIsUploading; + const loading = + ourAvatarIsUploading || groupAvatarChangePending || sogsAvatarIsUploading || isProcessing; const newAvatarLoaded = 
newAvatarObjectUrl !== avatarPath; @@ -328,7 +330,7 @@ export const EditProfilePictureModal = ({ conversationId }: EditProfilePictureMo {loading ? ( <> - {isMe ? : null} + {isMe && !isProcessing ? : null} ) : ( diff --git a/ts/components/leftpane/ActionsPanel.tsx b/ts/components/leftpane/ActionsPanel.tsx index ca66eb466b..883be17efa 100644 --- a/ts/components/leftpane/ActionsPanel.tsx +++ b/ts/components/leftpane/ActionsPanel.tsx @@ -22,7 +22,6 @@ import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttac import { DURATION } from '../../session/constants'; -import { reuploadCurrentAvatarUs } from '../../interactions/avatar-interactions/nts-avatar-interactions'; import { onionPathModal, updateDebugMenuModal, @@ -52,14 +51,13 @@ import { useDebugMode } from '../../state/selectors/debug'; import { networkDataActions } from '../../state/ducks/networkData'; import { LUCIDE_ICONS_UNICODE } from '../icon/lucide'; import { AvatarMigrate } from '../../session/utils/job_runners/jobs/AvatarMigrateJob'; -import { NetworkTime } from '../../util/NetworkTime'; import { Storage } from '../../util/storage'; -import { getFileInfoFromFileServer } from '../../session/apis/file_server_api/FileServerApi'; import { themesArray } from '../../themes/constants/colors'; import { isDebugMode, isDevProd } from '../../shared/env_vars'; import { GearAvatarButton } from '../buttons/avatar/GearAvatarButton'; import { useZoomShortcuts } from '../../hooks/useZoomingShortcut'; import { OnionStatusLight } from '../dialog/OnionStatusPathDialog'; +import { AvatarReupload } from '../../session/utils/job_runners/jobs/AvatarReuploadJob'; const StyledContainerAvatar = styled.div` padding: var(--margins-lg); @@ -98,17 +96,6 @@ const triggerSyncIfNeeded = async () => { } }; -const triggerAvatarReUploadIfNeeded = async () => { - const lastAvatarUploadExpiryMs = - (await Data.getItemById(SettingsKey.ntsAvatarExpiryMs))?.value || Number.MAX_SAFE_INTEGER; - - if (NetworkTime.now() > 
lastAvatarUploadExpiryMs) { - window.log.info('Reuploading avatar...'); - // reupload the avatar - await reuploadCurrentAvatarUs(); - } -}; - /** * This function is called only once: on app startup with a logged in user */ @@ -127,9 +114,8 @@ const doAppStartUp = async () => { }); // refresh our swarm on start to speed up the first message fetching event void Data.cleanupOrphanedAttachments(); - // TODOLATER make this a job of the JobRunner // Note: do not make this a debounce call (as for some reason it doesn't work with promises) - void triggerAvatarReUploadIfNeeded(); + await AvatarReupload.addAvatarReuploadJob(); /* Postpone a little bit of the polling of sogs messages to let the swarm messages come in first. */ global.setTimeout(() => { @@ -147,17 +133,6 @@ const doAppStartUp = async () => { // Schedule a confSyncJob in some time to let anything incoming from the network be applied and see if there is a push needed // Note: this also starts periodic jobs, so we don't need to keep doing it await UserSync.queueNewJobIfNeeded(); - - // on app startup, check that the avatar expiry on the file server - const avatarPointer = ConvoHub.use() - .get(UserUtils.getOurPubKeyStrFromCache()) - .getAvatarPointer(); - if (avatarPointer) { - const details = await getFileInfoFromFileServer(avatarPointer); - if (details?.expiryMs) { - await Storage.put(SettingsKey.ntsAvatarExpiryMs, details.expiryMs); - } - } }, 20000); global.setTimeout(() => { @@ -283,8 +258,7 @@ export const ActionsPanel = () => { if (!ourPrimaryConversation) { return; } - // this won't be run every days, but if the app stays open for more than 10 days - void triggerAvatarReUploadIfNeeded(); + void AvatarReupload.addAvatarReuploadJob(); }, window.sessionFeatureFlags.fsTTL30s ? 
DURATION.SECONDS * 1 : DURATION.DAYS * 1 ); diff --git a/ts/data/settings-key.ts b/ts/data/settings-key.ts index ee5707eb85..57644e5de4 100644 --- a/ts/data/settings-key.ts +++ b/ts/data/settings-key.ts @@ -11,7 +11,6 @@ const settingsOpengroupPruning = 'prune-setting'; const settingsNotification = 'notification-setting'; const settingsAudioNotification = 'audio-notification-setting'; const hasSyncedInitialConfigurationItem = 'hasSyncedInitialConfigurationItem'; -const ntsAvatarExpiryMs = 'ntsAvatarExpiryMs'; const hasLinkPreviewPopupBeenDisplayed = 'hasLinkPreviewPopupBeenDisplayed'; const hasFollowSystemThemeEnabled = 'hasFollowSystemThemeEnabled'; const hideRecoveryPassword = 'hideRecoveryPassword'; @@ -44,7 +43,6 @@ export const SettingsKey = { settingsNotification, settingsAudioNotification, hasSyncedInitialConfigurationItem, - ntsAvatarExpiryMs, hasLinkPreviewPopupBeenDisplayed, latestUserProfileEnvelopeTimestamp, latestUserGroupEnvelopeTimestamp, diff --git a/ts/interactions/avatar-interactions/nts-avatar-interactions.ts b/ts/interactions/avatar-interactions/nts-avatar-interactions.ts index 3ae3182179..707888e668 100644 --- a/ts/interactions/avatar-interactions/nts-avatar-interactions.ts +++ b/ts/interactions/avatar-interactions/nts-avatar-interactions.ts @@ -1,99 +1,68 @@ -import { isEmpty } from 'lodash'; -import { SettingsKey } from '../../data/settings-key'; +import { randombytes_buf } from 'libsodium-wrappers-sumo'; + import { uploadFileToFsWithOnionV4 } from '../../session/apis/file_server_api/FileServerApi'; -import { ConvoHub } from '../../session/conversations'; -import { DecryptedAttachmentsManager } from '../../session/crypto/DecryptedAttachmentsManager'; -import { UserUtils } from '../../session/utils'; -import { fromHexToArray } from '../../session/utils/String'; -import { urlToBlob } from '../../types/attachments/VisualAttachment'; import { processNewAttachment } from '../../types/MessageAttachment'; -import { IMAGE_JPEG } from 
'../../types/MIME'; import { encryptProfile } from '../../util/crypto/profileEncrypter'; -import { Storage } from '../../util/storage'; import type { ConversationModel } from '../../models/conversation'; import { processAvatarData } from '../../util/avatar/processAvatarData'; -import { UserConfigWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; - -/** - * This function can be used for reupload our avatar to the file server. - * It will reuse the same profileKey and avatarContent if we have some, or do nothing if one of those is missing. - */ -export async function reuploadCurrentAvatarUs() { - const ourConvo = ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache()); - if (!ourConvo) { - window.log.warn('ourConvo not found... This is not a valid case'); - return null; - } - - // this is a reupload. no need to generate a new profileKey - const ourConvoProfileKey = - ConvoHub.use().get(UserUtils.getOurPubKeyStrFromCache())?.getProfileKey() || null; - - const profileKey = ourConvoProfileKey ? fromHexToArray(ourConvoProfileKey) : null; - if (!profileKey || isEmpty(profileKey)) { - window.log.info('reuploadCurrentAvatarUs: our profileKey empty'); - - return null; - } - // Note: we do want to grab the current non-static avatar path here - // to reupload it, no matter if we are a pro user or not. - const currentNonStaticAvatarPath = ourConvo.getAvatarInProfilePath(); - - if (!currentNonStaticAvatarPath) { - window.log.info('No attachment currently set for our convo.. 
Nothing to do.'); - return null; - } - - const decryptedAvatarUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( - currentNonStaticAvatarPath, - IMAGE_JPEG, - true - ); - - if (!decryptedAvatarUrl) { - window.log.warn('Could not decrypt avatar stored locally..'); - return null; - } - const blob = await urlToBlob(decryptedAvatarUrl); - - const decryptedAvatarData = await blob.arrayBuffer(); - - return uploadAndSetOurAvatarShared({ - decryptedAvatarData, - ourConvo, - profileKey, - context: 'reuploadAvatar', - }); -} +import { + MultiEncryptWrapperActions, + UserConfigWrapperActions, +} from '../../webworker/workers/browser/libsession_worker_interface'; +import { UserUtils } from '../../session/utils'; +import { fromHexToArray } from '../../session/utils/String'; export async function uploadAndSetOurAvatarShared({ decryptedAvatarData, ourConvo, - profileKey, context, }: { ourConvo: ConversationModel; decryptedAvatarData: ArrayBuffer; - profileKey: Uint8Array; context: 'uploadNewAvatar' | 'reuploadAvatar'; }) { if (!decryptedAvatarData?.byteLength) { window.log.warn('uploadAndSetOurAvatarShared: avatar content is empty'); return null; } + // Note: we want to encrypt & upload the **processed** avatar + // below (resized & converted), not the original one. + const { avatarFallback, mainAvatarDetails } = await processAvatarData(decryptedAvatarData, true); + + let encryptedData: ArrayBuffer; + let encryptionKey: Uint8Array; + const deterministicEncryption = window.sessionFeatureFlags?.useDeterministicEncryption; + if (deterministicEncryption) { + const encryptedContent = await MultiEncryptWrapperActions.attachmentEncrypt({ + allowLarge: false, + seed: await UserUtils.getUserEd25519Seed(), + data: new Uint8Array(mainAvatarDetails.outputBuffer), + domain: 'profilePic', + }); + encryptedData = encryptedContent.encryptedData; + encryptionKey = encryptedContent.encryptionKey; + } else { + // if this is a reupload, reuse the current profile key. 
Otherwise generate a new one + const existingProfileKeyHex = ourConvo.getProfileKeyHex(); + const profileKey = + context === 'reuploadAvatar' && existingProfileKeyHex + ? fromHexToArray(existingProfileKeyHex) + : randombytes_buf(32); + encryptedData = await encryptProfile(mainAvatarDetails.outputBuffer, profileKey); + encryptionKey = profileKey; + } - const encryptedData = await encryptProfile(decryptedAvatarData, profileKey); - - const avatarPointer = await uploadFileToFsWithOnionV4(encryptedData); + const avatarPointer = await uploadFileToFsWithOnionV4(encryptedData, deterministicEncryption); if (!avatarPointer) { window.log.warn('failed to upload avatar to file server'); return null; } - const { fileUrl, expiresMs } = avatarPointer; + // Note: we don't care about the expiry of the file anymore. + // This is because we renew the expiry of the file itself, and only when that fails we reupload the avatar content. + const { fileUrl } = avatarPointer; // Note: processing the avatar here doesn't change the buffer (unless the first one was uploaded as an image too big for an avatar.) // so, once we have deterministic encryption of avatars, the uploaded should always have the same hash - const { avatarFallback, mainAvatarDetails } = await processAvatarData(decryptedAvatarData); // this encrypts and save the new avatar and returns a new attachment path const savedMainAvatar = await processNewAttachment({ @@ -106,7 +75,7 @@ export async function uploadAndSetOurAvatarShared({ ? await processNewAttachment({ isRaw: true, data: avatarFallback.outputBuffer, - contentType: avatarFallback.contentType, // contentType is mostly used to generate previews and screenshot. We do not care for those in this case. 
+ contentType: avatarFallback.contentType, }) : null; @@ -118,23 +87,22 @@ export async function uploadAndSetOurAvatarShared({ displayName: null, avatarPointer: fileUrl, type: 'setAvatarDownloadedPrivate', - profileKey, + profileKey: encryptionKey, }); - await Storage.put(SettingsKey.ntsAvatarExpiryMs, expiresMs); if (context === 'uploadNewAvatar') { await UserConfigWrapperActions.setNewProfilePic({ - key: profileKey, + key: encryptionKey, url: fileUrl, }); } else if (context === 'reuploadAvatar') { await UserConfigWrapperActions.setReuploadProfilePic({ - key: profileKey, + key: encryptionKey, url: fileUrl, }); } return { avatarPointer: ourConvo.getAvatarPointer(), - profileKey: ourConvo.getProfileKey(), + profileKey: ourConvo.getProfileKeyHex(), }; } diff --git a/ts/mains/main_node.ts b/ts/mains/main_node.ts index 699912e511..5e7ae49a79 100644 --- a/ts/mains/main_node.ts +++ b/ts/mains/main_node.ts @@ -194,7 +194,7 @@ import { setLatestRelease } from '../node/latest_desktop_release'; import { isDevProd, isTestIntegration } from '../shared/env_vars'; import { classicDark } from '../themes'; -import { isSessionLocaleSet, getCrowdinLocale } from '../util/i18n/shared'; +import { isSessionLocaleSet, getCrowdinLocale, keepFullLocalePart } from '../util/i18n/shared'; import { loadLocalizedDictionary } from '../node/locale'; import { simpleDictionaryNoArgs } from '../localization/locales'; import LIBSESSION_CONSTANTS from '../session/utils/libsession/libsession_constants'; @@ -779,7 +779,7 @@ app.on('ready', async () => { ); if (!isSessionLocaleSet()) { - const appLocale = process.env.LANGUAGE || app.getLocale() || 'en'; + const appLocale = keepFullLocalePart(process.env.LANGUAGE || app.getLocale() || 'en'); const loadedLocale = loadLocalizedDictionary({ appLocale }); console.log(`appLocale is ${appLocale}`); console.log(`crowdin locale is ${loadedLocale.crowdinLocale}`); diff --git a/ts/models/conversation.ts b/ts/models/conversation.ts index 942d563faf..a6c80b278c 
100644 --- a/ts/models/conversation.ts +++ b/ts/models/conversation.ts @@ -215,16 +215,9 @@ type SetSessionProfileReturn = { * We need to do some extra processing for private actions, as they have a updatedAtSeconds field. */ function isSetProfileWithUpdatedAtSeconds( - _action: T -): _action is Extract { - /** - * We temporarily want to not write the profileUpdatedAtSeconds as we want this behavior to - * be used only once a user has updated their profile picture (and resized it). - */ - window.log.debug('isSetProfileWithUpdatedAtSeconds forced to return false for now'); - return false; - - // return 'profileUpdatedAtSeconds' in action; + action: T +): action is Extract { + return 'profileUpdatedAtSeconds' in action; } /** @@ -1545,7 +1538,7 @@ export class ConversationModel extends Model { this.getAvatarInProfilePath() || this.getFallbackAvatarInProfilePath() || this.getAvatarPointer() || - this.getProfileKey() + this.getProfileKeyHex() ) { this.set({ avatarInProfile: undefined, @@ -1573,7 +1566,7 @@ export class ConversationModel extends Model { : to_hex(newProfile.profileKey); const existingAvatarPointer = this.getAvatarPointer(); - const existingProfileKeyHex = this.getProfileKey(); + const existingProfileKeyHex = this.getProfileKeyHex(); const hasAvatarInNewProfile = !!newProfile.avatarPointer || !!newProfileKeyHex; // if no changes are needed, return early if ( @@ -1603,7 +1596,7 @@ export class ConversationModel extends Model { : to_hex(newProfile.profileKey); const existingAvatarPointer = this.getAvatarPointer(); - const existingProfileKeyHex = this.getProfileKey(); + const existingProfileKeyHex = this.getProfileKeyHex(); const originalAvatar = this.getAvatarInProfilePath(); const originalFallbackAvatar = this.getFallbackAvatarInProfilePath(); @@ -1730,7 +1723,7 @@ export class ConversationModel extends Model { * Returns the profile key attributes of this instance. * If the attribute is unset, empty, or not a string, returns `undefined`. 
*/ - public getProfileKey(): string | undefined { + public getProfileKeyHex(): string | undefined { const profileKey = this.get('profileKey'); if (!profileKey || !isString(profileKey)) { return undefined; @@ -1768,13 +1761,12 @@ export class ConversationModel extends Model { } const avatarPointer = this.getAvatarPointer() ?? null; const displayName = this.getRealSessionUsername() ?? ''; - const profileKey = this.getProfileKey() ?? null; + const profileKeyHex = this.getProfileKeyHex() ?? null; const updatedAtSeconds = this.getProfileUpdatedSeconds(); return new OutgoingUserProfile({ - avatarPointer, + profilePic: { url: avatarPointer, key: profileKeyHex ? from_hex(profileKeyHex) : null }, displayName, - profileKey, updatedAtSeconds, }); } @@ -1857,6 +1849,10 @@ export class ConversationModel extends Model { priority, }); + if (this.isMe()) { + await UserConfigWrapperActions.setPriority(priority); + } + if (shouldCommit) { await this.commit(); } diff --git a/ts/models/message.ts b/ts/models/message.ts index 5837dc7e7a..c424462926 100644 --- a/ts/models/message.ts +++ b/ts/models/message.ts @@ -13,7 +13,6 @@ import { attachmentIdAsStrFromUrl, uploadAttachmentsToFileServer, uploadLinkPreviewToFileServer, - uploadQuoteThumbnailsToFileServer, } from '../session/utils'; import { MessageAttributes, @@ -39,11 +38,7 @@ import { VisibleMessage, VisibleMessageParams, } from '../session/messages/outgoing/visibleMessage/VisibleMessage'; -import { - uploadAttachmentsV3, - uploadLinkPreviewsV3, - uploadQuoteThumbnailsV3, -} from '../session/utils/AttachmentsV2'; +import { uploadAttachmentsV3, uploadLinkPreviewsV3 } from '../session/utils/AttachmentsV2'; import { isUsFromCache } from '../session/utils/User'; import { buildSyncMessage } from '../session/utils/sync/syncUtils'; import { @@ -69,7 +64,6 @@ import { getAbsoluteAttachmentPath, loadAttachmentData, loadPreviewData, - loadQuoteData, } from '../types/MessageAttachment'; import { ReactionList } from '../types/Reaction'; import 
{ getAttachmentMetadata } from '../types/message/initializeAttachmentMetadata'; @@ -787,7 +781,7 @@ export class MessageModel extends Model { (this.get('attachments') || []).map(loadAttachmentData) ); const body = this.get('body'); - const quoteWithData = await loadQuoteData(this.get('quote')); + const previewWithData = await loadPreviewData(this.get('preview')); const { hasAttachments, hasVisualMediaAttachments, hasFileAttachments } = @@ -799,7 +793,6 @@ export class MessageModel extends Model { let attachmentPromise; let linkPreviewPromise; - let quotePromise; const fileIdsToLink: Array = []; // we can only send a single preview @@ -810,33 +803,18 @@ export class MessageModel extends Model { const openGroupV2 = conversation.toOpenGroupV2(); attachmentPromise = uploadAttachmentsV3(finalAttachments, openGroupV2); linkPreviewPromise = uploadLinkPreviewsV3(firstPreviewWithData, openGroupV2); - quotePromise = uploadQuoteThumbnailsV3(openGroupV2, quoteWithData); } else { // if that's not an sogs, the file is uploaded to the file server instead attachmentPromise = uploadAttachmentsToFileServer(finalAttachments); linkPreviewPromise = uploadLinkPreviewToFileServer(firstPreviewWithData); - quotePromise = uploadQuoteThumbnailsToFileServer(quoteWithData); } - const [attachments, preview, quote] = await Promise.all([ - attachmentPromise, - linkPreviewPromise, - quotePromise, - ]); + const [attachments, preview] = await Promise.all([attachmentPromise, linkPreviewPromise]); fileIdsToLink.push(...attachments.map(m => attachmentIdAsStrFromUrl(m.url))); if (preview && preview.image?.url) { fileIdsToLink.push(attachmentIdAsStrFromUrl(preview.image.url)); } - if (quote && quote.attachments?.length && quote.attachments[0].thumbnail) { - // typing for all of this Attachment + quote + preview + send or unsend is pretty bad - const firstQuoteAttachmentUrl = - 'url' in quote.attachments[0].thumbnail ? 
quote.attachments[0].thumbnail.url : undefined; - if (firstQuoteAttachmentUrl && attachmentIdAsStrFromUrl(firstQuoteAttachmentUrl)) { - fileIdsToLink.push(attachmentIdAsStrFromUrl(firstQuoteAttachmentUrl)); - } - } - const isFirstAttachmentVoiceMessage = finalAttachments?.[0]?.isVoiceMessage; if (isFirstAttachmentVoiceMessage) { attachments[0].flags = SignalService.AttachmentPointer.Flags.VOICE_MESSAGE; @@ -845,13 +823,21 @@ export class MessageModel extends Model { window.log.info( `Upload of message data for message ${this.idForLogging()} is finished in ${ Date.now() - start - }ms.` + }ms. Attachments: ${attachments.map(m => m.url)}` ); + + this.setAttachments( + this.getAttachments()?.map((a: any, index: number) => ({ ...a, url: attachments[index].url })) + ); + // Note: we don't care about the fileUrl/fileId of previews, only of attachments as they are displayed in the message info + + await this.commit(); + return { body, attachments, preview, - quote, + quote: this.get('quote'), fileIdsToLink: uniq(fileIdsToLink), }; } diff --git a/ts/node/locale.ts b/ts/node/locale.ts index 2375fb758e..6372cf751f 100644 --- a/ts/node/locale.ts +++ b/ts/node/locale.ts @@ -1,8 +1,9 @@ import { setupI18n } from '../util/i18n/i18n'; import { CrowdinLocale, isCrowdinLocale } from '../localization/constants'; +import { keepFullLocalePart } from '../util/i18n/shared'; export function normalizeLocaleName(locale: string) { - const dashedLocale = locale.replaceAll('_', '-'); + const dashedLocale = keepFullLocalePart(locale).replaceAll('_', '-'); // Note: this is a pain, but we somehow needs to keep in sync this logic and the LOCALE_PATH_MAPPING from // https://github.com/session-foundation/session-shared-scripts/blob/main/crowdin/generate_desktop_strings.py diff --git a/ts/node/spell_check.ts b/ts/node/spell_check.ts index 2e1733ba20..9c50ec35fc 100644 --- a/ts/node/spell_check.ts +++ b/ts/node/spell_check.ts @@ -1,6 +1,7 @@ import { type BrowserWindow, Menu } from 'electron'; import 
{ sync as osLocaleSync } from 'os-locale'; import { tr } from '../localization/localeTools'; +import { keepFullLocalePart } from '../util/i18n/shared'; export const setup = (browserWindow: BrowserWindow) => { const { session } = browserWindow.webContents; @@ -8,9 +9,7 @@ export const setup = (browserWindow: BrowserWindow) => { // to support a broader list of spell checks than what the app is localised for. // For instance: en_AU is not a supported language on crowdin, but we still want the user to // - if they have the dictionary installed for it - we should be able to spell check "esky", "arvo" or "bogan" - const userLocale = process.env.LANGUAGE - ? process.env.LANGUAGE - : osLocaleSync().replace(/_/g, '-'); + const userLocale = keepFullLocalePart(process.env.LANGUAGE || osLocaleSync().replace(/_/g, '-')); const userLocales = [userLocale, userLocale.split('-')[0], userLocale.split('_')[0]]; const available = session.availableSpellCheckerLanguages; diff --git a/ts/receiver/attachments.ts b/ts/receiver/attachments.ts index dc8fe383cb..2b5a479b59 100644 --- a/ts/receiver/attachments.ts +++ b/ts/receiver/attachments.ts @@ -1,4 +1,4 @@ -import { omit, startsWith } from 'lodash'; +import { omit } from 'lodash'; import { MessageModel } from '../models/message'; import { Data } from '../data/data'; @@ -10,12 +10,14 @@ import { callUtilsWorker } from '../webworker/workers/browser/util_worker_interf import { sogsV3FetchFileByFileID } from '../session/apis/open_group_api/sogsv3/sogsV3FetchFile'; import { OpenGroupData } from '../data/opengroups'; import { OpenGroupRequestCommonType } from '../data/types'; -import { - downloadFileFromFileServer, - fileServerURL, -} from '../session/apis/file_server_api/FileServerApi'; +import { downloadFileFromFileServer } from '../session/apis/file_server_api/FileServerApi'; +import { FileFromFileServerDetails } from '../session/apis/file_server_api/types'; +import { MultiEncryptWrapperActions } from 
'../webworker/workers/browser/libsession_worker_interface'; -export async function downloadAttachment(attachment: { +/** + * Note: the url must have the serverPubkey as a query parameter + */ +export async function downloadAttachmentFs(attachment: { url: string; id?: string; isRaw?: boolean; @@ -23,24 +25,16 @@ export async function downloadAttachment(attachment: { digest?: string; size?: number; }) { - const asURL = new URL(attachment.url); - const serverUrl = asURL.origin; - - // is it an attachment hosted on the file server - const defaultFileServer = startsWith(serverUrl, fileServerURL); + const toDownload = new FileFromFileServerDetails(attachment.url); let res: ArrayBuffer | null = null; - // try to get the fileId from the end of the URL - const attachmentId = attachmentIdAsStrFromUrl(attachment.url); - if (!defaultFileServer) { - window.log.warn( - `downloadAttachment attachment is neither opengroup attachment nor fileserver... Dropping it ${asURL.href}` - ); - throw new Error('Attachment url is not opengroupv2 nor fileserver. Unsupported'); - } - window?.log?.info('Download v2 file server attachment', attachmentId); - res = await downloadFileFromFileServer(attachmentId); + window?.log?.info( + 'Download v2 file server attachment', + toDownload.fullUrl.toString(), + toDownload.serverEd25519Pk + ); + res = await downloadFileFromFileServer(toDownload); if (!res?.byteLength) { window?.log?.error('Failed to download attachment. 
Length is 0'); @@ -52,18 +46,36 @@ export async function downloadAttachment(attachment: { if (!attachment.isRaw) { const { key, digest, size } = attachment; - if (!key || !digest) { + // Note: if key is set but digest is not, it means we have a libsession deterministic encryption + if (!key) { throw new Error('Attachment is not raw but we do not have a key to decode it'); } + if (!size) { throw new Error('Attachment expected size is 0'); } - const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; - const digestBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', digest)) as ArrayBuffer; + if (!toDownload.deterministicEncryption) { + const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; + const digestBuffer = (await callUtilsWorker( + 'fromBase64ToArrayBuffer', + digest + )) as ArrayBuffer; + + data = await decryptAttachment(data, keyBuffer, digestBuffer); + } else { + window.log.debug( + `${attachment.url} attachment has deterministicEncryption flag set, assuming it is deterministic encryption` + ); - data = await decryptAttachment(data, keyBuffer, digestBuffer); + const keyBuffer = (await callUtilsWorker('fromBase64ToArrayBuffer', key)) as ArrayBuffer; + const decrypted = await MultiEncryptWrapperActions.attachmentDecrypt({ + encryptedData: new Uint8Array(data), + decryptionKey: new Uint8Array(keyBuffer), + }); + data = decrypted.decryptedData.buffer; + } if (size !== data.byteLength) { // we might have padding, check that all the remaining bytes are padding bytes // otherwise we have an error. 
@@ -148,7 +160,6 @@ async function processNormalAttachments( convo: ConversationModel ): Promise { const isOpenGroupV2 = convo.isOpenGroupV2(); - if (message.isTrustedForAttachmentDownload()) { const openGroupV2Details = (isOpenGroupV2 && convo.toOpenGroupV2()) || undefined; const attachments = await Promise.all( @@ -202,48 +213,6 @@ async function processPreviews(message: MessageModel, convo: ConversationModel): return addedCount; } -async function processQuoteAttachments( - message: MessageModel, - convo: ConversationModel -): Promise { - let addedCount = 0; - - const quote = message.get('quote'); - - if (!quote || !quote.attachments || !quote.attachments.length) { - return 0; - } - const isOpenGroupV2 = convo.isOpenGroupV2(); - const openGroupV2Details = (isOpenGroupV2 && convo.toOpenGroupV2()) || undefined; - - for (let index = 0; index < quote.attachments.length; index++) { - // If we already have a path, then we copied this image from the quoted - // message and we don't need to download the attachment. 
- const attachment = quote.attachments[index]; - - if (!attachment.thumbnail || attachment.thumbnail.path) { - continue; - } - - addedCount += 1; - - // eslint-disable-next-line no-await-in-loop - const thumbnail = await AttachmentDownloads.addJob(attachment.thumbnail, { - messageId: message.id, - type: 'quote', - index, - isOpenGroupV2, - openGroupV2Details, - }); - - quote.attachments[index] = { ...attachment, thumbnail }; - } - - message.setQuote(quote); - - return addedCount; -} - export async function queueAttachmentDownloads( message: MessageModel, conversation: ConversationModel @@ -251,11 +220,8 @@ export async function queueAttachmentDownloads( let count = 0; count += await processNormalAttachments(message, message.get('attachments') || [], conversation); - count += await processPreviews(message, conversation); - count += await processQuoteAttachments(message, conversation); - if (count > 0) { await Data.saveMessage(message.cloneAttributes()); } diff --git a/ts/receiver/contentMessage.ts b/ts/receiver/contentMessage.ts index eb0382ca7c..876bd03b9f 100644 --- a/ts/receiver/contentMessage.ts +++ b/ts/receiver/contentMessage.ts @@ -744,7 +744,7 @@ async function handleMessageRequestResponse( const srcProfileDetails = srcConvo.getPrivateProfileDetails(); const srcAvatarPath = srcConvo.getAvatarInProfilePath(); const srcFallbackAvatarPath = srcConvo.getFallbackAvatarInProfilePath(); - const srcProfilePic = srcProfileDetails.toProfilePicture(); + const srcProfilePic = srcProfileDetails.toHexProfilePicture(); const avatarChanges = srcAvatarPath && srcFallbackAvatarPath && srcProfilePic.url && srcProfilePic.key diff --git a/ts/receiver/dataMessage.ts b/ts/receiver/dataMessage.ts index d84acd2fa1..ddf68de507 100644 --- a/ts/receiver/dataMessage.ts +++ b/ts/receiver/dataMessage.ts @@ -69,19 +69,6 @@ function cleanAttachments(decryptedDataMessage: SignalService.DataMessage) { if (quote.id) { quote.id = toNumber(quote.id); } - - quote.attachments = (quote.attachments 
|| []).map((item: any) => { - const { thumbnail } = item; - - if (!thumbnail || thumbnail.length === 0) { - return item; - } - - return { - ...item, - thumbnail: cleanAttachment(item.thumbnail), - }; - }); } } diff --git a/ts/receiver/queuedJob.ts b/ts/receiver/queuedJob.ts index 4f43c89989..d9e981a843 100644 --- a/ts/receiver/queuedJob.ts +++ b/ts/receiver/queuedJob.ts @@ -22,17 +22,11 @@ import { import { showMessageRequestBannerOutsideRedux } from '../state/ducks/userConfig'; import { selectMemberInviteSentOutsideRedux } from '../state/selectors/groups'; import { getHideMessageRequestBannerOutsideRedux } from '../state/selectors/userConfig'; -import { GoogleChrome } from '../util'; import { LinkPreviews } from '../util/linkPreviews'; import { GroupV2Receiver } from './groupv2/handleGroupV2Message'; import { Constants } from '../session'; import { Timestamp } from '../types/timestamp/timestamp'; -function contentTypeSupported(type: string): boolean { - const Chrome = GoogleChrome; - return Chrome.isImageTypeSupported(type) || Chrome.isVideoTypeSupported(type); -} - function isMessageModel( msg: MessageModel | MessageModelPropsWithoutConvoProps ): msg is MessageModel { @@ -50,18 +44,16 @@ async function copyFromQuotedMessage( if (!quote) { return; } - const { attachments, id: quoteId, author } = quote; + const { id: quoteId, author } = quote; const quoteLocal: Quote = { - attachments: attachments || null, + attachments: null, author, id: _.toNumber(quoteId), text: null, referencedMessageNotFound: false, }; - const firstAttachment = attachments?.[0] || undefined; - const id = _.toNumber(quoteId); // First we try to look for the quote in memory @@ -99,11 +91,6 @@ async function copyFromQuotedMessage( window?.log?.info(`Found quoted message id: ${id}`); quoteLocal.referencedMessageNotFound = false; - // NOTE we send the entire body to be consistent with the other platforms - quoteLocal.text = - (isMessageModel(quotedMessage) - ? 
quotedMessage.get('body') - : quotedMessage.propsForMessage.text) || ''; if (isMessageModel(quotedMessage)) { window.inboxStore?.dispatch(pushQuotedMessageDetails(quotedMessage.getMessageModelProps())); @@ -111,52 +98,6 @@ async function copyFromQuotedMessage( window.inboxStore?.dispatch(pushQuotedMessageDetails(quotedMessage)); } - // no attachments, just save the quote with the body - if ( - !firstAttachment || - !firstAttachment.contentType || - !contentTypeSupported(firstAttachment.contentType) - ) { - msg.setQuote(quoteLocal); - return; - } - - firstAttachment.thumbnail = null; - - const queryAttachments = - (isMessageModel(quotedMessage) - ? quotedMessage.get('attachments') - : quotedMessage.propsForMessage.attachments) || []; - - if (queryAttachments.length > 0) { - const queryFirst = queryAttachments[0]; - const { thumbnail } = queryFirst; - - if (thumbnail && thumbnail.path) { - firstAttachment.thumbnail = { - ...thumbnail, - copied: true, - }; - } - } - - const queryPreview = - (isMessageModel(quotedMessage) - ? 
quotedMessage.get('preview') - : quotedMessage.propsForMessage.previews) || []; - if (queryPreview.length > 0) { - const queryFirst = queryPreview[0]; - const { image } = queryFirst; - - if (image && image.path) { - firstAttachment.thumbnail = { - ...image, - copied: true, - }; - } - } - quoteLocal.attachments = [firstAttachment]; - msg.setQuote(quoteLocal); } @@ -166,9 +107,12 @@ async function copyFromQuotedMessage( function handleLinkPreviews(messageBody: string, messagePreview: any, message: MessageModel) { const urls = LinkPreviews.findLinks(messageBody); const incomingPreview = messagePreview || []; - const preview = incomingPreview.filter( - (item: any) => (item.image || item.title) && urls.includes(item.url) - ); + const preview = incomingPreview + .filter((item: any) => (item.image || item.title) && urls.includes(item.url)) + .map((p: any) => ({ + ...p, + pending: true, + })); if (preview.length < incomingPreview.length) { window?.log?.info( `${message.idForLogging()}: Eliminated ${ @@ -307,7 +251,10 @@ async function handleRegularMessage( message.set({ // quote: rawDataMessage.quote, // do not do this copy here, it must be done only in copyFromQuotedMessage() - attachments: rawDataMessage.attachments?.map(m => ({ ...m, pending: true })), + attachments: rawDataMessage.attachments?.map(m => ({ + ...m, + pending: true, + })), body, conversationId: conversation.id, messageHash, diff --git a/ts/receiver/receiver.ts b/ts/receiver/receiver.ts index 473ee5374c..ba5a7e6df3 100644 --- a/ts/receiver/receiver.ts +++ b/ts/receiver/receiver.ts @@ -20,8 +20,6 @@ import { createTaskWithTimeout } from '../session/utils/TaskWithTimeout'; import { UnprocessedParameter } from '../types/sqlSharedTypes'; import { getEnvelopeId } from './common'; -export { downloadAttachment } from './attachments'; - const incomingMessagePromises: Array> = []; export async function handleSwarmContentDecryptedWithTimeout({ diff --git a/ts/session/apis/file_server_api/FileServerApi.ts 
b/ts/session/apis/file_server_api/FileServerApi.ts index b5bb3c9cdb..ac51513a7f 100644 --- a/ts/session/apis/file_server_api/FileServerApi.ts +++ b/ts/session/apis/file_server_api/FileServerApi.ts @@ -1,5 +1,5 @@ import AbortController from 'abort-controller'; -import { isEmpty, isFinite, isNumber, isString } from 'lodash'; +import { isEmpty, isFinite, isNumber, isString, toNumber } from 'lodash'; import { BlindingActions } from '../../../webworker/workers/browser/libsession_worker_interface'; import { OnionSending } from '../../onions/onionSend'; @@ -13,47 +13,49 @@ import { DURATION } from '../../constants'; import { isReleaseChannel, type ReleaseChannels } from '../../../updater/types'; import { Storage } from '../../../util/storage'; import { OnionV4 } from '../../onions/onionv4'; -import { SERVER_HOSTS } from '..'; +import { FileFromFileServerDetails } from './types'; +import { queryParamDeterministicEncryption, queryParamServerEd25519Pubkey } from '../../url'; +import { FS, type FILE_SERVER_TARGET_TYPE } from './FileServerTarget'; -export const fileServerURL = `http://${SERVER_HOSTS.FILE_SERVER}`; - -export const fileServerPubKey = 'da21e1d886c6fbaea313f75298bd64aab03a97ce985b46bb2dad9f2089c8ee59'; const RELEASE_VERSION_ENDPOINT = '/session_version'; +const FILE_ENDPOINT = '/file'; -const POST_GET_FILE_ENDPOINT = '/file'; - -function fileUrlToFileId(fullURL?: string) { - const prefix = `${fileServerURL}${POST_GET_FILE_ENDPOINT}/`; - if (!fullURL || !fullURL.startsWith(prefix)) { - return null; +function getShortTTLHeadersIfNeeded(): Record { + if (window.sessionFeatureFlags?.fsTTL30s) { + return { 'X-FS-TTL': '30' }; } - const fileId = fullURL.substring(prefix.length); - - if (!fileId) { - return null; - } - return fileId; + return {}; } /** * Upload a file to the file server v2 using the onion v4 encoding * @param fileContent the data to send + * @param deterministicEncryption whether the file is deterministically encrypted or not * @returns null or the 
complete URL to share this file */ export const uploadFileToFsWithOnionV4 = async ( - fileContent: ArrayBuffer + fileContent: ArrayBuffer, + deterministicEncryption: boolean ): Promise<{ fileUrl: string; expiresMs: number } | null> => { if (!fileContent || !fileContent.byteLength) { return null; } + // TODO: remove this once QA is done + const target = process.env.POTATO_FS + ? 'POTATO' + : process.env.SUPER_DUPER_FS + ? 'SUPER_DUPER' + : 'DEFAULT'; + const result = await OnionSending.sendBinaryViaOnionV4ToFileServer({ abortSignal: new AbortController().signal, bodyBinary: new Uint8Array(fileContent), - endpoint: POST_GET_FILE_ENDPOINT, + target, + endpoint: FILE_ENDPOINT, method: 'POST', timeoutMs: 30 * DURATION.SECONDS, // longer time for file upload - headers: window.sessionFeatureFlags.fsTTL30s ? { 'X-FS-TTL': '30' } : {}, + headers: getShortTTLHeadersIfNeeded(), }); if (!batchGlobalIsSuccess(result)) { @@ -72,7 +74,18 @@ export const uploadFileToFsWithOnionV4 = async ( ) { return null; } - const fileUrl = `${fileServerURL}${POST_GET_FILE_ENDPOINT}/${fileId}`; + + // we now have the `fileUrl` provide the `serverPubkey` and the deterministic flag as an url fragment. + const urlParams = new URLSearchParams(); + // Note: we don't want to set the pk for the default FS (it breaks prod builds on mobile) + if (target !== 'DEFAULT') { + urlParams.set(queryParamServerEd25519Pubkey, FS.FILE_SERVERS[target].edPk); + } + if (deterministicEncryption) { + urlParams.set(queryParamDeterministicEncryption, ''); + } + const urlParamStr = urlParams.toString(); + const fileUrl = `${FS.FILE_SERVERS[target].url}${FILE_ENDPOINT}/${fileId}${urlParamStr ? 
`#${urlParamStr}` : ''}`; const expiresMs = Math.floor(expires * 1000); return { fileUrl, @@ -86,42 +99,21 @@ export const uploadFileToFsWithOnionV4 = async ( * @returns the data as an Uint8Array or null */ export const downloadFileFromFileServer = async ( - fileIdOrCompleteUrl: string + toDownload: FileFromFileServerDetails ): Promise => { - let fileId = fileIdOrCompleteUrl; - if (!fileIdOrCompleteUrl) { - window?.log?.warn('Empty url to download for fileserver'); - return null; - } - - if (fileIdOrCompleteUrl.lastIndexOf('/') >= 0) { - fileId = fileId.substring(fileIdOrCompleteUrl.lastIndexOf('/') + 1); - } - - if (fileId.startsWith('/')) { - fileId = fileId.substring(1); - } - - if (!fileId) { - window.log.info('downloadFileFromFileServer given empty fileId'); - return null; - } - - const urlToGet = `${POST_GET_FILE_ENDPOINT}/${fileId}`; if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`about to try to download fsv2: "${urlToGet}"`); + window.log.info(`about to try to download fsv2: "${toDownload.fullUrl}"`); } // this throws if we get a 404 from the file server const result = await OnionSending.getBinaryViaOnionV4FromFileServer({ abortSignal: new AbortController().signal, - endpoint: urlToGet, - method: 'GET', + fileToGet: toDownload, throwError: true, timeoutMs: 30 * DURATION.SECONDS, // longer time for file download }); if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`download fsv2: "${urlToGet} got result:`, JSON.stringify(result)); + window.log.info(`download fsv2: "${toDownload.fullUrl} got result:`, JSON.stringify(result)); } if (!result) { return null; @@ -190,6 +182,7 @@ export const getLatestReleaseFromFileServer = async ( stringifiedBody: null, headers, timeoutMs: 10 * DURATION.SECONDS, + target: 'DEFAULT' as const, }; const result = await OnionSending.sendJsonViaOnionV4ToFileServer(params); @@ -206,34 +199,43 @@ export const getLatestReleaseFromFileServer = async ( }; /** - * Fetch the expiry in ms of 
the corresponding file. + * Extend a file expiry from the file server. + * This only works with files that have an alphanumeric id. + * */ -export const getFileInfoFromFileServer = async ( - fileUrl: string -): Promise<{ expiryMs: number } | null> => { - const fileId = fileUrlToFileId(fileUrl); - - if (!fileId) { - throw new Error("getFileInfoFromFileServer: fileUrl doesn't start with the file server url"); +export const extendFileExpiry = async (fileId: string, fsTarget: FILE_SERVER_TARGET_TYPE) => { + if (window.sessionFeatureFlags?.debugServerRequests) { + window.log.info(`about to renew expiry of file: "${fileId}"`); } + const method = 'POST'; + const endpoint = `/file/${fileId}/extend`; + const result = await OnionSending.sendJsonViaOnionV4ToFileServer({ abortSignal: new AbortController().signal, + endpoint, + method, stringifiedBody: null, - endpoint: `${POST_GET_FILE_ENDPOINT}/${fileId}/info`, - method: 'GET', + headers: getShortTTLHeadersIfNeeded(), timeoutMs: 10 * DURATION.SECONDS, - headers: {}, + target: fsTarget, }); - const fileExpirySeconds = result?.body?.expires as number | undefined; - if (!batchGlobalIsSuccess(result)) { return null; } - if (!fileExpirySeconds || !isNumber(fileExpirySeconds) || !isFinite(fileExpirySeconds)) { + const { + expires: fileNewExpirySeconds, + uploaded: fileUploadedSeconds, + size, + } = result?.body as any; + if (!fileNewExpirySeconds) { return null; } - return { expiryMs: Math.floor(fileExpirySeconds * 1000) }; + return { + fileNewExpiryMs: Math.floor(fileNewExpirySeconds * 1000), // the expires/uploaded have the ms in the decimals (i.e `1761002358.02229`) + fileUploadedMs: Math.floor(fileUploadedSeconds * 1000), + size: toNumber(size), + }; }; diff --git a/ts/session/apis/file_server_api/FileServerTarget.ts b/ts/session/apis/file_server_api/FileServerTarget.ts new file mode 100644 index 0000000000..e5b7f8c651 --- /dev/null +++ b/ts/session/apis/file_server_api/FileServerTarget.ts @@ -0,0 +1,88 @@ +import { 
SERVER_HOSTS } from '..'; +import { assertUnreachable } from '../../../types/sqlSharedTypes'; + +enum FS_FEATURES { + fsExtend = 'fsExtend', +} + +type FileServerConfigType = { + url: string; + xPk: string; + edPk: string; + extraFeatures: Array; +}; + +// not exported/included in the SERVER_HOSTS as this is for testing only +const POTATO_FS_HOST = 'potatofiles.getsession.org'; +const SUPER_DUPER_FS_HOST = 'superduperfiles.oxen.io'; + +const FILE_SERVERS: Record<'DEFAULT' | 'POTATO' | 'SUPER_DUPER', FileServerConfigType> = { + DEFAULT: { + url: `http://${SERVER_HOSTS.DEFAULT_FILE_SERVER}`, + xPk: '09324794aa9c11948189762d198c618148e9136ac9582068180661208927ef34', + edPk: 'b8eef9821445ae16e2e97ef8aa6fe782fd11ad5253cd6723b281341dba22e371', + extraFeatures: [], + }, + POTATO: { + url: `http://${POTATO_FS_HOST}`, + edPk: 'ff86dcd4b26d1bfec944c59859494248626d6428efc12168749d65a1b92f5e28', + xPk: 'fc097b06821c98a2db75ce02e521cef5fd9d3446e42e81d843c4c8c4e9260f48', + extraFeatures: [FS_FEATURES.fsExtend], + }, + SUPER_DUPER: { + url: `http://${SUPER_DUPER_FS_HOST}`, + edPk: '929e33ded05e653fec04b49645117f51851f102a947e04806791be416ed76602', + xPk: '16d6c60aebb0851de7e6f4dc0a4734671dbf80f73664c008596511454cb6576d', + extraFeatures: [FS_FEATURES.fsExtend], + }, +}; + +const FILE_SERVER_TARGETS = Object.keys(FILE_SERVERS) as Array; + +function isDefaultFileServer(edOrXPk: string) { + return edOrXPk === FILE_SERVERS.DEFAULT.edPk || edOrXPk === FILE_SERVERS.DEFAULT.xPk; +} + +function supportsFsExtend(target: FILE_SERVER_TARGET_TYPE) { + return FILE_SERVERS[target].extraFeatures.includes(FS_FEATURES.fsExtend); +} + +function fileUrlToFileTarget(url: string): FILE_SERVER_TARGET_TYPE { + if (!URL.canParse(url)) { + throw new Error(`fileUrlToFileTarget: url can't be parsed: "${url}"`); + } + const parsedUrl = new URL(url); + // this for loop is just here to get a compile error if we ever add a fs target + for (let index = 0; index < FILE_SERVER_TARGETS.length; index++) { + const 
target = FILE_SERVER_TARGETS[index]; + switch (target) { + case 'POTATO': + if (parsedUrl.host.includes(POTATO_FS_HOST)) { + return 'POTATO'; + } + break; + case 'SUPER_DUPER': + if (parsedUrl.host.includes(SUPER_DUPER_FS_HOST)) { + return 'SUPER_DUPER'; + } + break; + case 'DEFAULT': + if (parsedUrl.host.includes(SERVER_HOSTS.DEFAULT_FILE_SERVER)) { + return 'DEFAULT'; + } + break; + default: + assertUnreachable(target, 'fileUrlToFileTarget: target is not a valid target'); + } + } + throw new Error(`fileUrlToFileTarget: url host is not a valid file server: "${url}"`); +} + +export const FS = { + isDefaultFileServer, + supportsFsExtend, + FILE_SERVERS, + fileUrlToFileTarget, +}; + +export type FILE_SERVER_TARGET_TYPE = keyof typeof FILE_SERVERS; diff --git a/ts/session/apis/file_server_api/types.ts b/ts/session/apis/file_server_api/types.ts new file mode 100644 index 0000000000..5ee99c969f --- /dev/null +++ b/ts/session/apis/file_server_api/types.ts @@ -0,0 +1,55 @@ +import { + extractDetailsFromUrlFragment, + extractLastPathSegment, + parseFileServerUrl, +} from '../../url'; + +export function fileServerUrlToFileId(fullURL?: string) { + const parsedUrl = parseFileServerUrl(fullURL); + if (!parsedUrl) { + return { fileId: '', fullUrl: null }; + } + const fileId = extractLastPathSegment(parsedUrl); + + if (!fileId) { + return { fileId: '', fullUrl: null }; + } + return { fileId, fullUrl: parsedUrl }; +} + +function getDownloadFileDetails(urlWithFragment: string) { + const { fileId, fullUrl } = fileServerUrlToFileId(urlWithFragment); + if (!fileId || !fullUrl) { + throw new Error('DownloadFromFileServer: fileId is empty or not a file server url'); + } + + const { serverEd25519Pk, deterministicEncryption } = extractDetailsFromUrlFragment(fullUrl); + + return { fileId, fullUrl, serverEd25519Pk, deterministicEncryption }; +} + +/** + * A utility class to store a file that needs to be downloaded from a file server. 
+ * It validates that the url is one of the valid file server urls. + * Throws if the url is not valid or not a file server url. + */ +export class FileFromFileServerDetails { + public readonly fileId: string; + public readonly fullUrl: URL; + public readonly serverEd25519Pk: string; + public readonly deterministicEncryption: boolean; + + /** + * Construct a FileFromFileServer object. + * @param url the url to download from. It must have the serverPubkey as a query parameter (serverPubkey) + */ + constructor(url: string) { + const { fileId, fullUrl, serverEd25519Pk, deterministicEncryption } = + getDownloadFileDetails(url); + + this.fileId = fileId; + this.fullUrl = fullUrl; + this.serverEd25519Pk = serverEd25519Pk; + this.deterministicEncryption = deterministicEncryption; + } +} diff --git a/ts/session/apis/index.ts b/ts/session/apis/index.ts index b30f67d560..04562dbf8c 100644 --- a/ts/session/apis/index.ts +++ b/ts/session/apis/index.ts @@ -1,4 +1,4 @@ export const SERVER_HOSTS = { - FILE_SERVER: 'filev2.getsession.org', + DEFAULT_FILE_SERVER: 'filev2.getsession.org', NETWORK_SERVER: 'networkv1.getsession.org', }; diff --git a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts index 8ed68bde5f..21e4bc526b 100644 --- a/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts +++ b/ts/session/apis/open_group_api/sogsv3/sogsV3FetchFile.ts @@ -33,7 +33,11 @@ export function fileDetailsToURL({ } function imageUrlToImageId(imageFullUrl?: string) { - const imageId = imageFullUrl?.split('/').pop(); + if (!imageFullUrl) { + return null; + } + const parsedUrl = URL.canParse(imageFullUrl) && new URL(imageFullUrl); + const imageId = parsedUrl && parsedUrl?.pathname.split('/').pop(); if (isNil(imageId) || !isNumber(toNumber(imageId)) || !isFinite(toNumber(imageId))) { return null; } diff --git a/ts/session/apis/push_notification_api/PnServer.ts b/ts/session/apis/push_notification_api/PnServer.ts deleted file 
mode 100644 index 23195b8623..0000000000 --- a/ts/session/apis/push_notification_api/PnServer.ts +++ /dev/null @@ -1,25 +0,0 @@ -import AbortController from 'abort-controller'; -import { callUtilsWorker } from '../../../webworker/workers/browser/util_worker_interface'; -import { OnionSending } from '../../onions/onionSend'; -import { DURATION } from '../../constants'; - -export const pnServerPubkeyHex = '642a6585919742e5a2d4dc51244964fbcd8bcab2b75612407de58b810740d049'; -export const hrefPnServerProd = 'live.apns.getsession.org'; -export const pnServerUrl = `https://${hrefPnServerProd}`; - -export async function notifyPnServer(wrappedEnvelope: ArrayBuffer, sentTo: string) { - const wrappedEnvelopeBase64 = await callUtilsWorker('arrayBufferToStringBase64', wrappedEnvelope); - - // we actually don't care about the result of this request, and it's better like this - // as it is not a response encoded back for us with a symmetric key - await OnionSending.sendJsonViaOnionV4ToPnServer({ - abortSignal: new AbortController().signal, - endpoint: '/notify', - method: 'POST', - stringifiedBody: JSON.stringify({ - data: wrappedEnvelopeBase64, - send_to: sentTo, - }), - timeoutMs: 10 * DURATION.SECONDS, - }); -} diff --git a/ts/session/apis/push_notification_api/index.ts b/ts/session/apis/push_notification_api/index.ts deleted file mode 100644 index 3c33137e6f..0000000000 --- a/ts/session/apis/push_notification_api/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as PnServer from './PnServer'; - -export { PnServer }; diff --git a/ts/session/apis/seed_node_api/SeedNodeAPI.ts b/ts/session/apis/seed_node_api/SeedNodeAPI.ts index 87b83ab65f..7353c74c11 100644 --- a/ts/session/apis/seed_node_api/SeedNodeAPI.ts +++ b/ts/session/apis/seed_node_api/SeedNodeAPI.ts @@ -208,23 +208,21 @@ export async function TEST_fetchSnodePoolFromSeedNodeRetryable( throw new Error('fetchSnodePoolFromSeedNodeRetryable: Seed nodes are empty'); } - const seedNodeUrl = _.sample(seedNodes); - if 
(!seedNodeUrl) { + if (!seedNodes.length) { window?.log?.warn( - 'loki_snode_api::fetchSnodePoolFromSeedNodeRetryable - Could not select random snodes from', + 'loki_snode_api::fetchSnodePoolFromSeedNodeRetryable - no seednodes', seedNodes ); throw new Error('fetchSnodePoolFromSeedNodeRetryable: Seed nodes are empty #2'); } - const tryUrl = new URL(seedNodeUrl); + const snodes = await Promise.race(seedNodes.map(s => getSnodesFromSeedUrl(new URL(s)))); - const snodes = await getSnodesFromSeedUrl(tryUrl); if (snodes.length === 0) { window?.log?.warn( - `loki_snode_api::fetchSnodePoolFromSeedNodeRetryable - ${seedNodeUrl} did not return any snodes` + `loki_snode_api::fetchSnodePoolFromSeedNodeRetryable - Promise.race did not return any snodes` ); - throw new Error(`Failed to contact seed node: ${seedNodeUrl}`); + throw new Error(`Failed to contact seed node: Promise.race did not return any snodes`); } return snodes; diff --git a/ts/session/apis/snode_api/onions.ts b/ts/session/apis/snode_api/onions.ts index ea6553be47..7cde11b417 100644 --- a/ts/session/apis/snode_api/onions.ts +++ b/ts/session/apis/snode_api/onions.ts @@ -17,7 +17,6 @@ import { Snode } from '../../../data/types'; import { callUtilsWorker } from '../../../webworker/workers/browser/util_worker_interface'; import { encodeV4Request } from '../../onions/onionv4'; import { SnodeResponseError } from '../../utils/errors'; -import { hrefPnServerProd } from '../push_notification_api/PnServer'; import { ERROR_CODE_NO_CONNECT } from './SNodeAPI'; import { MergedAbortSignal, WithAbortSignal, WithTimeoutMs } from './requestWith'; import { @@ -185,8 +184,7 @@ async function buildOnionCtxs( const relayingToFinalDestination = i === firstPos; // if last position if (relayingToFinalDestination && finalRelayOptions) { - const isCallToPn = finalRelayOptions?.host === hrefPnServerProd; - const target = !isCallToPn && !useV4 ? '/loki/v3/lsrpc' : '/oxen/v4/lsrpc'; + const target = useV4 ? 
'/oxen/v4/lsrpc' : '/loki/v3/lsrpc'; dest = { host: finalRelayOptions.host, @@ -389,7 +387,6 @@ async function processAnyOtherErrorOnPath( } processOxenServerError(status, ciphertext); - throw new Error(`Bad Path handled. Retry this request. Status: ${status}`); } } @@ -428,7 +425,6 @@ async function processAnyOtherErrorAtDestination( snodeEd25519: destinationEd25519, associatedWith, }); - throw new Error(`Bad Path handled. Retry this request. Status: ${status}`); } } diff --git a/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts index eac6ee016b..c49844dfb2 100644 --- a/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts +++ b/ts/session/apis/snode_api/swarm_polling_config/SwarmPollingGroupConfig.ts @@ -279,7 +279,7 @@ async function scheduleAvatarDownloadJobIfNeeded(groupPk: GroupPubkeyType) { if (!profileUrl || !profileKeyHex) { // no avatar set for this group: make sure we also remove the one we might have locally. - if (conversation.getAvatarPointer() || conversation.getProfileKey()) { + if (conversation.getAvatarPointer() || conversation.getProfileKeyHex()) { await conversation.setSessionProfile({ type: 'resetAvatarGroup', displayName: null, @@ -291,7 +291,7 @@ async function scheduleAvatarDownloadJobIfNeeded(groupPk: GroupPubkeyType) { // here, an avatar for this group is set. 
First we need to make sure if that's the same as we already have const prevPointer = conversation.getAvatarPointer(); - const prevProfileKey = conversation.getProfileKey(); + const prevProfileKey = conversation.getProfileKeyHex(); if (prevPointer !== profileUrl || prevProfileKey !== profileKeyHex) { // set the avatar for this group, it will be downloaded by the job scheduled below diff --git a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts index a6d36812c4..8f4f7a4fa2 100644 --- a/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts +++ b/ts/session/messages/outgoing/visibleMessage/VisibleMessage.ts @@ -56,8 +56,6 @@ export interface QuotedAttachmentWithUrl extends QuotedAttachmentCommon { export interface Quote { id: number; author: string; - text?: string; - attachments?: Array; } export type VisibleMessageParams = ExpirableMessageParams & @@ -123,7 +121,7 @@ export class VisibleMessage extends DataMessage { dataMessage.body = this.body; } - dataMessage.attachments = this.attachments || []; + dataMessage.attachments = this.attachments ?? 
[]; if (this.reaction) { dataMessage.reaction = this.reaction; @@ -143,23 +141,6 @@ export class VisibleMessage extends DataMessage { dataMessage.quote.id = this.quote.id; dataMessage.quote.author = this.quote.author; - dataMessage.quote.text = this.quote.text; - if (this.quote.attachments) { - dataMessage.quote.attachments = this.quote.attachments.map(attachment => { - const quotedAttachment = new SignalService.DataMessage.Quote.QuotedAttachment(); - if (attachment.contentType) { - quotedAttachment.contentType = attachment.contentType; - } - if (attachment.fileName) { - quotedAttachment.fileName = attachment.fileName; - } - if (attachment.thumbnail && (attachment.thumbnail as any).id) { - quotedAttachment.thumbnail = attachment.thumbnail as any; // be sure to keep the typescript guard on id above - } - - return quotedAttachment; - }); - } } if (Array.isArray(this.preview)) { diff --git a/ts/session/onions/onionSend.ts b/ts/session/onions/onionSend.ts index 1bf2260d7e..72303c2cb9 100644 --- a/ts/session/onions/onionSend.ts +++ b/ts/session/onions/onionSend.ts @@ -1,6 +1,7 @@ import { AbortSignal } from 'abort-controller'; import { toNumber } from 'lodash'; import pRetry from 'p-retry'; +import { crypto_sign_ed25519_pk_to_curve25519, from_hex, to_hex } from 'libsodium-wrappers-sumo'; import { OnionPaths } from '.'; import { Snode } from '../../data/types'; @@ -10,7 +11,6 @@ import { addBinaryContentTypeToHeaders, addJsonContentTypeToHeaders, } from '../apis/open_group_api/sogsv3/sogsV3SendMessage'; -import { pnServerPubkeyHex, pnServerUrl } from '../apis/push_notification_api/PnServer'; import { FinalDestNonSnodeOptions, FinalRelayOptions, @@ -23,8 +23,9 @@ import { OnionV4 } from './onionv4'; import { MergedAbortSignal, WithAbortSignal, WithTimeoutMs } from '../apis/snode_api/requestWith'; import { OnionPathEmptyError } from '../utils/errors'; import { SnodePool } from '../apis/snode_api/snodePool'; -import { fileServerURL, fileServerPubKey } from 
'../apis/file_server_api/FileServerApi'; import { SERVER_HOSTS } from '../apis'; +import type { FileFromFileServerDetails } from '../apis/file_server_api/types'; +import { FS, type FILE_SERVER_TARGET_TYPE } from '../apis/file_server_api/FileServerTarget'; export type OnionFetchOptions = { method: string; @@ -374,42 +375,6 @@ async function sendJsonViaOnionV4ToSogs( return res as OnionV4JSONSnodeResponse | null; } -/** - * Send some json to the PushNotification server. - * Desktop only send `/notify` requests. - * - * You should probably not use this function directly but instead rely on the PnServer.notifyPnServer() function - */ -async function sendJsonViaOnionV4ToPnServer( - sendOptions: WithTimeoutMs & { - endpoint: string; - method: string; - stringifiedBody: string | null; - abortSignal: AbortSignal; - } -): Promise { - const { endpoint, method, stringifiedBody, abortSignal, timeoutMs } = sendOptions; - if (!endpoint.startsWith('/')) { - throw new Error('endpoint needs a leading /'); - } - const builtUrl = new URL(`${pnServerUrl}${endpoint}`); - - const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - pnServerPubkeyHex, - builtUrl, - { - method, - headers: {}, - body: stringifiedBody, - useV4: true, - }, - false, - abortSignal, - timeoutMs - ); - return res as OnionV4JSONSnodeResponse; -} - async function sendBinaryViaOnionV4ToSogs( sendOptions: WithTimeoutMs & { serverUrl: string; @@ -481,6 +446,7 @@ async function sendBinaryViaOnionV4ToSogs( * You should probably not use this function directly, but instead rely on the FileServerAPI.uploadFileToFsWithOnionV4() */ async function sendBinaryViaOnionV4ToFileServer({ + target, endpoint, method, bodyBinary, @@ -489,19 +455,15 @@ async function sendBinaryViaOnionV4ToFileServer({ headers = {}, }: WithTimeoutMs & WithAbortSignal & { + target: FILE_SERVER_TARGET_TYPE; endpoint: string; method: string; bodyBinary: Uint8Array; headers?: Record; }): Promise { - if (!endpoint.startsWith('/')) { - throw new 
Error('endpoint needs a leading /'); - } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); - const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, - builtUrl, + FS.FILE_SERVERS[target].xPk, + new URL(`${FS.FILE_SERVERS[target].url}${endpoint}`), { method, headers, @@ -521,33 +483,34 @@ async function sendBinaryViaOnionV4ToFileServer({ * You should probably not use this function directly, but instead rely on the FileServerAPI.downloadFileFromFileServer() */ async function getBinaryViaOnionV4FromFileServer({ - endpoint, - method, + fileToGet, abortSignal, throwError, timeoutMs, }: WithTimeoutMs & WithAbortSignal & { - endpoint: string; - method: string; + fileToGet: FileFromFileServerDetails; throwError: boolean; }): Promise { - if (!endpoint.startsWith('/')) { - throw new Error('endpoint needs a leading /'); + if (window.sessionFeatureFlags?.debugServerRequests) { + window.log.info(`getBinaryViaOnionV4FromFileServer fsv2: "${fileToGet.fullUrl} `); } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); - if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info(`getBinaryViaOnionV4FromFileServer fsv2: "${builtUrl} `); + if (!fileToGet.fullUrl) { + throw new Error('getBinaryViaOnionV4FromFileServer: fullUrl is required'); } + const serverX25519Pk = to_hex( + crypto_sign_ed25519_pk_to_curve25519(from_hex(fileToGet.serverEd25519Pk)) + ); + // this throws for a bunch of reasons. // One of them, is if we get a 404 (i.e. 
the file server was reached but reported no such attachments exists) const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, - builtUrl, + serverX25519Pk, + fileToGet.fullUrl, { - method, + method: 'GET', headers: {}, body: null, useV4: true, @@ -558,8 +521,8 @@ async function getBinaryViaOnionV4FromFileServer({ ); if (window.sessionFeatureFlags?.debugServerRequests) { - window.log.info( - `getBinaryViaOnionV4FromFileServer fsv2: "${builtUrl}; got:`, + window.log.debug( + `getBinaryViaOnionV4FromFileServer fsv2: "${fileToGet.fullUrl}; got:`, JSON.stringify(res) ); } @@ -572,6 +535,7 @@ async function getBinaryViaOnionV4FromFileServer({ */ async function sendJsonViaOnionV4ToFileServer({ endpoint, + target, method, stringifiedBody, abortSignal, @@ -579,6 +543,7 @@ async function sendJsonViaOnionV4ToFileServer({ timeoutMs, }: WithAbortSignal & WithTimeoutMs & { + target: FILE_SERVER_TARGET_TYPE; endpoint: string; method: string; stringifiedBody: string | null; @@ -587,10 +552,10 @@ async function sendJsonViaOnionV4ToFileServer({ if (!endpoint.startsWith('/')) { throw new Error('endpoint needs a leading /'); } - const builtUrl = new URL(`${fileServerURL}${endpoint}`); + const builtUrl = new URL(`${FS.FILE_SERVERS[target].url}${endpoint}`); const res = await OnionSending.sendViaOnionV4ToNonSnodeWithRetries( - fileServerPubKey, + FS.FILE_SERVERS[target].xPk, builtUrl, { method, @@ -668,7 +633,6 @@ export const OnionSending = { sendViaOnionV4ToNonSnodeWithRetries, getOnionPathForSending, sendJsonViaOnionV4ToSogs, - sendJsonViaOnionV4ToPnServer, sendBinaryViaOnionV4ToFileServer, sendBinaryViaOnionV4ToSogs, getBinaryViaOnionV4FromFileServer, diff --git a/ts/session/url/index.ts b/ts/session/url/index.ts new file mode 100644 index 0000000000..7520afdc72 --- /dev/null +++ b/ts/session/url/index.ts @@ -0,0 +1,119 @@ +import { FS } from '../apis/file_server_api/FileServerTarget'; + +export const queryParamServerEd25519Pubkey = 'p'; +export const 
queryParamDeterministicEncryption = 'd'; +/** + * The encryption key is a hex string, and was used to encrypt the file. + * It is the same as the profileKey for a user profile. + */ +export const queryParamEncryptionKey = 'e'; + +function parseSearchParamsFromFragment(url: URL) { + // slice to remove the leading '#' + const fragment = (url.hash || '').slice(1); + + const searchParams = new URLSearchParams(fragment); + return searchParams; +} + +/** + * Returns the serverPk/deterministicEncryption/profileKey from the provided url fragment + * Note: + * - for the default file server, the serverPk is hardcoded. + * - if no serverPk is provided, the defaultFileServerPubKey is returned. + * - if no profileKey is provided, the profileKey is null + * - if no deterministicEncryption is provided, the deterministicEncryption is false (presence is used, the value is not checked) + * + * Also, the fs serverPk is removed from the url if it is the default one. + */ +export function extractDetailsFromUrlFragment(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + // if the serverPk is not present in the fragment, we assume it is the default file server + const serverEd25519Pk = + searchParams.get(queryParamServerEd25519Pubkey) ?? FS.FILE_SERVERS.DEFAULT.edPk; + const profileKey = searchParams.get(queryParamEncryptionKey); + const deterministicEncryption = searchParams.has(queryParamDeterministicEncryption) ?? 
false; + if (!serverEd25519Pk) { + throw new Error( + 'FileFromFileServer: serverPubkey & other details are required as a fragment-query parameter for non-default file server' + ); + } + + return { + serverEd25519Pk, + deterministicEncryption, + profileKey, + urlWithoutProfileKey: removeDefaultServerPk(removeProfileKey(url)).toString(), + }; +} + +export function addProfileKeyToUrl(url: URL, profileKeyHex: string) { + const searchParams = parseSearchParamsFromFragment(url); + const profileKey = searchParams.get(queryParamEncryptionKey); + if (profileKey) { + // a profile key field is already present + return url; + } + const urlCopy = new URL(url.toString()); + searchParams.set(queryParamEncryptionKey, profileKeyHex); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +function removeProfileKey(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + const profileKey = searchParams.get(queryParamEncryptionKey); + if (!profileKey) { + // a profile key field is not present + return url; + } + const urlCopy = new URL(url.toString()); + searchParams.delete(queryParamEncryptionKey); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +function removeDefaultServerPk(url: URL) { + const searchParams = parseSearchParamsFromFragment(url); + const serverPk = searchParams.get(queryParamServerEd25519Pubkey); + if (!serverPk || !FS.isDefaultFileServer(serverPk)) { + // a serverPk is not present, or it is not the default file server + return url; + } + + const urlCopy = new URL(url.toString()); + searchParams.delete(queryParamEncryptionKey); + urlCopy.hash = searchParams.toString() ?? ''; + + return urlCopy; +} + +export function extractLastPathSegment(url: URL) { + const lastSegment = url.pathname.split('/').filter(Boolean).pop(); + if (!lastSegment) { + return null; + } + return lastSegment; +} + +/** + * Returns the parsed url from the provided string only if that matches one of our file server urls. 
+ */ +export function parseFileServerUrl(fullURL?: string) { + if (!fullURL) { + return null; + } + + const parsedUrl = URL.canParse(fullURL) && new URL(fullURL); + if (!parsedUrl) { + return null; + } + + if (parsedUrl.host.includes('open.getsession.org')) { + // we need to filter out communities we host on getsession.org as they do not have the same api. + return null; + } + return parsedUrl; +} diff --git a/ts/session/utils/Attachments.ts b/ts/session/utils/Attachments.ts index 4f4aa2f6dc..6680a6c5d9 100644 --- a/ts/session/utils/Attachments.ts +++ b/ts/session/utils/Attachments.ts @@ -1,5 +1,5 @@ import * as crypto from 'crypto'; -import _, { isEmpty, isString } from 'lodash'; +import { isEmpty, isString } from 'lodash'; import Long from 'long'; import { Attachment } from '../../types/Attachment'; @@ -10,17 +10,20 @@ import { AttachmentPointer, AttachmentPointerWithUrl, PreviewWithAttachmentUrl, - Quote, - QuotedAttachmentWithUrl, } from '../messages/outgoing/visibleMessage/VisibleMessage'; import { uploadFileToFsWithOnionV4 } from '../apis/file_server_api/FileServerApi'; +import { MultiEncryptWrapperActions } from '../../webworker/workers/browser/libsession_worker_interface'; +import { UserUtils } from '.'; +import { extractLastPathSegment } from '../url'; -interface UploadParams { +type UploadParams = { attachment: Attachment; - isAvatar?: boolean; - isRaw?: boolean; + + /** + * Explicit padding is only needed for the legacy encryption, as libsession deterministic encryption already pads the data. 
+ */ shouldPad?: boolean; -} +}; export interface RawPreview { url: string; @@ -42,7 +45,7 @@ export interface RawQuote { } async function uploadToFileServer(params: UploadParams): Promise { - const { attachment, isRaw = false, shouldPad = false } = params; + const { attachment, shouldPad = false } = params; if (typeof attachment !== 'object' || attachment == null) { throw new Error('Invalid attachment passed.'); } @@ -64,9 +67,25 @@ async function uploadToFileServer(params: UploadParams): Promise { - if (!quote) { - return undefined; - } - - const promises = (quote.attachments ?? []).map(async attachment => { - let thumbnail: AttachmentPointer | undefined; - if (attachment.thumbnail) { - thumbnail = await uploadToFileServer({ - attachment: attachment.thumbnail, - }); - } - if (!thumbnail) { - return attachment; - } - return { - ...attachment, - thumbnail, - url: thumbnail.url, - } as QuotedAttachmentWithUrl; - }); - - const attachments = _.compact(await Promise.all(promises)); - - return { - ...quote, - attachments, - }; -} - -export function attachmentIdAsStrFromUrl(url: string) { - const lastSegment = url?.split('/')?.pop(); +export function attachmentIdAsStrFromUrl(fullUrl: string) { + const url = new URL(fullUrl); + const lastSegment = extractLastPathSegment(url); if (!lastSegment) { - throw new Error('attachmentIdAsStrFromUrl last is not valid'); + throw new Error('attachmentIdAsStrFromUrl last segment is not valid'); } return lastSegment; } export function attachmentIdAsLongFromUrl(url: string) { - const lastSegment = url?.split('/')?.pop(); + const parsedUrl = URL.canParse(url) && new URL(url); + + const lastSegment = parsedUrl && parsedUrl.pathname.split('/').filter(Boolean).pop(); if (!lastSegment) { throw new Error('attachmentIdAsLongFromUrl last is not valid'); } diff --git a/ts/session/utils/AttachmentsDownload.ts b/ts/session/utils/AttachmentsDownload.ts index a2734539a6..801768c13e 100644 --- a/ts/session/utils/AttachmentsDownload.ts +++ 
b/ts/session/utils/AttachmentsDownload.ts @@ -4,7 +4,7 @@ import { v4 as uuidV4 } from 'uuid'; import { Data } from '../../data/data'; import { MessageModel } from '../../models/message'; -import { downloadAttachment, downloadAttachmentSogsV3 } from '../../receiver/attachments'; +import { downloadAttachmentFs, downloadAttachmentSogsV3 } from '../../receiver/attachments'; import { initializeAttachmentLogic, processNewAttachment } from '../../types/MessageAttachment'; import { getAttachmentMetadata } from '../../types/message/initializeAttachmentMetadata'; import { AttachmentDownloadMessageDetails } from '../../types/sqlSharedTypes'; @@ -179,7 +179,7 @@ async function _runJob(job: any) { // those two functions throw if they get a 404 downloaded = isOpenGroupV2 ? await downloadAttachmentSogsV3(attachment, openGroupV2Details) - : await downloadAttachment(attachment); + : await downloadAttachmentFs(attachment); } catch (error) { // Attachments on the server expire after 60 days, then start returning 404 if (error && error.code === 404) { diff --git a/ts/session/utils/AttachmentsV2.ts b/ts/session/utils/AttachmentsV2.ts index 69328e91f9..59432efc41 100644 --- a/ts/session/utils/AttachmentsV2.ts +++ b/ts/session/utils/AttachmentsV2.ts @@ -7,10 +7,8 @@ import { AttachmentPointer, AttachmentPointerWithUrl, PreviewWithAttachmentUrl, - Quote, - QuotedAttachment, } from '../messages/outgoing/visibleMessage/VisibleMessage'; -import { RawPreview, RawQuote } from './Attachments'; +import { RawPreview } from './Attachments'; import { OpenGroupRequestCommonType } from '../../data/types'; interface UploadParamsV2 { @@ -90,33 +88,3 @@ export async function uploadLinkPreviewsV3( url: preview.url || image.url, }; } - -export async function uploadQuoteThumbnailsV3( - openGroup: OpenGroupRequestCommonType, - quote?: RawQuote -): Promise { - if (!quote) { - return undefined; - } - - const promises = (quote.attachments ?? 
[]).map(async attachment => { - let thumbnail: QuotedAttachment | undefined; - if (attachment.thumbnail) { - thumbnail = (await uploadV3({ - attachment: attachment.thumbnail, - openGroup, - })) as any; - } - return { - ...attachment, - thumbnail, - }; - }); - - const attachments = await Promise.all(promises); - - return { - ...quote, - attachments, - }; -} diff --git a/ts/session/utils/User.ts b/ts/session/utils/User.ts index 94bd20d5d4..cb00538381 100644 --- a/ts/session/utils/User.ts +++ b/ts/session/utils/User.ts @@ -99,14 +99,25 @@ export const getUserED25519KeyPairBytes = async (): Promise => { throw new Error('getUserED25519KeyPairBytes: user has no keypair'); }; +/** + * Return the ed25519 seed of the current user. + * This is used to generate deterministic encryption keys for attachments/profile pictures. + * + * This is cached so will only be slow on the first fetch. + */ +export async function getUserEd25519Seed() { + const ed25519KeyPairBytes = await getUserED25519KeyPairBytes(); + return ed25519KeyPairBytes.privKeyBytes.slice(0, 32); +} + export async function getOurProfile() { const displayName = (await UserConfigWrapperActions.getName()) || 'Anonymous'; const updatedAtSeconds = await UserConfigWrapperActions.getProfileUpdatedSeconds(); - const profilePicWithKey = await UserConfigWrapperActions.getProfilePicWithKeyHex(); + const profilePic = await UserConfigWrapperActions.getProfilePic(); return new OutgoingUserProfile({ displayName, updatedAtSeconds, - picUrlWithProfileKey: profilePicWithKey ?? null, + profilePic: profilePic ?? 
null, }); } diff --git a/ts/session/utils/calling/CallManager.ts b/ts/session/utils/calling/CallManager.ts index b5727fbd5a..b35ce1fbae 100644 --- a/ts/session/utils/calling/CallManager.ts +++ b/ts/session/utils/calling/CallManager.ts @@ -24,7 +24,6 @@ import { getCallMediaPermissionsSettings } from '../../../components/settings/Se import { Data } from '../../../data/data'; import { handleAcceptConversationRequest } from '../../../interactions/conversationInteractions'; import { READ_MESSAGE_STATE } from '../../../models/conversationAttributes'; -import { PnServer } from '../../apis/push_notification_api'; import { SnodeNamespaces } from '../../apis/snode_api/namespaces'; import { DURATION } from '../../constants'; import { DisappearingMessages } from '../../disappearing_messages'; @@ -553,12 +552,11 @@ export async function USER_callRecipient(recipient: string) { preOfferMsg, SnodeNamespaces.Default ); - const { wrappedEnvelope } = await MessageSender.sendSingleMessage({ + await MessageSender.sendSingleMessage({ message: rawPreOffer, isSyncMessage: false, abortSignal: null, }); - void PnServer.notifyPnServer(wrappedEnvelope, recipient); await openMediaDevicesAndAddTracks(); // Note CallMessages are very custom, as we mostly don't sync them to ourselves. 
diff --git a/ts/session/utils/job_runners/JobDeserialization.ts b/ts/session/utils/job_runners/JobDeserialization.ts index 4528d00dba..f43f4777eb 100644 --- a/ts/session/utils/job_runners/JobDeserialization.ts +++ b/ts/session/utils/job_runners/JobDeserialization.ts @@ -13,6 +13,7 @@ import { GroupInvite } from './jobs/GroupInviteJob'; import { GroupPendingRemovals } from './jobs/GroupPendingRemovalsJob'; import { GroupSync } from './jobs/GroupSyncJob'; import { UpdateMsgExpirySwarm } from './jobs/UpdateMsgExpirySwarmJob'; +import { AvatarReupload } from './jobs/AvatarReuploadJob'; export function persistedJobFromData( data: T @@ -27,6 +28,8 @@ export function persistedJobFromData( return new UserSync.UserSyncJob(data) as unknown as PersistedJob; case 'AvatarDownloadJobType': return new AvatarDownload.AvatarDownloadJob(data) as unknown as PersistedJob; + case 'AvatarReuploadJobType': + return new AvatarReupload.AvatarReuploadJob(data) as unknown as PersistedJob; case 'AvatarMigrateJobType': return new AvatarMigrate.AvatarMigrateJob(data) as unknown as PersistedJob; case 'FetchMsgExpirySwarmJobType': diff --git a/ts/session/utils/job_runners/JobRunner.ts b/ts/session/utils/job_runners/JobRunner.ts index e24e4fc0f8..192c78a6e1 100644 --- a/ts/session/utils/job_runners/JobRunner.ts +++ b/ts/session/utils/job_runners/JobRunner.ts @@ -15,6 +15,7 @@ import { UpdateMsgExpirySwarmPersistedData, UserSyncPersistedData, type AvatarMigratePersistedData, + type AvatarReuploadPersistedData, } from './PersistedJob'; import { JobRunnerType } from './jobs/JobRunnerType'; import { DURATION } from '../../constants'; @@ -385,6 +386,9 @@ const groupSyncRunner = new PersistedJobRunner('GroupSyn const avatarDownloadRunner = new PersistedJobRunner( 'AvatarDownloadJob' ); +const avatarReuploadRunner = new PersistedJobRunner( + 'AvatarReuploadJob' +); const avatarMigrateRunner = new PersistedJobRunner('AvatarMigrateJob'); const groupInviteJobRunner = new PersistedJobRunner('GroupInviteJob', 
4); @@ -408,6 +412,7 @@ export const runners = { updateMsgExpiryRunner, fetchSwarmMsgExpiryRunner, avatarDownloadRunner, + avatarReuploadRunner, avatarMigrateRunner, groupInviteJobRunner, groupPendingRemovalJobRunner, diff --git a/ts/session/utils/job_runners/PersistedJob.ts b/ts/session/utils/job_runners/PersistedJob.ts index 6931c015bf..d33ddee652 100644 --- a/ts/session/utils/job_runners/PersistedJob.ts +++ b/ts/session/utils/job_runners/PersistedJob.ts @@ -5,6 +5,7 @@ export type PersistedJobType = | 'UserSyncJobType' | 'GroupSyncJobType' | 'AvatarDownloadJobType' + | 'AvatarReuploadJobType' | 'AvatarMigrateJobType' | 'GroupInviteJobType' | 'GroupPendingRemovalJobType' @@ -38,6 +39,11 @@ export interface AvatarDownloadPersistedData extends PersistedJobData { conversationId: string; } +export interface AvatarReuploadPersistedData extends PersistedJobData { + jobType: 'AvatarReuploadJobType'; + conversationId: string; +} + export interface AvatarMigratePersistedData extends PersistedJobData { jobType: 'AvatarMigrateJobType'; conversationId: string; @@ -61,15 +67,15 @@ export interface UserSyncPersistedData extends PersistedJobData { export interface GroupSyncPersistedData extends PersistedJobData { jobType: 'GroupSyncJobType'; } -interface PersitedDataWithMsgIds extends PersistedJobData { +interface PersistedDataWithMsgIds extends PersistedJobData { msgIds: Array; } -export interface FetchMsgExpirySwarmPersistedData extends PersitedDataWithMsgIds { +export interface FetchMsgExpirySwarmPersistedData extends PersistedDataWithMsgIds { jobType: 'FetchMsgExpirySwarmJobType'; } -export interface UpdateMsgExpirySwarmPersistedData extends PersitedDataWithMsgIds { +export interface UpdateMsgExpirySwarmPersistedData extends PersistedDataWithMsgIds { jobType: 'UpdateMsgExpirySwarmJobType'; } @@ -77,6 +83,7 @@ export type TypeOfPersistedData = | UserSyncPersistedData | AvatarDownloadPersistedData | AvatarMigratePersistedData + | AvatarReuploadPersistedData | 
FetchMsgExpirySwarmPersistedData | UpdateMsgExpirySwarmPersistedData | FakeSleepJobData @@ -92,8 +99,8 @@ export enum RunJobResult { RetryJobIfPossible = 2, PermanentFailure = 3, } -function isDataWithMsgIds(data: PersistedJobData): data is PersitedDataWithMsgIds { - return !isNil((data as PersitedDataWithMsgIds)?.msgIds); +function isDataWithMsgIds(data: PersistedJobData): data is PersistedDataWithMsgIds { + return !isNil((data as PersistedDataWithMsgIds)?.msgIds); } /** diff --git a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts index b76d7c92e0..868b972c64 100644 --- a/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts +++ b/ts/session/utils/job_runners/jobs/AvatarDownloadJob.ts @@ -1,11 +1,10 @@ import { isEmpty, isNumber, isString } from 'lodash'; import { v4 } from 'uuid'; import { UserUtils } from '../..'; -import { downloadAttachment } from '../../../../receiver/attachments'; import { processNewAttachment } from '../../../../types/MessageAttachment'; import { decryptProfile } from '../../../../util/crypto/profileEncrypter'; import { ConvoHub } from '../../../conversations'; -import { fromHexToArray } from '../../String'; +import { ed25519Str, fromHexToArray } from '../../String'; import { runners } from '../JobRunner'; import { AddJobCheckReturn, @@ -14,6 +13,9 @@ import { RunJobResult, } from '../PersistedJob'; import { processAvatarData } from '../../../../util/avatar/processAvatarData'; +import { downloadAttachmentFs } from '../../../../receiver/attachments'; +import { extractDetailsFromUrlFragment } from '../../../url'; +import { MultiEncryptWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; const defaultMsBetweenRetries = 10000; const defaultMaxAttempts = 3; @@ -34,7 +36,7 @@ export function shouldAddAvatarDownloadJob({ conversationId }: { conversationId: return false; } const prevPointer = conversation.getAvatarPointer(); - const profileKey = 
conversation.getProfileKey(); + const profileKey = conversation.getProfileKeyHex(); const hasNoAvatar = isEmpty(prevPointer) || isEmpty(profileKey); if (hasNoAvatar) { @@ -109,7 +111,7 @@ class AvatarDownloadJob extends PersistedJob { } let changes = false; const toDownloadPointer = conversation.getAvatarPointer(); - const toDownloadProfileKey = conversation.getProfileKey(); + const toDownloadProfileKey = conversation.getProfileKeyHex(); // if there is an avatar and profileKey for that user/group ('', null and undefined excluded), download, decrypt and save the avatar locally. if (toDownloadPointer && toDownloadProfileKey) { @@ -117,10 +119,13 @@ class AvatarDownloadJob extends PersistedJob { window.log.debug(`[profileupdate] starting downloading task for ${conversation.id}`); // This is an avatar download, we are free to resize/compress/convert what is downloaded as we wish. // Desktop will generate a normal avatar and a forced static one. Both resized and converted if required. - const downloaded = await downloadAttachment({ + const downloaded = await downloadAttachmentFs({ url: toDownloadPointer, isRaw: true, }); + const { deterministicEncryption } = extractDetailsFromUrlFragment( + new URL(toDownloadPointer) + ); conversation = ConvoHub.use().getOrThrow(convoId); if (!downloaded.data.byteLength) { @@ -136,10 +141,19 @@ class AvatarDownloadJob extends PersistedJob { const profileKeyArrayBuffer = fromHexToArray(toDownloadProfileKey); let decryptedData: ArrayBuffer; try { - decryptedData = await decryptProfile(downloaded.data, profileKeyArrayBuffer); + if (deterministicEncryption) { + const { decryptedData: decryptedData2 } = + await MultiEncryptWrapperActions.attachmentDecrypt({ + encryptedData: new Uint8Array(downloaded.data), + decryptionKey: profileKeyArrayBuffer, + }); + decryptedData = decryptedData2.buffer; + } else { + decryptedData = await decryptProfile(downloaded.data, profileKeyArrayBuffer); + } } catch (decryptError) { window.log.info( - 
`[profileupdate] failed to decrypt downloaded data ${conversation.id} with provided profileKey` + `[profileupdate] failed to decrypt downloaded data for ${ed25519Str(conversation.id)} with provided profileKey` ); // if we got content, but cannot decrypt it with the provided profileKey, there is no need to keep retrying. return RunJobResult.PermanentFailure; @@ -150,7 +164,7 @@ class AvatarDownloadJob extends PersistedJob { ); // we autoscale incoming avatars because our app keeps decrypted avatars in memory and some platforms allows large avatars to be uploaded. - const processed = await processAvatarData(decryptedData); + const processed = await processAvatarData(decryptedData, conversation.isMe(), true); const upgradedMainAvatar = await processNewAttachment({ data: processed.mainAvatarDetails.outputBuffer, diff --git a/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts b/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts index ee681caab9..ec476cade0 100644 --- a/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts +++ b/ts/session/utils/job_runners/jobs/AvatarMigrateJob.ts @@ -109,7 +109,7 @@ class AvatarMigrateJob extends PersistedJob { window.log.warn('AvatarMigrateJob: no avatar pointer found for conversation'); return RunJobResult.Success; } - const existingProfileKeyHex = conversation.getProfileKey(); + const existingProfileKeyHex = conversation.getProfileKeyHex(); if (!existingProfileKeyHex) { window.log.warn('AvatarMigrateJob: no profileKey found for conversation'); return RunJobResult.Success; @@ -141,7 +141,7 @@ class AvatarMigrateJob extends PersistedJob { } // we autoscale incoming avatars because our app keeps decrypted avatars in memory and some platforms allows large avatars to be uploaded. 
- const processed = await processAvatarData(decryptedData); + const processed = await processAvatarData(decryptedData, conversation.isMe(), true); const upgradedMainAvatar = await processNewAttachment({ data: processed.mainAvatarDetails.outputBuffer, diff --git a/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts b/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts new file mode 100644 index 0000000000..0503ef118e --- /dev/null +++ b/ts/session/utils/job_runners/jobs/AvatarReuploadJob.ts @@ -0,0 +1,272 @@ +import { from_hex } from 'libsodium-wrappers-sumo'; +import { isNumber } from 'lodash'; +import { v4 } from 'uuid'; +import { UserUtils } from '../..'; +import { ConvoHub } from '../../../conversations'; +import { ed25519Str } from '../../String'; +import { runners } from '../JobRunner'; +import { + AddJobCheckReturn, + AvatarDownloadPersistedData, + PersistedJob, + RunJobResult, + type AvatarReuploadPersistedData, +} from '../PersistedJob'; +import { DecryptedAttachmentsManager } from '../../../crypto/DecryptedAttachmentsManager'; +import { IMAGE_JPEG } from '../../../../types/MIME'; +import { urlToBlob } from '../../../../types/attachments/VisualAttachment'; +import { ImageProcessor } from '../../../../webworker/workers/browser/image_processor_interface'; +import { maxAvatarDetails } from '../../../../util/attachment/attachmentSizes'; +import { UserConfigWrapperActions } from '../../../../webworker/workers/browser/libsession_worker_interface'; +import { extendFileExpiry } from '../../../apis/file_server_api/FileServerApi'; +import { fileServerUrlToFileId } from '../../../apis/file_server_api/types'; +import { DURATION, DURATION_SECONDS } from '../../../constants'; +import { uploadAndSetOurAvatarShared } from '../../../../interactions/avatar-interactions/nts-avatar-interactions'; +import { FS } from '../../../apis/file_server_api/FileServerTarget'; + +const defaultMsBetweenRetries = 10000; +const defaultMaxAttempts = 3; + +async function 
addAvatarReuploadJob() { + const avatarReuploadJob = new AvatarReuploadJob({ + // postpone this job for 30 seconds, so we don't reupload right on start (we need an onion path to be valid) + nextAttemptTimestamp: Date.now() + DURATION.SECONDS * 30, + conversationId: UserUtils.getOurPubKeyStrFromCache(), + }); + window.log.debug(`addAvatarReuploadJob: adding job reupload `); + await runners.avatarReuploadRunner.addJob(avatarReuploadJob); +} + +async function fetchLocalAvatarDetails(currentMainPath: string) { + try { + const decryptedAvatarLocalUrl = await DecryptedAttachmentsManager.getDecryptedMediaUrl( + currentMainPath, + IMAGE_JPEG, // not needed + true + ); + + if (!decryptedAvatarLocalUrl) { + window.log.warn('Could not decrypt avatar stored locally..'); + return null; + } + const blob = await urlToBlob(decryptedAvatarLocalUrl); + const decryptedAvatarData = await blob.arrayBuffer(); + const metadata = await ImageProcessor.imageMetadata(decryptedAvatarData); + if (!metadata) { + window.log.warn('Failed to get metadata from avatar'); + return null; + } + return { decryptedAvatarData, metadata }; + } catch (e) { + window.log.warn('[avatarReupload] Failed to get metadata from avatar'); + return null; + } +} + +/** + * Returns the current timestamp in seconds. + * Note: this is not the network time, but our local time with an offset, potentially. 
+ * We want to use that one here, as the UserProfile actions are not based on the network timestamp either. + */ +function nowSeconds() { + return Math.floor(Date.now() / 1000); +} + +function shouldSkipRenew({ + ourProfileLastUpdatedSeconds, +}: { + ourProfileLastUpdatedSeconds: number; +}) { + if (window.sessionFeatureFlags.fsTTL30s) { + // this is in dev + return Date.now() / 1000 - ourProfileLastUpdatedSeconds <= 10 * DURATION_SECONDS.SECONDS; + } + // this is in prod + return nowSeconds() - ourProfileLastUpdatedSeconds <= 2 * DURATION_SECONDS.HOURS; +} + +function shouldSkipReupload({ + ourProfileLastUpdatedSeconds, +}: { + ourProfileLastUpdatedSeconds: number; +}) { + if (window.sessionFeatureFlags.fsTTL30s) { + return nowSeconds() - ourProfileLastUpdatedSeconds <= 10 * DURATION_SECONDS.SECONDS; + } + return nowSeconds() - ourProfileLastUpdatedSeconds <= 12 * DURATION_SECONDS.DAYS; +} + +class AvatarReuploadJob extends PersistedJob { + constructor({ + conversationId, + nextAttemptTimestamp, + maxAttempts, + currentRetry, + identifier, + }: Pick & + Partial< + Pick< + AvatarDownloadPersistedData, + 'nextAttemptTimestamp' | 'identifier' | 'maxAttempts' | 'currentRetry' + > + >) { + super({ + jobType: 'AvatarReuploadJobType', + identifier: identifier || v4(), + conversationId, + delayBetweenRetries: defaultMsBetweenRetries, + maxAttempts: isNumber(maxAttempts) ? maxAttempts : defaultMaxAttempts, + nextAttemptTimestamp: nextAttemptTimestamp || Date.now() + defaultMsBetweenRetries, + currentRetry: isNumber(currentRetry) ? 
currentRetry : 0, + }); + } + + public async run(): Promise { + const convoId = this.persistedData.conversationId; + window.log.debug( + `[avatarReupload] running job ${this.persistedData.jobType} id:"${this.persistedData.identifier}" ` + ); + + if (!this.persistedData.identifier) { + return RunJobResult.PermanentFailure; + } + if (!convoId) { + return RunJobResult.PermanentFailure; + } + + let conversation = ConvoHub.use().get(convoId); + if (!conversation || !conversation.isMe()) { + // Note: if we add the groupv2 case here, we'd need to add a profile_updated timestamp to the metagroup wrapper + window.log.warn('[avatarReupload] did not find corresponding conversation, or not us'); + + return RunJobResult.PermanentFailure; + } + const ourProfileLastUpdatedSeconds = await UserConfigWrapperActions.getProfileUpdatedSeconds(); + const currentMainPath = conversation.getAvatarInProfilePath(); + const avatarPointer = conversation.getAvatarPointer(); + const profileKey = conversation.getProfileKeyHex(); + const { fileId, fullUrl } = fileServerUrlToFileId(avatarPointer); + if (!currentMainPath || !avatarPointer || !profileKey || !fullUrl) { + // we do not have an avatar to reupload, nothing to do. 
+ return RunJobResult.Success; + } + + try { + const currentAvatarDetails = await fetchLocalAvatarDetails(currentMainPath); + if (!currentAvatarDetails) { + return RunJobResult.RetryJobIfPossible; + } + const { decryptedAvatarData, metadata } = currentAvatarDetails; + + window.log.debug(`[avatarReupload] starting for ${ed25519Str(conversation.id)}`); + + if ( + ourProfileLastUpdatedSeconds !== 0 && + metadata.width <= maxAvatarDetails.maxSidePlanReupload && + metadata.height <= maxAvatarDetails.maxSidePlanReupload + ) { + const target = FS.fileUrlToFileTarget(fullUrl?.toString()); + window.log.debug( + `[avatarReupload] main avatar is already the right size for ${ed25519Str(conversation.id)} target:${target}` + ); + if (shouldSkipRenew({ ourProfileLastUpdatedSeconds })) { + // we don't want to call `renew` too often. Only once every 2hours (or more when the fsTTL30s feature is enabled) + window.log.debug( + `[avatarReupload] not trying to renew avatar for ${ed25519Str(conversation.id)} of file ${fileId} as we did one recently` + ); + // considering this to be a success + return RunJobResult.Success; + } + window.log.debug( + `[avatarReupload] renewing avatar on fs: ${target} for ${ed25519Str(conversation.id)} and file:${fileId}` + ); + const expiryRenewResult = await extendFileExpiry(fileId, target); + + if (expiryRenewResult) { + window.log.debug( + `[avatarReupload] expiry renew for ${ed25519Str(conversation.id)} of file:${fileId} on fs: ${target} was successful` + ); + + await UserConfigWrapperActions.getProfilePic(); + + await UserConfigWrapperActions.setReuploadProfilePic({ + key: from_hex(profileKey), + url: avatarPointer, + }); + + return RunJobResult.Success; + } + window.log.debug( + `[avatarReupload] expiry renew for ${ed25519Str(conversation.id)} of file:${fileId} on fs: ${target} failed` + ); + + // AUDRIC: expiry renew for (...efb27b5b) of file:Ff1CvAQIo1BXCeoV3DwTjYEzSoBPZW56FeExk8qij79h on fs: POTATO failed + // keep failing even whe it shouldnt + + 
if (shouldSkipReupload({ ourProfileLastUpdatedSeconds })) { + window.log.debug( + `[avatarReupload] ${ed25519Str(conversation.id)} last reupload was recent enough, so we don't want to reupload it` + ); + // considering this to be a success + return RunJobResult.Success; + } + // renew failed, and our last reupload was not too recent, so we want to reprocess and + // reupload our current avatar, see below... + } + + // here, + // - either the format or the size is wrong + // - or we do not have a ourProfileLastUpdatedSeconds yet + // - or the expiry renew failed and our last reupload not recent + // In all those cases, we want to reprocess our current avatar, and reupload it. + + window.log.info( + `[avatarReupload] about to auto scale avatar for convo ${ed25519Str(conversation.id)}` + ); + + conversation = ConvoHub.use().getOrThrow(convoId); + + // Reprocess the avatar content, and reupload it + // This will pick the correct file server depending on the env variables set. + const details = await uploadAndSetOurAvatarShared({ + decryptedAvatarData, + ourConvo: conversation, + context: 'reuploadAvatar', + }); + if (!details?.avatarPointer) { + window.log.warn( + `[avatarReupload] failed to reupload avatar for ${ed25519Str(conversation.id)}` + ); + throw new Error('details.avatarPointer is not valid after uploadAndSetOurAvatarShared'); + } + window.log.info( + `[avatarReupload] reupload done for ${ed25519Str(conversation.id)}: ${details?.avatarPointer}` + ); + return RunJobResult.Success; + } catch (e) { + window.log.warn(`[avatarReupload] failed with ${e.message}`); + return RunJobResult.RetryJobIfPossible; + } + } + + public serializeJob(): AvatarReuploadPersistedData { + return super.serializeBase(); + } + + public nonRunningJobsToRemove(_jobs: Array) { + return []; + } + + public addJobCheck(_jobs: Array): AddJobCheckReturn { + return null; + } + + public getJobTimeoutMs(): number { + return 10000; + } +} + +export const AvatarReupload = { + AvatarReuploadJob, + 
addAvatarReuploadJob, +}; diff --git a/ts/session/utils/job_runners/jobs/JobRunnerType.ts b/ts/session/utils/job_runners/jobs/JobRunnerType.ts index 46b9d0c0b1..58dd135748 100644 --- a/ts/session/utils/job_runners/jobs/JobRunnerType.ts +++ b/ts/session/utils/job_runners/jobs/JobRunnerType.ts @@ -6,6 +6,7 @@ export type JobRunnerType = | 'FakeSleepForJob' | 'FakeSleepForMultiJob' | 'AvatarDownloadJob' + | 'AvatarReuploadJob' | 'AvatarMigrateJob' | 'GroupInviteJob' | 'GroupPromoteJob' diff --git a/ts/session/utils/libsession/libsession_utils_contacts.ts b/ts/session/utils/libsession/libsession_utils_contacts.ts index e13509e5d4..5bb591ce53 100644 --- a/ts/session/utils/libsession/libsession_utils_contacts.ts +++ b/ts/session/utils/libsession/libsession_utils_contacts.ts @@ -62,7 +62,7 @@ async function insertContactFromDBIntoWrapperAndRefresh( const dbName = foundConvo.getRealSessionUsername() || undefined; const dbNickname = foundConvo.get('nickname'); const dbProfileUrl = foundConvo.getAvatarPointer() || undefined; - const dbProfileKey = foundConvo.getProfileKey() || undefined; + const dbProfileKey = foundConvo.getProfileKeyHex() || undefined; const dbApproved = !!foundConvo.get('isApproved'); const dbApprovedMe = !!foundConvo.get('didApproveMe'); const dbBlocked = foundConvo.isBlocked(); diff --git a/ts/state/ducks/metaGroups.ts b/ts/state/ducks/metaGroups.ts index 694843afc1..7194d0fda6 100644 --- a/ts/state/ducks/metaGroups.ts +++ b/ts/state/ducks/metaGroups.ts @@ -1000,7 +1000,7 @@ async function handleAvatarChangeFromUI({ const dataUnencrypted = await blobAvatarAlreadyScaled.arrayBuffer(); - const processed = await processAvatarData(dataUnencrypted); + const processed = await processAvatarData(dataUnencrypted, true); if (!processed) { throw new Error('Failed to process avatar'); @@ -1011,8 +1011,13 @@ async function handleAvatarChangeFromUI({ // encrypt the avatar data with the profile key const encryptedData = await 
encryptProfile(processed.mainAvatarDetails.outputBuffer, profileKey); - // TODO: we should store the expiries of the attachment somewhere in libsession I assume, and reupload as needed - const uploadedFileDetails = await uploadFileToFsWithOnionV4(encryptedData); + // Note: currently deterministic encryption is not supported for group's avatars + const deterministicEncryption = false; + + const uploadedFileDetails = await uploadFileToFsWithOnionV4( + encryptedData, + deterministicEncryption + ); if (!uploadedFileDetails || !uploadedFileDetails.fileUrl) { window?.log?.warn('File upload for groupv2 to file server failed'); throw new Error('File upload for groupv2 to file server failed'); @@ -1115,7 +1120,7 @@ async function handleClearAvatarFromUI({ groupPk }: WithGroupPubkey) { isNil(convo.getAvatarPointer()) && isNil(convo.getAvatarInProfilePath()) && isNil(convo.getFallbackAvatarInProfilePath()) && - isNil(convo.getProfileKey()) + isNil(convo.getProfileKeyHex()) ) { return; } diff --git a/ts/state/ducks/types/releasedFeaturesReduxTypes.ts b/ts/state/ducks/types/releasedFeaturesReduxTypes.ts index 65b54acadd..bb9a05d9d4 100644 --- a/ts/state/ducks/types/releasedFeaturesReduxTypes.ts +++ b/ts/state/ducks/types/releasedFeaturesReduxTypes.ts @@ -5,6 +5,7 @@ export type SessionFeatureFlags = { replaceLocalizedStringsWithKeys: boolean; // Hooks useOnionRequests: boolean; + useDeterministicEncryption: boolean; useTestNet: boolean; useLocalDevNet: string; useClosedGroupV2QAButtons: boolean; diff --git a/ts/state/ducks/user.ts b/ts/state/ducks/user.ts index 96ba1a71da..4be27fc057 100644 --- a/ts/state/ducks/user.ts +++ b/ts/state/ducks/user.ts @@ -2,7 +2,6 @@ import { isNil } from 'lodash'; import { createAsyncThunk, createSlice, type PayloadAction } from '@reduxjs/toolkit'; import { ConvoHub } from '../../session/conversations'; import { SyncUtils, UserUtils } from '../../session/utils'; -import { getSodiumRenderer } from '../../session/crypto'; import { 
uploadAndSetOurAvatarShared } from '../../interactions/avatar-interactions/nts-avatar-interactions'; import { ed25519Str } from '../../session/utils/String'; import { userSettingsModal, updateEditProfilePictureModal } from './modalDialog'; @@ -37,14 +36,9 @@ const updateOurAvatar = createAsyncThunk( return null; } - const sodium = await getSodiumRenderer(); - // Uploading a new avatar, we want to encrypt its data with a new key. - const profileKey = sodium.randombytes_buf(32); - const res = await uploadAndSetOurAvatarShared({ decryptedAvatarData: mainAvatarDecrypted, ourConvo, - profileKey, context: 'uploadNewAvatar', }); @@ -75,7 +69,7 @@ const clearOurAvatar = createAsyncThunk('user/clearOurAvatar', async () => { isNil(convo.getAvatarPointer()) && isNil(convo.getAvatarInProfilePath()) && isNil(convo.getFallbackAvatarInProfilePath()) && - isNil(convo.getProfileKey()) + isNil(convo.getProfileKeyHex()) ) { return; } diff --git a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts index a421517374..7bde96ea0c 100644 --- a/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts +++ b/ts/test/session/unit/decrypted_attachments/decryptedAttachmentsManager_test.ts @@ -61,10 +61,10 @@ describe('DecryptedAttachmentsManager', () => { 'decryptAttachmentBufferNode', new Uint8Array(5) ); - TestUtils.stubCreateObjectUrl(); }); it('url starts with attachment path but is not already decrypted', async () => { + TestUtils.stubUrlCreateObjectURL(); expect( DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') ).to.be.eq(null); @@ -89,6 +89,7 @@ describe('DecryptedAttachmentsManager', () => { }); it('url starts with attachment path and is already decrypted', async () => { + TestUtils.stubUrlCreateObjectURL(); expect( DecryptedAttachmentsManager.getAlreadyDecryptedMediaUrl('/local/attachment/attachment1') 
).to.be.eq(null); diff --git a/ts/test/session/unit/messages/ChatMessage_test.ts b/ts/test/session/unit/messages/ChatMessage_test.ts index 9744a8dbbd..ae5436c556 100644 --- a/ts/test/session/unit/messages/ChatMessage_test.ts +++ b/ts/test/session/unit/messages/ChatMessage_test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai'; +import Sinon from 'sinon'; // eslint-disable-next-line import/order -import { TextEncoder } from 'util'; import { toNumber } from 'lodash'; import { SignalService } from '../../../../protobuf'; @@ -12,7 +12,7 @@ import { VisibleMessage, } from '../../../../session/messages/outgoing/visibleMessage/VisibleMessage'; import { DisappearingMessageMode } from '../../../../session/disappearing_messages/types'; -import { OutgoingUserProfile } from '../../../../types/message'; +import { TestUtils } from '../../../test-utils'; const sharedNoExpire = { expirationType: DisappearingMessageMode[0], @@ -20,6 +20,9 @@ const sharedNoExpire = { }; describe('VisibleMessage', () => { + afterEach(() => { + Sinon.restore(); + }); it('can create empty message with just a timestamp', () => { const message = new VisibleMessage({ createAtNetworkTimestamp: Date.now(), @@ -84,35 +87,8 @@ describe('VisibleMessage', () => { ); }); - it('can create message with a full loki profile', () => { - const profileKey = new TextEncoder().encode('profileKey'); - - const lokiProfile = { - displayName: 'displayName', - avatarPointer: 'avatarPointer', - profileKey, - updatedAtSeconds: 1, - }; - const message = new VisibleMessage({ - createAtNetworkTimestamp: Date.now(), - userProfile: new OutgoingUserProfile(lokiProfile), - ...sharedNoExpire, - }); - const plainText = message.plainTextBuffer(); - const decoded = SignalService.Content.decode(plainText); - expect(decoded.dataMessage).to.have.deep.property('profile'); - - expect(decoded.dataMessage) - .to.have.property('profile') - .to.have.deep.property('displayName', 'displayName'); - expect(decoded.dataMessage) - 
.to.have.property('profile') - .to.have.deep.property('profilePicture', 'avatarPointer'); - expect(decoded.dataMessage).to.have.deep.property('profileKey', profileKey); - }); - it('can create message with a quote without attachments', () => { - const quote: Quote = { id: 1234, author: 'author', text: 'text' }; + const quote: Quote = { id: 1234, author: 'author' }; const message = new VisibleMessage({ createAtNetworkTimestamp: Date.now(), quote, @@ -124,7 +100,6 @@ describe('VisibleMessage', () => { const decodedID = toNumber(decoded.dataMessage?.quote?.id); expect(decodedID).to.be.equal(1234); expect(decoded.dataMessage?.quote).to.have.deep.property('author', 'author'); - expect(decoded.dataMessage?.quote).to.have.deep.property('text', 'text'); }); it('can create message with a preview', () => { @@ -148,6 +123,9 @@ describe('VisibleMessage', () => { }); it('can create message with an AttachmentPointer', () => { + TestUtils.stubWindowFeatureFlags(); + TestUtils.stubURLCanParse(); + const attachment: AttachmentPointerWithUrl = { url: 'http://thisisaareal/url/1234', contentType: 'contentType', diff --git a/ts/test/session/unit/messages/MessageRequestResponse_test.ts b/ts/test/session/unit/messages/MessageRequestResponse_test.ts index 184e879d85..ac81ed0407 100644 --- a/ts/test/session/unit/messages/MessageRequestResponse_test.ts +++ b/ts/test/session/unit/messages/MessageRequestResponse_test.ts @@ -1,13 +1,19 @@ import { expect } from 'chai'; +import { from_hex } from 'libsodium-wrappers-sumo'; import { v4 } from 'uuid'; +import Sinon from 'sinon'; import { SignalService } from '../../../../protobuf'; import { Constants } from '../../../../session'; import { MessageRequestResponse } from '../../../../session/messages/outgoing/controlMessage/MessageRequestResponse'; import { OutgoingUserProfile } from '../../../../types/message'; +import { TestUtils } from '../../../test-utils'; describe('MessageRequestResponse', () => { let message: MessageRequestResponse | undefined; 
+ afterEach(() => { + Sinon.restore(); + }); it('correct ttl', () => { message = new MessageRequestResponse({ createAtNetworkTimestamp: Date.now(), @@ -67,8 +73,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: null, + profilePic: { url: null, key: null }, updatedAtSeconds: 1, }), }); @@ -85,8 +90,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: new Uint8Array(), + profilePic: null, updatedAtSeconds: 1, }), }); @@ -100,36 +104,32 @@ describe('MessageRequestResponse', () => { }); it('can create response with display name and profileKey and profileImage', () => { + TestUtils.stubURLCanParse(); + + const userProfile = new OutgoingUserProfile({ + displayName: 'Jane', + profilePic: { + url: 'http://filev2.getsession.org/file/abcdefghijklmnop', + key: from_hex('0102030405060102030405060102030401020304050601020304050601020304'), + }, + updatedAtSeconds: 1, + }); message = new MessageRequestResponse({ createAtNetworkTimestamp: Date.now(), - userProfile: new OutgoingUserProfile({ - displayName: 'Jane', - avatarPointer: 'https://somevalidurl.com', - profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]), - updatedAtSeconds: 1, - }), + userProfile, }); const plainText = message.plainTextBuffer(); const decoded = SignalService.Content.decode(plainText); expect(decoded.messageRequestResponse?.profile?.displayName).to.be.deep.eq('Jane'); - expect(decoded.messageRequestResponse?.profileKey).to.be.not.empty; - - if (!decoded.messageRequestResponse?.profileKey?.buffer) { - throw new Error('decoded.messageRequestResponse?.profileKey?.buffer should be set'); - } expect(decoded.messageRequestResponse?.profile?.profilePicture).to.be.eq( - 'https://somevalidurl.com' + 'http://filev2.getsession.org/file/abcdefghijklmnop' ); // don't 
ask me why deep.eq ([1,2,3, ...]) gives nothing interesting but a 8192 buffer not matching - expect(decoded.messageRequestResponse?.profileKey.length).to.be.eq(6); - expect(decoded.messageRequestResponse?.profileKey[0]).to.be.eq(1); - expect(decoded.messageRequestResponse?.profileKey[1]).to.be.eq(2); - expect(decoded.messageRequestResponse?.profileKey[2]).to.be.eq(3); - expect(decoded.messageRequestResponse?.profileKey[3]).to.be.eq(4); - expect(decoded.messageRequestResponse?.profileKey[4]).to.be.eq(5); - expect(decoded.messageRequestResponse?.profileKey[5]).to.be.eq(6); + expect(decoded.messageRequestResponse?.profileKey).to.be.deep.eq( + from_hex('0102030405060102030405060102030401020304050601020304050601020304') + ); }); it('profileKey not included if profileUrl not set', () => { @@ -137,8 +137,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: null, - profileKey: new Uint8Array([1, 2, 3, 4, 5, 6]), + profilePic: { url: null, key: new Uint8Array([1, 2, 3, 4, 5, 6]) }, updatedAtSeconds: 1, }), }); @@ -160,8 +159,7 @@ describe('MessageRequestResponse', () => { createAtNetworkTimestamp: Date.now(), userProfile: new OutgoingUserProfile({ displayName: 'Jane', - avatarPointer: 'https://somevalidurl.com', - profileKey: null, + profilePic: { url: 'https://somevalidurl.com', key: null }, updatedAtSeconds: 1, }), }); diff --git a/ts/test/session/unit/sending/MessageSender_test.ts b/ts/test/session/unit/sending/MessageSender_test.ts index 37f0371bc6..0047cb5796 100644 --- a/ts/test/session/unit/sending/MessageSender_test.ts +++ b/ts/test/session/unit/sending/MessageSender_test.ts @@ -21,7 +21,6 @@ import { TestUtils } from '../../../test-utils'; import { TypedStub, expectAsyncToThrow, - stubCreateObjectUrl, stubData, stubUtilWorker, stubValidSnodeSwarm, @@ -319,7 +318,6 @@ describe('MessageSender', () => { 'X-SOGS-Signature': 
'gYqpWZX6fnF4Gb2xQM3xaXs0WIYEI49+B8q4mUUEg8Rw0ObaHUWfoWjMHMArAtP9QlORfiydsKWz1o6zdPVeCQ==', }); - stubCreateObjectUrl(); Sinon.stub(OpenGroupMessageV2, 'fromJson').resolves(); }); diff --git a/ts/test/test-utils/utils/stubbing.ts b/ts/test/test-utils/utils/stubbing.ts index 1f583e116e..aa72517c41 100644 --- a/ts/test/test-utils/utils/stubbing.ts +++ b/ts/test/test-utils/utils/stubbing.ts @@ -62,11 +62,30 @@ export function stubUserGroupWrapper { +export function stubURLCanParse() { + if (!URL.canParse) { + URL.canParse = () => true; + } + Sinon.stub(URL, 'canParse').callsFake((url, base) => { + try { + // eslint-disable-next-line no-new + new URL(url, base); + return true; + } catch { + return false; + } + }); +} + +export function stubUrlCreateObjectURL() { + if (!URL.createObjectURL) { + URL.createObjectURL = () => { + return `${Date.now()}:${Math.floor(Math.random() * 1000)}`; + }; + } + Sinon.stub(URL, 'createObjectURL').callsFake(() => { return `${Date.now()}:${Math.floor(Math.random() * 1000)}`; - }; + }); } type WindowValue = Partial | undefined; diff --git a/ts/types/Attachment.ts b/ts/types/Attachment.ts index 3b11f14f1f..0aa7101336 100644 --- a/ts/types/Attachment.ts +++ b/ts/types/Attachment.ts @@ -33,7 +33,7 @@ export type AttachmentThumbnail = { path?: string; }; -export interface AttachmentType { +export type AttachmentType = { contentType: MIME.MIMEType; fileName: string; /** For messages not already on disk, this will be a data url */ @@ -50,13 +50,13 @@ export interface AttachmentType { videoUrl?: string; /** Not included in protobuf, needs to be pulled from flags */ isVoiceMessage?: boolean; -} +}; -export interface AttachmentTypeWithPath extends AttachmentType { +export type AttachmentTypeWithPath = AttachmentType & { path: string; flags?: number; error?: any; -} +}; // UI-focused functions @@ -116,7 +116,7 @@ export function getThumbnailUrl(attachment: AttachmentType): string { return attachment?.thumbnail?.url || getUrl(attachment); } -export 
function getUrl(attachment: AttachmentType): string { +export function getUrl(attachment: Pick): string { return attachment?.screenshot?.url || attachment.url; } @@ -129,7 +129,7 @@ export function isImage(attachments?: Array) { ); } -export function isImageAttachment(attachment: AttachmentType): boolean { +export function isImageAttachment(attachment: Pick): boolean { return Boolean( attachment && attachment.contentType && isImageTypeSupported(attachment.contentType) ); @@ -143,7 +143,7 @@ export function isVideo(attachments?: Array): boolean { return Boolean(attachments && isVideoAttachment(attachments[0])); } -export function isVideoAttachment(attachment?: AttachmentType): boolean { +export function isVideoAttachment(attachment?: Pick): boolean { return Boolean( !!attachment && !!attachment.contentType && isVideoTypeSupported(attachment.contentType) ); @@ -190,7 +190,9 @@ export function getImageDimensionsInAttachment(attachment: AttachmentType): Dime }; } -export function areAllAttachmentsVisual(attachments?: Array): boolean { +export function areAllAttachmentsVisual( + attachments?: Array> +): boolean { if (!attachments) { return false; } diff --git a/ts/types/MessageAttachment.ts b/ts/types/MessageAttachment.ts index 8df234c107..ad757445b9 100644 --- a/ts/types/MessageAttachment.ts +++ b/ts/types/MessageAttachment.ts @@ -118,35 +118,6 @@ export const loadPreviewData = async (preview: any): Promise> => { ]; }; -export const loadQuoteData = async (quote: any) => { - if (!quote) { - return null; - } - if (!quote.attachments?.length || isEmpty(quote.attachments[0])) { - return quote; - } - - const quotedFirstAttachment = await quote.attachments[0]; - - const { thumbnail } = quotedFirstAttachment; - - if (!thumbnail || !thumbnail.path) { - return { - ...quote, - attachments: [quotedFirstAttachment], - }; - } - const quotedAttachmentWithThumbnail = { - ...quotedFirstAttachment, - thumbnail: await loadAttachmentData(thumbnail), - }; - - return { - ...quote, - 
attachments: [quotedAttachmentWithThumbnail], - }; -}; - /** * Any `data: ArrayBuffer` provided here must first have been oriented to the * right orientation using one of the ImageProcessor functions. @@ -216,7 +187,9 @@ const migrateDataToFileSystem = async (data?: ArrayBuffer) => { const isValidData = isArrayBuffer(data); if (!isValidData) { - throw new TypeError(`Expected ${data} to be an array buffer got: ${typeof data}`); + throw new TypeError( + `migrateDataToFileSystem: Expected 'data' to be an array buffer got: ${typeof data}` + ); } const path = await writeNewAttachmentData(data); diff --git a/ts/types/attachments/VisualAttachment.ts b/ts/types/attachments/VisualAttachment.ts index 04d12c7cea..ef58e5b419 100644 --- a/ts/types/attachments/VisualAttachment.ts +++ b/ts/types/attachments/VisualAttachment.ts @@ -14,9 +14,9 @@ import { getFeatureFlag } from '../../state/ducks/types/releasedFeaturesReduxTyp import { processAvatarData } from '../../util/avatar/processAvatarData'; import type { ProcessedAvatarDataType } from '../../webworker/workers/node/image_processor/image_processor'; import { ImageProcessor } from '../../webworker/workers/browser/image_processor_interface'; -import { maxThumbnailDetails } from '../../util/attachment/attachmentSizes'; +import { maxAvatarDetails, maxThumbnailDetails } from '../../util/attachment/attachmentSizes'; -export const THUMBNAIL_CONTENT_TYPE = 'image/png'; +export const THUMBNAIL_CONTENT_TYPE = 'image/webp'; export const urlToBlob = async (dataUrl: string) => { return (await fetch(dataUrl)).blob(); @@ -186,21 +186,6 @@ export const revokeObjectUrl = (objectUrl: string) => { URL.revokeObjectURL(objectUrl); }; -async function autoScaleAvatarBlob(file: File): Promise { - try { - const arrayBuffer = await file.arrayBuffer(); - const processed = await processAvatarData(arrayBuffer); - return processed; - } catch (e) { - ToastUtils.pushToastError( - 'pickFileForAvatar', - `An error happened while picking/resizing the image: 
"${e.message?.slice(200) || ''}"` - ); - window.log.error(e); - return null; - } -} - async function pickFileForReal() { const acceptedImages = ['.png', '.gif', '.jpeg', '.jpg']; if (getFeatureFlag('proAvailable')) { @@ -225,11 +210,14 @@ async function pickFileForReal() { } async function pickFileForTestIntegration() { - const blueAvatarDetails = await ImageProcessor.testIntegrationFakeAvatar(500, { - r: 0, - g: 0, - b: 255, - }); + const blueAvatarDetails = await ImageProcessor.testIntegrationFakeAvatar( + maxAvatarDetails.maxSidePlanReupload, + { + r: 0, + g: 0, + b: 255, + } + ); const file = new File([blueAvatarDetails.outputBuffer], 'testIntegrationFakeAvatar.jpeg', { type: blueAvatarDetails.format, }); @@ -239,8 +227,27 @@ async function pickFileForTestIntegration() { /** * Shows the system file picker for images, scale the image down for avatar/opengroup measurements and return the blob objectURL on success */ -export async function pickFileForAvatar(): Promise { +export async function pickFileForAvatar( + processingCb: (isProcessing: boolean) => void +): Promise { const file = isTestIntegration() ? 
await pickFileForTestIntegration() : await pickFileForReal(); - return autoScaleAvatarBlob(file); + try { + processingCb(true); + const arrayBuffer = await file.arrayBuffer(); + // pickFileForAvatar is only used for avatars we want to be able to reupload (ourselves or 03-groups) + const processed = await processAvatarData(arrayBuffer, true); + return processed; + } catch (e) { + ToastUtils.pushToastError( + 'pickFileForAvatar', + `An error happened while picking/resizing the image: "${ + e.message.slice(0, e.message.indexOf('\n')).slice(0, 200) || '' + }"` + ); + window.log.error(e); + return null; + } finally { + processingCb(false); + } } diff --git a/ts/types/message/index.ts b/ts/types/message/index.ts index c082a76ca1..ace10637be 100644 --- a/ts/types/message/index.ts +++ b/ts/types/message/index.ts @@ -5,31 +5,29 @@ import { isEmpty, isString, isTypedArray } from 'lodash'; import { MessageAttributes } from '../../models/messageType'; import { SignalService } from '../../protobuf'; import { Timestamp } from '../timestamp/timestamp'; +import { addProfileKeyToUrl, extractDetailsFromUrlFragment } from '../../session/url'; +// Note: this removes the profile key from the url on purpose, +// as we do not want to share this design with the other platforms +// (and we do not want to send it as part of the URL). 
function extractPicDetailsFromUrl(src: string | null): ProfilePicture { - if (!src) { + if (!src || !URL.canParse(src)) { return { url: null, key: null }; } - const urlParts = src.split('#'); - if (urlParts.length !== 2) { - throw new Error('extractPicDetailsFromUrl url does not contain a profileKey'); - } - const url = urlParts[0]; - const key = urlParts[1]; - + const { profileKey: key, urlWithoutProfileKey } = extractDetailsFromUrlFragment(new URL(src)); // throwing here, as if src is not empty we expect a key to be set if (!isEmpty(key) && !isString(key)) { throw new Error('extractPicDetailsFromUrl: profileKey is set but not a string'); } // throwing here, as if src is not empty we expect an url to be set - if (!isEmpty(url) && !isString(url)) { + if (!isEmpty(urlWithoutProfileKey) && !isString(urlWithoutProfileKey)) { throw new Error('extractPicDetailsFromUrl: avatarPointer is set but not a string'); } - if (!url || !key) { + if (!urlWithoutProfileKey || !key) { // this shouldn't happen, but we check it anyway return { url: null, key: null }; } - return { url, key: from_hex(key) }; + return { url: urlWithoutProfileKey, key: from_hex(key) }; } class OutgoingUserProfile { @@ -48,10 +46,8 @@ class OutgoingUserProfile { }: { displayName: string; updatedAtSeconds: number; - } & ( - | { picUrlWithProfileKey: string | null } - | { profileKey: Uint8Array | string | null; avatarPointer: string | null } - )) { + profilePic: ProfilePicture | null; + }) { if (!isString(displayName)) { throw new Error('displayName is not a string'); } @@ -61,33 +57,16 @@ class OutgoingUserProfile { value: updatedAtSeconds, expectedUnit: 'seconds', }); - if ('picUrlWithProfileKey' in args) { - this.initFromPicWithUrl(args.picUrlWithProfileKey); - } else { - this.initFromPicDetails(args); - } + this.initFromPicDetails(args.profilePic); } - private initFromPicWithUrl(picUrlWithProfileKey: string | null) { - if (!picUrlWithProfileKey) { + private initFromPicDetails(args: ProfilePicture | null) 
{ + if (!args) { this.picUrlWithProfileKey = null; return; } - // this throws if the url is not valid - // or if the fields cannot be extracted - extractPicDetailsFromUrl(picUrlWithProfileKey); - - this.picUrlWithProfileKey = picUrlWithProfileKey; - } - - private initFromPicDetails({ - profileKey: profileKeyIn, - avatarPointer, - }: { - profileKey: Uint8Array | string | null; - avatarPointer: string | null; - }) { - if (!profileKeyIn && !avatarPointer) { + const { key: profileKeyIn, url: avatarPointer } = args; + if (!profileKeyIn || !avatarPointer) { this.picUrlWithProfileKey = null; return; } @@ -107,39 +86,39 @@ class OutgoingUserProfile { this.picUrlWithProfileKey = null; return; } + if (profileKey) { - this.picUrlWithProfileKey = `${avatarPointer}#${to_hex(profileKey)}`; + const withProfileKey = addProfileKeyToUrl(new URL(avatarPointer), to_hex(profileKey)); + this.picUrlWithProfileKey = withProfileKey.toString(); } else { this.picUrlWithProfileKey = avatarPointer; } } + /** + * @returns the profile picture with the key in a Uint8Array format. + * Note: this removes the profile key from the url on purpose, + * as we do not want to share this design with the other platforms. + */ public toProfilePicture(): ProfilePicture { return extractPicDetailsFromUrl(this.picUrlWithProfileKey); } + /** + * This function is used to get the profile picture with the key in hex format. + * Note: this removes the profile key from the url on purpose, + * as we do not want to share this design with the other platforms. + */ public toHexProfilePicture() { - const details = extractPicDetailsFromUrl(this.picUrlWithProfileKey); + const details = this.toProfilePicture(); return { url: details.url, key: details.key ? 
to_hex(details.key) : null }; } - public isEmpty(): boolean { - return !this.displayName && !this.picUrlWithProfileKey; - } - - private emptyProtobufDetails() { - // Note: profileKey: undefined is not allowed by protobuf - return { profile: undefined }; - } - - public getUpdatedAtSeconds(): number { - return this.lastProfileUpdateTs.seconds(); - } - - public getUpdatedAtMs(): number { - return this.lastProfileUpdateTs.ms(); - } - + /** + * This function is used to get the protobuf details that needs to be added to an outgoing message. + * The URL of the profile picture will be stripped of the profile key if it was set on purpose, see `toProfilePicture()`. + * The URL of the profile picture will also be stripped of the serverPk, if the fileServer used is the default one. + */ public toProtobufDetails(): Partial> { if (this.isEmpty()) { return this.emptyProtobufDetails(); @@ -160,6 +139,23 @@ class OutgoingUserProfile { // no profileKey provided here return { profile }; } + + public isEmpty(): boolean { + return !this.displayName && !this.picUrlWithProfileKey; + } + + private emptyProtobufDetails() { + // Note: profileKey: undefined is not allowed by protobuf + return { profile: undefined }; + } + + public getUpdatedAtSeconds(): number { + return this.lastProfileUpdateTs.seconds(); + } + + public getUpdatedAtMs(): number { + return this.lastProfileUpdateTs.ms(); + } } export function longOrNumberToNumber(value: number | Long): number { diff --git a/ts/util/attachment/attachmentSizes.ts b/ts/util/attachment/attachmentSizes.ts index 21548a2454..7934f624a4 100644 --- a/ts/util/attachment/attachmentSizes.ts +++ b/ts/util/attachment/attachmentSizes.ts @@ -1,13 +1,19 @@ -import { FILESIZE } from '../../session/constants'; - export const ATTACHMENT_DEFAULT_MAX_SIDE = 4096; export const maxAvatarDetails = { - maxSide: 200, - maxSize: 5 * FILESIZE.MB, + /** + * 600 px + */ + maxSidePlanReupload: 600, + /** + * 200 px + */ + maxSideNoReuploadRequired: 200, }; export const 
maxThumbnailDetails = { + /** + * 200 px + */ maxSide: 200, - maxSize: 200 * 1000, // 200 ko }; diff --git a/ts/util/avatar/processAvatarData.ts b/ts/util/avatar/processAvatarData.ts index 021b14e3ce..38aec32a48 100644 --- a/ts/util/avatar/processAvatarData.ts +++ b/ts/util/avatar/processAvatarData.ts @@ -1,6 +1,5 @@ import { isArrayBuffer } from 'lodash'; import { ImageProcessor } from '../../webworker/workers/browser/image_processor_interface'; -import { maxAvatarDetails } from '../attachment/attachmentSizes'; import { MAX_ATTACHMENT_FILESIZE_BYTES } from '../../session/constants'; /** @@ -11,8 +10,17 @@ import { MAX_ATTACHMENT_FILESIZE_BYTES } from '../../session/constants'; * - mainAvatarDetails will be animated (webp enforced) if the source was animated, or a jpeg of the original image * - avatarFallback will be an image (jpeg enforced) of the first frame of `mainAvatarDetails` if it was animated, or null * + * There is a specific case for the avatars that we need to be able to reupload, + * as we do want to keep a resolution of 600 x 600 instead of the usual 200 x 200. + * + * This is because we need to be able to reupload our full avatar to the file server, and mobile pixel density can be 3x. + * We still want to reduce incoming avatars to 200 x 200 for performance reasons. */ -export async function processAvatarData(arrayBuffer: ArrayBuffer) { +export async function processAvatarData( + arrayBuffer: ArrayBuffer, + planForReupload: boolean, + remoteChange = false +) { if (!arrayBuffer || arrayBuffer.byteLength === 0 || !isArrayBuffer(arrayBuffer)) { throw new Error('processAvatarData: arrayBuffer is empty'); } @@ -23,7 +31,11 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { * 2. 
a fallback avatar in case the user looses its pro (static image, even if the main avatar is animated) */ // this is step 1, we generate a scaled down avatar, but keep its nature (animated or not) - const processed = await ImageProcessor.processAvatarData(arrayBuffer, maxAvatarDetails.maxSide); + const processed = await ImageProcessor.processAvatarData( + arrayBuffer, + planForReupload, + remoteChange + ); if (!processed) { throw new Error('processLocalAvatarChange: failed to process avatar'); @@ -32,17 +44,12 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { const { mainAvatarDetails, avatarFallback } = processed; // sanity check the returned data - if (mainAvatarDetails.isAnimated && mainAvatarDetails.format !== 'webp') { - throw new Error( - 'processLocalAvatarChange: we only support animated mainAvatarDetails in webp after conversion' - ); - } - // sanity check the returned data - if (!mainAvatarDetails.isAnimated && mainAvatarDetails.format !== 'jpeg') { + if (mainAvatarDetails.format !== 'webp' && mainAvatarDetails.format !== 'gif') { throw new Error( - 'processLocalAvatarChange: we only support non animated mainAvatarDetails in jpeg after conversion' + 'processLocalAvatarChange: we only support animated mainAvatarDetails in webp or gif after conversion' ); } + if (mainAvatarDetails.isAnimated && !avatarFallback) { throw new Error( 'processLocalAvatarChange: we only support animated mainAvatarDetails with fallback after conversion' @@ -50,16 +57,14 @@ export async function processAvatarData(arrayBuffer: ArrayBuffer) { } // sanity check the returned data - if (avatarFallback && avatarFallback.format !== 'jpeg') { + if (avatarFallback && avatarFallback.format !== 'webp') { throw new Error( 'processLocalAvatarChange: we only support avatarFallback in jpeg after conversion' ); } if (mainAvatarDetails.size >= MAX_ATTACHMENT_FILESIZE_BYTES) { - throw new Error( - 'processLocalAvatarChange: mainAvatarDetails size is too big after conversion 
(bigger than fs limit' - ); + throw new Error('Provided image is too big after conversion. Please use another image.'); } return { mainAvatarDetails, avatarFallback }; diff --git a/ts/util/crypto/attachmentsEncrypter.ts b/ts/util/crypto/attachmentsEncrypter.ts index dce29b1170..153848347d 100644 --- a/ts/util/crypto/attachmentsEncrypter.ts +++ b/ts/util/crypto/attachmentsEncrypter.ts @@ -103,10 +103,10 @@ export async function encryptAttachment( } if (keys.byteLength !== 64) { - throw new Error('Got invalid length attachment keys'); + throw new Error(`Got invalid length attachment keys: ${keys.byteLength}`); } if (iv.byteLength !== 16) { - throw new Error('Got invalid length attachment iv'); + throw new Error(`Got invalid length attachment iv: ${iv.byteLength}`); } const aesKey = keys.slice(0, 32); const macKey = keys.slice(32, 64); diff --git a/ts/util/i18n/shared.ts b/ts/util/i18n/shared.ts index ce29da39ee..04a5563eb8 100644 --- a/ts/util/i18n/shared.ts +++ b/ts/util/i18n/shared.ts @@ -20,6 +20,29 @@ export function getTimeLocaleDictionary() { return (timeLocaleMap as Record)[getBrowserLocale()] || timeLocaleMap.en; } +/** + * Some users have a locale setup with a ':' in it. + * When this happens, the full locale (i.e. the part with the "_") can sometimes be on the left, sometimes on the right. + * This function will always return the part with the "_" if found, otherwise the full locale. 
+ */ +export function keepFullLocalePart(locale: string) { + const firstUnderscore = locale.indexOf('_'); + const firstDash = locale.indexOf('-'); + const firstSemiColon = locale.indexOf(':'); + if (firstSemiColon === -1) { + return locale; + } + const firstUnderscoreOrDash = Math.max(firstUnderscore, firstDash); + + if (firstSemiColon > firstUnderscoreOrDash) { + // the semicolon is after the underscore, so we return the start of the string (de_DE:en we return de_DE) + return locale.substring(0, firstSemiColon); + } + + // the semicolon is before the underscore, so we return the end of the string (in en:de_DE wew return de_DE) + return locale.substring(firstSemiColon + 1); +} + /** * Returns the current locale as supported by Session (i.e. one generated by crowdin) */ @@ -36,7 +59,7 @@ export function getCrowdinLocale(): CrowdinLocale { * Returns the closest supported locale by the browser. */ export function getBrowserLocale() { - const browserLocale = process.env.LANGUAGE || getCrowdinLocale() || 'en'; + const browserLocale = keepFullLocalePart(process.env.LANGUAGE || getCrowdinLocale() || 'en'); // supportedLocalesOf will throw if the locales has a '_' instead of a '-' in it. const userLocaleDashed = browserLocale.replaceAll('_', '-'); diff --git a/ts/webworker/worker_interface.ts b/ts/webworker/worker_interface.ts index 496d8b2006..856e0adc6e 100644 --- a/ts/webworker/worker_interface.ts +++ b/ts/webworker/worker_interface.ts @@ -43,12 +43,10 @@ export class WorkerInterface { const { resolve, reject, fnName } = job; if (errorForDisplay) { - // eslint:disable: no-console - window?.log?.error(`Error received from worker job ${jobId} (${fnName}):`, errorForDisplay); - return reject( - new Error(`Error received from worker job ${jobId} (${fnName}): ${errorForDisplay}`) - ); + // Note: don't wrap this with a prefix as we want to be able to show what was the error as is to the user in a toast. + // If you want to add something, add it at the end. 
+ return reject(new Error(errorForDisplay)); } return resolve(result); diff --git a/ts/webworker/workers/browser/image_processor_interface.ts b/ts/webworker/workers/browser/image_processor_interface.ts index fab226af69..82f06cd5b8 100644 --- a/ts/webworker/workers/browser/image_processor_interface.ts +++ b/ts/webworker/workers/browser/image_processor_interface.ts @@ -35,10 +35,6 @@ async function callImageProcessorWorker( } export const ImageProcessor: ImageProcessorWorkerActions = { - extractFirstFrameJpeg: ( - ...args: Parameters - ) => callImageProcessorWorker('extractFirstFrameJpeg', ...args), - imageMetadata: (...args: Parameters) => callImageProcessorWorker('imageMetadata', ...args), diff --git a/ts/webworker/workers/browser/libsession_worker_interface.ts b/ts/webworker/workers/browser/libsession_worker_interface.ts index e61ebb4d4a..2177f3717d 100644 --- a/ts/webworker/workers/browser/libsession_worker_interface.ts +++ b/ts/webworker/workers/browser/libsession_worker_interface.ts @@ -185,10 +185,6 @@ export const UserConfigWrapperActions: UserConfigWrapperActionsCalls = { callLibSessionWorker(['UserConfig', 'getProfileUpdatedSeconds']) as Promise< ReturnType >, - getProfilePicWithKeyHex: async () => - callLibSessionWorker(['UserConfig', 'getProfilePicWithKeyHex']) as Promise< - ReturnType - >, getEnableBlindedMsgRequest: async () => callLibSessionWorker(['UserConfig', 'getEnableBlindedMsgRequest']) as Promise< ReturnType @@ -818,6 +814,16 @@ export const MultiEncryptWrapperActions: MultiEncryptActionsCalls = { callLibSessionWorker(['MultiEncrypt', 'multiDecryptEd25519', args]) as Promise< ReturnType >, + + attachmentDecrypt: async args => + callLibSessionWorker(['MultiEncrypt', 'attachmentDecrypt', args]) as Promise< + ReturnType + >, + + attachmentEncrypt: async args => + callLibSessionWorker(['MultiEncrypt', 'attachmentEncrypt', args]) as Promise< + ReturnType + >, }; export const allKnownEncryptionDomains: Array = ['SessionGroupKickedMessage']; diff --git 
a/ts/webworker/workers/node/image_processor/image_processor.d.ts b/ts/webworker/workers/node/image_processor/image_processor.d.ts index fff25ebea9..2124bd8d1f 100644 --- a/ts/webworker/workers/node/image_processor/image_processor.d.ts +++ b/ts/webworker/workers/node/image_processor/image_processor.d.ts @@ -12,18 +12,21 @@ type WithOutputBuffer = { outputBuffer: ArrayBufferLike }; type WithCustomSharpFormat = { format: T }; -type WithImageFormat = WithCustomSharpFormat & { +type WithImageFormat = WithCustomSharpFormat & { contentType: `image/${T}`; }; -type WithJpegFormat = WithImageFormat<'jpeg'>; -type WithPngFormat = WithImageFormat<'png'>; type WithWebpFormat = WithImageFormat<'webp'>; +type WithGifFormat = WithImageFormat<'gif'>; /** * The output of a always static output image. */ -type StaticOutputType = WithOutputBuffer & WithSharpSize & WithSharpWidth & WithSharpHeight; +type StaticOutputType = WithOutputBuffer & + WithSharpSize & + WithSharpWidth & + WithSharpHeight & + WithSharpFormat; /** * Can be animated or not. Another `With` will be needed to express the type of the content. @@ -39,23 +42,30 @@ export type ProcessedLinkPreviewThumbnailType = NonNullable< >; export type ImageProcessorWorkerActions = { - extractFirstFrameJpeg: ( - input: ArrayBufferLike - ) => Promise<(StaticOutputType & WithJpegFormat) | null>; - /** - * Process an avatar change. + * Process an avatar. Depending on if we want this to be reuploaded or not, we allow gif as a return format or not. + * The reason is that when we plan for reupload, we don't **always** convert gif to webp, as we might want to keep it as gif. + * We will try to convert an input gif to webp, but if it takes too long or the resulting file size is too big, we will just use the original gif. + * When the change is not planned for reupload, we convert everything to a webp. * This function will generate a mainAvatar, and a fallbackAvatar if needed. * - * The mainAvatar can be animated or not. 
If animated it is a webp, if not it is a jpeg. - * The fallbackAvatar, if set, is a always a jpeg. + * The mainAvatar can be animated or not. + * - If animated it is an animated gif or webp, + * - If not, it is a static webp (always). + * The fallbackAvatar, if set, is always a static webp. + * + * planForReupload must be true for + * - our own avatar (changed by the current user, locally or not) + * - our own avatar (automatic reupload) + * - (later: for a groupv2 avatar: locally or not and on reupload, even if we are not an admin (as we might become one) */ processAvatarData: ( input: ArrayBufferLike, - maxSidePx: number + planForReupload: boolean, + remoteChange: boolean ) => Promise<{ - mainAvatarDetails: MaybeAnimatedOutputType & (WithWebpFormat | WithJpegFormat); - avatarFallback: (StaticOutputType & WithJpegFormat) | null; + mainAvatarDetails: Omit & WithImageFormat<'gif' | 'webp'>; + avatarFallback: (StaticOutputType & WithWebpFormat) | null; } | null>; /** @@ -65,21 +75,19 @@ export type ImageProcessorWorkerActions = { processForLinkPreviewThumbnail: ( input: ArrayBufferLike, maxSidePx: number - ) => Promise<(StaticOutputType & WithPngFormat) | null>; + ) => Promise<(StaticOutputType & WithWebpFormat) | null>; /** * Process an image to get a thumbnail matching our required details for in conversation thumbnails * This is about the thumbnail in the conversation list (for attachments in messages). We generate a preview to avoid loading huge files until we show them in fullscreen. * - * Note: animated or not, an image will always be returned as a png. - * Note: eventually we want to support animated images as previews too. When we do, we will need to - * convert them to webp and resize their preview heavily for performance reasons. - * A 'in conversation thumbnail' is always resized to "cover". + * Note: animated or not, an thumbnail will always be returned as a static webp currently. 
+ * A 'in conversation thumbnail' is always resized to "cover" and enlarged if it was smaller than maxSidePx. */ processForInConversationThumbnail: ( input: ArrayBufferLike, maxSidePx: number - ) => Promise<(MaybeAnimatedOutputType & WithPngFormat) | null>; + ) => Promise<(Omit & WithWebpFormat) | null>; /** * Process an image to get something that we can upload to the file server. @@ -114,7 +122,7 @@ export type ImageProcessorWorkerActions = { input: ArrayBufferLike, maxSidePx: number, maxSizeBytes: number - ) => Promise; + ) => Promise; /** * Utility function to generate a fake avatar for testing purposes. @@ -123,7 +131,7 @@ export type ImageProcessorWorkerActions = { testIntegrationFakeAvatar: ( maxSidePx: number, background: { r: number; g: number; b: number } // { r: 0, g: 0, b: 255 } for fully blue - ) => Promise; + ) => Promise & WithWebpFormat>; /** * Extract the metadata retrieved from the image. diff --git a/ts/webworker/workers/node/image_processor/image_processor.worker.ts b/ts/webworker/workers/node/image_processor/image_processor.worker.ts index 1926675ed1..cc668f29ef 100644 --- a/ts/webworker/workers/node/image_processor/image_processor.worker.ts +++ b/ts/webworker/workers/node/image_processor/image_processor.worker.ts @@ -1,6 +1,10 @@ -import { isEmpty, isFinite, isNumber } from 'lodash'; +import { isBuffer, isEmpty, isFinite, isNumber } from 'lodash'; import sharp from 'sharp'; -import type { ImageProcessorWorkerActions } from './image_processor'; +import type { + ImageProcessorWorkerActions, + StaticOutputType, + WithWebpFormat, +} from './image_processor'; /* eslint-disable no-console */ /* eslint-disable strict */ @@ -12,6 +16,31 @@ function logIfOn(...args: Array) { } } +/** + * iOS allows 5 seconds for converting images, and 2s for resizing. + * We can't separate those two without making addition copies, so we use a timeout of 7s. 
+ */ +const defaultTimeoutProcessingSeconds = 7; + +/** + * This is the default of sharp, but better to have it explicit in case they (or we) want to change it. + */ +const webpDefaultQuality = 80; + +/** + * Duplicated to be used in the worker environment + */ +const maxAvatarDetails = { + /** + * 600 px + */ + maxSidePlanReupload: 600, + /** + * 200 px + */ + maxSideNoReuploadRequired: 200, +}; + onmessage = async (e: any) => { const [jobId, fnName, ...args] = e.data; @@ -54,11 +83,18 @@ function isAnimated(metadata: sharp.Metadata) { return (metadata.pages || 0) > 1; // more than 1 frame means that the image is animated } -function centerCoverOpts(maxSidePx: number) { +function centerCoverOpts({ + maxSidePx, + withoutEnlargement, +}: { + maxSidePx: number; + withoutEnlargement: boolean; +}) { return { height: maxSidePx, width: maxSidePx, fit: 'cover' as const, // a thumbnail we generate should contain the source image + withoutEnlargement, }; } @@ -91,69 +127,403 @@ function metadataToFrameHeight(metadata: sharp.Metadata) { * * Note: this will also orient a jpeg if needed. (i.e. calls rotate() through sharpFrom) * Note: metadata height will be set to the frame height, not the full height - * of the canvas (as sharp.metadata does with animated webps) + * of the canvas (as sharp.metadata does with animated webp) */ async function metadataFromBuffer( inputBuffer: ArrayBufferLike | Buffer, + rethrow = false, options?: sharp.SharpOptions ) { + // Note: this might throw and we want to allow the error to be forwarded to the user if that happens. 
+ // A toast will display the error try { const metadata = await sharpFrom(inputBuffer, options).metadata(); const frameHeight = metadataToFrameHeight(metadata); - // we do need the await above so the try/catch does its job return { ...metadata, height: frameHeight }; } catch (e) { - console.info('metadataFromBuffer failed with', e.message); + if (rethrow) { + throw e; + } return null; } } -const workerActions: ImageProcessorWorkerActions = { - extractFirstFrameJpeg: async inputBuffer => { - if (!inputBuffer?.byteLength) { - throw new Error('inputBuffer is required'); - } - const inputMetadata = await metadataFromBuffer(inputBuffer); - if (!inputMetadata) { - return null; - } +async function extractFirstFrameWebp( + inputBuffer: ArrayBufferLike +): Promise<(StaticOutputType & WithWebpFormat) | null> { + if (!inputBuffer?.byteLength) { + throw new Error('inputBuffer is required'); + } + const inputMetadata = await metadataFromBuffer(inputBuffer); + if (!inputMetadata) { + return null; + } - metadataSizeIsSetOrThrow(inputMetadata, 'extractFirstFrameJpeg'); + metadataSizeIsSetOrThrow(inputMetadata, 'extractFirstFrameWebp'); - if (!isAnimated(inputMetadata)) { - throw new Error('extractFirstFrameJpeg: input is not animated'); - } + if (!isAnimated(inputMetadata)) { + throw new Error('extractFirstFrameWebp: input is not animated'); + } - const parsed = sharpFrom(inputBuffer, { pages: 1 }); - const jpeg = parsed.jpeg(); - const outputBuffer = await jpeg.toBuffer(); - const outputMetadata = await metadataFromBuffer(outputBuffer); - if (!outputMetadata) { - return null; - } + const webp = sharpFrom(inputBuffer, { pages: 1 }) + .resize( + centerCoverOpts({ + // Note: the extracted avatar fallback is never used for reupload + maxSidePx: maxAvatarDetails.maxSideNoReuploadRequired, + withoutEnlargement: true, + }) + ) + .webp({ quality: webpDefaultQuality }); + + const outputBuffer = await webp.toBuffer(); + const outputMetadata = await metadataFromBuffer(outputBuffer); + if 
(!outputMetadata) { + return null; + } + + const outputMetadataSize = metadataSizeIsSetOrThrow(outputMetadata, 'extractFirstFrameWebp'); + + if (isAnimated(outputMetadata)) { + throw new Error('extractFirstFrameWebp: outputMetadata cannot be animated'); + } - const outputMetadataSize = metadataSizeIsSetOrThrow(outputMetadata, 'extractFirstFrameJpeg'); + return { + outputBuffer: outputBuffer.buffer, + width: outputMetadata.width, + height: outputMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` + size: outputMetadataSize, + format: 'webp' as const, + contentType: 'image/webp' as const, + }; +} + +async function extractAvatarFallback({ + resizedBuffer, + avatarIsAnimated, +}: { + resizedBuffer: ArrayBufferLike; + avatarIsAnimated: boolean; +}) { + if (!avatarIsAnimated) { + return null; + } + const firstFrameWebp = await extractFirstFrameWebp(resizedBuffer); + if (!firstFrameWebp) { + throw new Error('extractAvatarFallback: failed to extract first frame as webp'); + } + // the fallback (static image out of an animated one) is always a webp + const fallbackFormat = 'webp' as const; + + if ( + firstFrameWebp.height > maxAvatarDetails.maxSideNoReuploadRequired || + firstFrameWebp.width > maxAvatarDetails.maxSideNoReuploadRequired + ) { + throw new Error( + 'extractAvatarFallback: fallback image is too big. Have you provided the correct resizedBuffer?' + ); + } - if (isAnimated(outputMetadata)) { - throw new Error('extractFirstFrameJpeg: outputMetadata cannot be animated'); + return { + outputBuffer: firstFrameWebp.outputBuffer, + height: firstFrameWebp.height, // this one is only the frame height already. 
No need for `metadataToFrameHeight` + width: firstFrameWebp.width, + format: fallbackFormat, + contentType: `image/${fallbackFormat}` as const, + size: firstFrameWebp.size, + }; +} + +async function extractMainAvatarDetails({ + isSourceGif, + planForReupload, + resizedBuffer, + resizedMetadata, +}: { + resizedBuffer: ArrayBufferLike; + resizedMetadata: sharp.Metadata; + planForReupload: boolean; + isSourceGif: boolean; +}) { + const resizedIsAnimated = isAnimated(resizedMetadata); + const resizedMetadataSize = metadataSizeIsSetOrThrow(resizedMetadata, 'extractMainAvatarDetails'); + + return { + outputBuffer: resizedBuffer, + height: resizedMetadata.height, + width: resizedMetadata.width, + isAnimated: resizedIsAnimated, + format: planForReupload && isSourceGif ? ('gif' as const) : ('webp' as const), + contentType: planForReupload && isSourceGif ? ('image/gif' as const) : ('image/webp' as const), + size: resizedMetadataSize, + }; +} + +async function sleepFor(ms: number) { + return new Promise(resolve => { + setTimeout(resolve, ms); + }); +} + +async function processPlanForReuploadAvatar({ + inputBuffer, + remoteChange, +}: { + inputBuffer: ArrayBufferLike; + remoteChange: boolean; +}) { + const start = Date.now(); + + const metadata = await metadataFromBuffer(inputBuffer, true, { animated: true }); + if (!metadata) { + return null; + } + + /** + * This is not pretty, but when we download our own avatar from the network and we didn't set it locally, + * we need to make sure a reupload will be planned if required. + * What this means is that, if we get an avatar of size 640 from the network we should plan for a reupload. + * But, if we resize it here to 600, the AvatarReuploadJob will be skipped as the avatar is already the correct size. + * As a hack, we add 1 pixel to the size required when this is a remote change, so that the AvatarReuploadJob will be triggered. 
+ * + * Note: We do not upscale the file if it's already smaller than 600px, so a reupload won't be triggered if a device set an avatar to 600 already. + */ + const sizeRequired = remoteChange + ? maxAvatarDetails.maxSidePlanReupload + 1 + : maxAvatarDetails.maxSidePlanReupload; + const avatarIsAnimated = isAnimated(metadata); + + if (avatarIsAnimated && metadata.format !== 'webp' && metadata.format !== 'gif') { + throw new Error('processPlanForReuploadAvatar: we only support animated images in webp or gif'); + } + + // When planning for reupload, the rules about gif/webp are quite different that when not planning for reupload. + // Essentially, we want to try to resize a gif to webp, but if it takes too long or the resulting file size is too big, we will just use the original gif. + const isSourceGif = metadata.format === 'gif'; + if ( + metadata.width <= sizeRequired && + metadata.height <= sizeRequired && + metadata.format === 'webp' + ) { + // It appears this avatar is already small enough and of the correct format, so we don't want to resize it. + // We still want to extract the first frame of the animated avatar, if it is animated though. + + // also extract the first frame of the resized (animated) avatar + const avatarFallback = await extractAvatarFallback({ + resizedBuffer: inputBuffer, + avatarIsAnimated, + }); + const mainAvatarDetails = await extractMainAvatarDetails({ + resizedBuffer: inputBuffer, // we can just reuse the input buffer here as the dimensions and format are correct + resizedMetadata: metadata, + planForReupload: true, + isSourceGif, + }); + + logIfOn( + `[imageProcessorWorker] processPlanForReuploadAvatar sizes (already correct sizes & format): main: ${inputBuffer.byteLength} bytes, fallback: ${avatarFallback ? 
avatarFallback.size : 0} bytes` + ); + + return { + mainAvatarDetails, + avatarFallback, + }; + } + const resizeOpts = centerCoverOpts({ + maxSidePx: sizeRequired, + withoutEnlargement: true, + }); + + let awaited: any; + // if the avatar was animated, we want an animated webp. + // if it was static, we want a static webp. + if (isSourceGif) { + logIfOn( + `[imageProcessorWorker] src is gif, trying to convert to webp with timeout of ${defaultTimeoutProcessingSeconds}s` + ); + // See the comment in image_processor.d.ts: + // We want to try to convert a gif to webp, but if it takes too long or the resulting file size is too big, we will just use the original gif. + awaited = await Promise.race([ + sharpFrom(inputBuffer, { animated: true }).resize(resizeOpts).webp().toBuffer(), + sleepFor(defaultTimeoutProcessingSeconds * 1000), // it seems that timeout is not working as expected in sharp --' + ]); + if (awaited && isBuffer(awaited)) { + logIfOn( + `[imageProcessorWorker] processPlanForReuploadAvatar: gif conversion took ${Date.now() - start}ms for ${awaited.byteLength} bytes` + ); + } else { + logIfOn(`[imageProcessorWorker] processPlanForReuploadAvatar: gif conversion failed`); } + } else { + // when not planning for reupload, we always want a webp, and no timeout for that + awaited = await sharpFrom(inputBuffer, { animated: true }) + .resize(resizeOpts) + .webp({ quality: webpDefaultQuality }) + .toBuffer(); + logIfOn( + `[imageProcessorWorker] always webp conversion took ${Date.now() - start}ms for ${awaited.byteLength} bytes` + ); + } + + if (isSourceGif && (!isBuffer(awaited) || awaited.byteLength > inputBuffer.byteLength)) { + logIfOn( + `[imageProcessorWorker] isSourceGif & gif conversion failed, using original gif without resize` + ); + // we failed to process the gif fast enough, or the resulting webp is bigger than the original gif. Fallback to the original gif. 
+ awaited = Buffer.from(inputBuffer); + } + + if (!isBuffer(awaited)) { + throw new Error('Image processing failed for an unknown reason'); + } + + const resizedBuffer = awaited as Buffer; + + // Note: we need to use the resized buffer here, not the original one, + // as metadata is always linked to the source buffer (even if a resize() is done before the metadata call) + const resizedMetadata = await metadataFromBuffer(resizedBuffer); + + if (!resizedMetadata) { + return null; + } + + const resizedMetadataSize = metadataSizeIsSetOrThrow( + resizedMetadata, + 'processPlanForReuploadAvatar' + ); + + logIfOn( + `[imageProcessorWorker] processPlanForReuploadAvatar mainAvatar resize took ${Date.now() - start}ms for ${inputBuffer.byteLength} bytes` + ); + + const resizedIsAnimated = isAnimated(resizedMetadata); + + // also extract the first frame of the resized (animated) avatar + const avatarFallback = await extractAvatarFallback({ + resizedBuffer: resizedBuffer.buffer, + avatarIsAnimated: resizedIsAnimated, + }); + + logIfOn( + `[imageProcessorWorker] processPlanForReuploadAvatar sizes: main: ${resizedMetadataSize} bytes, fallback: ${avatarFallback ? 
avatarFallback.size : 0} bytes` + ); + const mainAvatarDetails = await extractMainAvatarDetails({ + resizedBuffer: resizedBuffer.buffer, + resizedMetadata, + planForReupload: true, + isSourceGif, + }); + + return { + mainAvatarDetails, + avatarFallback, + }; +} + +async function processNoPlanForReuploadAvatar({ inputBuffer }: { inputBuffer: ArrayBufferLike }) { + const start = Date.now(); + const sizeRequired = maxAvatarDetails.maxSideNoReuploadRequired; + const metadata = await metadataFromBuffer(inputBuffer, false, { animated: true }); + + if (!metadata) { + return null; + } + const avatarIsAnimated = isAnimated(metadata); + + if (avatarIsAnimated && metadata.format !== 'webp' && metadata.format !== 'gif') { + throw new Error( + 'processNoPlanForReuploadAvatar: we only support animated images in webp or gif' + ); + } + // Not planning for reupload. We always generate a webp instead for the main avatar. + if ( + metadata.width <= sizeRequired && + metadata.height <= sizeRequired && + metadata.format === 'webp' + ) { + // It appears this avatar is already small enough and of the correct format, so we don't want to resize it. + // We still want to extract the first frame of the animated avatar, if it is animated though. + + // also extract the first frame of the resized (animated) avatar + const avatarFallback = await extractAvatarFallback({ + resizedBuffer: inputBuffer, + avatarIsAnimated, + }); + const mainAvatarDetails = await extractMainAvatarDetails({ + resizedBuffer: inputBuffer, // we can just reuse the input buffer here as the dimensions and format are correct + resizedMetadata: metadata, + planForReupload: false, + isSourceGif: false, + }); + + logIfOn( + `[imageProcessorWorker] processNoPlanForReuploadAvatar sizes (already correct sizes): main: ${inputBuffer.byteLength} bytes, fallback: ${avatarFallback ? 
avatarFallback.size : 0} bytes` + ); return { - outputBuffer: outputBuffer.buffer, - width: outputMetadata.width, - height: outputMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` - size: outputMetadataSize, - format: 'jpeg' as const, - contentType: 'image/jpeg' as const, + mainAvatarDetails, + avatarFallback, }; - }, + } + + // generate a square image of the avatar, scaled down or up to `maxSide` + const resized = sharpFrom(inputBuffer, { animated: true }).resize( + centerCoverOpts({ + maxSidePx: sizeRequired, + withoutEnlargement: true, + }) + ); + + // when not planning for reupload, we always want a webp for the main avatar (and we do not care about how long that takes) + const resizedBuffer = await resized.webp({ quality: webpDefaultQuality }).toBuffer(); + + // Note: we need to use the resized buffer here, not the original one, + // as metadata is always linked to the source buffer (even if a resize() is done before the metadata call) + const resizedMetadata = await metadataFromBuffer(resizedBuffer); + + if (!resizedMetadata) { + return null; + } + + const resizedMetadataSize = metadataSizeIsSetOrThrow( + resizedMetadata, + 'processNoPlanForReuploadAvatar' + ); + + logIfOn( + `[imageProcessorWorker] processNoPlanForReuploadAvatar mainAvatar resize took ${Date.now() - start}ms for ${inputBuffer.byteLength} bytes` + ); + + const resizedIsAnimated = isAnimated(resizedMetadata); + + // also extract the first frame of the resized (animated) avatar + const avatarFallback = await extractAvatarFallback({ + resizedBuffer: resizedBuffer.buffer, + avatarIsAnimated: resizedIsAnimated, + }); + + logIfOn( + `[imageProcessorWorker] processNoPlanForReuploadAvatar sizes: main: ${resizedMetadataSize} bytes, fallback: ${avatarFallback ? 
avatarFallback.size : 0} bytes` + ); + const mainAvatarDetails = await extractMainAvatarDetails({ + resizedBuffer: resizedBuffer.buffer, + resizedMetadata, + planForReupload: false, + isSourceGif: false, // we always generate a webp here so we do not care if the src was a gif. + }); + + return { + mainAvatarDetails, + avatarFallback, + }; +} +const workerActions: ImageProcessorWorkerActions = { imageMetadata: async inputBuffer => { if (!inputBuffer?.byteLength) { throw new Error('imageMetadata: inputBuffer is required'); } - const metadata = await metadataFromBuffer(inputBuffer, { animated: true }); + const metadata = await metadataFromBuffer(inputBuffer, false, { animated: true }); if (!metadata) { return null; @@ -170,90 +540,19 @@ const workerActions: ImageProcessorWorkerActions = { }; }, - processAvatarData: async (inputBuffer: ArrayBufferLike, maxSidePx: number) => { + processAvatarData: async ( + inputBuffer: ArrayBufferLike, + planForReupload: boolean, + remoteChange: boolean + ) => { if (!inputBuffer?.byteLength) { throw new Error('processAvatarData: inputBuffer is required'); } - const start = Date.now(); - - const metadata = await metadataFromBuffer(inputBuffer, { animated: true }); - if (!metadata) { - return null; - } - - const avatarIsAnimated = isAnimated(metadata); - if (avatarIsAnimated && metadata.format !== 'webp' && metadata.format !== 'gif') { - throw new Error('processAvatarData: we only support animated images in webp or gif'); + if (planForReupload) { + return await processPlanForReuploadAvatar({ inputBuffer, remoteChange }); } - - // generate a square image of the avatar, scaled down or up to `maxSide` - - const resized = sharpFrom(inputBuffer, { animated: true }).resize(centerCoverOpts(maxSidePx)); - - // we know the avatar is animated and gif or webp, force it to webp for performance reasons - if (avatarIsAnimated) { - resized.webp(); - } else { - resized.jpeg(); - } - - const resizedBuffer = await resized.toBuffer(); - - // Note: we need 
to use the resized buffer here, not the original one, - // as metadata is always linked to the source buffer (even if a resize() is done before the metadata call) - const resizedMetadata = await metadataFromBuffer(resizedBuffer); - - if (!resizedMetadata) { - return null; - } - - const resizedMetadataSize = metadataSizeIsSetOrThrow(resizedMetadata, 'processAvatarData'); - - logIfOn( - `[imageProcessorWorker] processAvatarData mainAvatar resize took ${Date.now() - start}ms for ${inputBuffer.byteLength} bytes` - ); - - const resizedIsAnimated = isAnimated(resizedMetadata); - - const formatDetails = avatarIsAnimated - ? { format: 'webp' as const, contentType: 'image/webp' as const } - : { format: 'jpeg' as const, contentType: 'image/jpeg' as const }; - - const mainAvatarDetails = { - outputBuffer: resizedBuffer.buffer, - height: resizedMetadata.height, - width: resizedMetadata.width, - isAnimated: resizedIsAnimated, - ...formatDetails, - size: resizedMetadataSize, - }; - - let avatarFallback = null; - - if (resizedIsAnimated) { - // also extract the first frame of the resized (animated) avatar - const firstFrameJpeg = await workerActions.extractFirstFrameJpeg(resizedBuffer.buffer); - if (!firstFrameJpeg) { - throw new Error('processAvatarData: failed to extract first frame as jpeg'); - } - const fallbackFormat = 'jpeg' as const; - - avatarFallback = { - outputBuffer: firstFrameJpeg.outputBuffer, - height: firstFrameJpeg.height, // this one is only the frame height already. No need for `metadataToFrameHeight` - width: firstFrameJpeg.width, - format: fallbackFormat, - contentType: `image/${fallbackFormat}` as const, - size: firstFrameJpeg.size, - }; - } - - logIfOn( - `[imageProcessorWorker] processAvatarData sizes: main: ${mainAvatarDetails.size} bytes, fallback: ${avatarFallback ? 
avatarFallback.size : 0} bytes` - ); - - return { mainAvatarDetails, avatarFallback }; + return await processNoPlanForReuploadAvatar({ inputBuffer }); }, testIntegrationFakeAvatar: async ( @@ -267,7 +566,7 @@ const workerActions: ImageProcessorWorkerActions = { channels: 3, // RGB background, }, - }).jpeg({ quality: 90 }); + }).webp({ quality: webpDefaultQuality }); const createdBuffer = await created.toBuffer(); const createdMetadata = await metadataFromBuffer(createdBuffer); @@ -279,7 +578,7 @@ const workerActions: ImageProcessorWorkerActions = { const size = metadataSizeIsSetOrThrow(createdMetadata, 'testIntegrationFakeAvatar'); - const format = 'jpeg' as const; + const format = 'webp' as const; return { outputBuffer: createdBuffer.buffer, height: createdMetadata.height, // this one is only the frame height already, no need for `metadataToFrameHeight` @@ -297,7 +596,7 @@ const workerActions: ImageProcessorWorkerActions = { } const parsed = sharpFrom(inputBuffer, { animated: false }); - const metadata = await metadataFromBuffer(inputBuffer, { animated: false }); + const metadata = await metadataFromBuffer(inputBuffer, false, { animated: false }); if (!metadata) { return null; @@ -305,9 +604,10 @@ const workerActions: ImageProcessorWorkerActions = { metadataSizeIsSetOrThrow(metadata, 'processForLinkPreviewThumbnail'); - const resized = parsed.resize(centerCoverOpts(maxSidePx)); + // for thumbnail, we actually want to enlarge the image if required + const resized = parsed.resize(centerCoverOpts({ maxSidePx, withoutEnlargement: false })); - const resizedBuffer = await resized.png().toBuffer(); + const resizedBuffer = await resized.webp({ quality: webpDefaultQuality }).toBuffer(); const resizedMetadata = await metadataFromBuffer(resizedBuffer); if (!resizedMetadata) { @@ -316,7 +616,7 @@ const workerActions: ImageProcessorWorkerActions = { const resizedSize = metadataSizeIsSetOrThrow(resizedMetadata, 'processForLinkPreviewThumbnail'); - const format = 'png' as const; 
+ const format = 'webp' as const; return { outputBuffer: resizedBuffer.buffer, @@ -328,21 +628,33 @@ const workerActions: ImageProcessorWorkerActions = { }; }, - processForInConversationThumbnail: async (inputBuffer, maxSidePx) => { + processForInConversationThumbnail: async (inputBuffer: ArrayBufferLike, maxSidePx: number) => { if (!inputBuffer?.byteLength) { throw new Error('processForInConversationThumbnail: inputBuffer is required'); } - // Note: this is false here because we want to force a static image (so no need to extract all the frames) - const parsed = sharpFrom(inputBuffer, { animated: false }).resize(centerCoverOpts(maxSidePx)); - const metadata = await metadataFromBuffer(inputBuffer, { animated: false }); + // Note: this `animated` is false here because we want to force a static image (so no need to extract all the frames) + const parsed = sharpFrom(inputBuffer, { animated: false }).resize( + centerCoverOpts({ maxSidePx, withoutEnlargement: false }) // We actually want to enlarge the image if required for a thumbnail in conversation + ); + const metadata = await metadataFromBuffer(inputBuffer, false, { animated: false }); if (!metadata) { return null; } const animated = isAnimated(metadata); - const resizedBuffer = await parsed.png().toBuffer(); // animated ? 
await parsed.webp().toBuffer() : ; + + const awaited = await Promise.race([ + parsed.webp({ quality: webpDefaultQuality }).toBuffer(), + sleepFor(defaultTimeoutProcessingSeconds * 1000), // it seems that timeout is not working as expected in sharp --' + ]); + + if (!isBuffer(awaited)) { + throw new Error('Image processing timed out'); + } + + const resizedBuffer = awaited as Buffer; const resizedMetadata = await metadataFromBuffer(resizedBuffer); if (!resizedMetadata) { @@ -351,7 +663,7 @@ const workerActions: ImageProcessorWorkerActions = { const size = metadataSizeIsSetOrThrow(resizedMetadata, 'processForInConversationThumbnail'); - const formatDetails = { format: 'png' as const, contentType: 'image/png' as const }; + const formatDetails = { format: 'webp' as const, contentType: 'image/webp' as const }; return { outputBuffer: resizedBuffer.buffer, @@ -363,13 +675,17 @@ const workerActions: ImageProcessorWorkerActions = { }; }, - processForFileServerUpload: async (inputBuffer, maxSidePx, maxSizeBytes) => { + processForFileServerUpload: async ( + inputBuffer: ArrayBufferLike, + maxSidePx: number, + maxSizeBytes: number + ) => { if (!inputBuffer?.byteLength) { throw new Error('processForFileServerUpload: inputBuffer is required'); } const lossyFormats = ['jpeg', 'webp', 'avif']; const start = Date.now(); - const metadata = await metadataFromBuffer(inputBuffer); + const metadata = await metadataFromBuffer(inputBuffer, false); if ( !metadata || @@ -449,9 +765,7 @@ const workerActions: ImageProcessorWorkerActions = { // if we can't get a picture with a quality of more than 30, consider it a failure and return null const qualityRange = [95, 85, 75, 55, 30] as const; - let qualityRangeIndex = 0; - while (qualityRangeIndex < qualityRange.length) { - const quality = qualityRange[qualityRangeIndex]; + for (const quality of qualityRange) { const pipeline = base.clone(); switch (metadata.format) { @@ -469,11 +783,11 @@ const workerActions: ImageProcessorWorkerActions = { } // 
eslint-disable-next-line no-await-in-loop - const buffer = await pipeline.toBuffer(); + const buffer = await pipeline.toBuffer(); // no timeout here for now if (buffer.length < maxSizeBytes) { // eslint-disable-next-line no-await-in-loop - const outputMetadata = await metadataFromBuffer(buffer); + const outputMetadata = await metadataFromBuffer(buffer, false); if (!outputMetadata) { return null; @@ -502,7 +816,7 @@ const workerActions: ImageProcessorWorkerActions = { }; } logIfOn( - `[imageProcessorWorker] processForFileServerUpload: iteration[${qualityRangeIndex}] took so far ${ + `[imageProcessorWorker] processForFileServerUpload: took so far ${ Date.now() - start }ms with quality ${quality}` ); @@ -510,7 +824,6 @@ const workerActions: ImageProcessorWorkerActions = { `\t src${formattedMetadata({ width: metadata.width, height: metadata.height, format: metadata.format, size: inputBuffer.byteLength })} ` ); } - qualityRangeIndex++; logIfOn( `[imageProcessorWorker] processForFileServerUpload: failed to get a buffer of size ${maxSizeBytes} for ${inputBuffer.byteLength} bytes for image of ${metadata.width}x${metadata.height} with format ${metadata.format}` diff --git a/yarn.lock b/yarn.lock index 984617a9ad..cf8dfae051 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5121,9 +5121,9 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -"libsession_util_nodejs@https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz": - version "0.5.8" - resolved "https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.8/libsession_util_nodejs-v0.5.8.tar.gz#5bd0bd24782813bb5f67eee767f7840d42bce93b" +"libsession_util_nodejs@https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz": + version "0.5.9" + resolved 
"https://github.com/session-foundation/libsession-util-nodejs/releases/download/v0.5.9/libsession_util_nodejs-v0.5.9.tar.gz#33248ecab095c8b1223c3582890d9708731a3ab8" dependencies: cmake-js "7.3.1" node-addon-api "^8.3.1" @@ -5720,9 +5720,9 @@ node-addon-api@^8.3.1: integrity sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A== node-api-headers@^1.1.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.5.0.tgz#73a0bab642c77e39b815b6d24ad4c6b56f695912" - integrity sha512-Yi/FgnN8IU/Cd6KeLxyHkylBUvDTsSScT0Tna2zTrz8klmc8qF2ppj6Q1LHsmOueJWhigQwR4cO2p0XBGW5IaQ== + version "1.6.0" + resolved "https://registry.yarnpkg.com/node-api-headers/-/node-api-headers-1.6.0.tgz#88ad7fb817b44e33baba81f92ae1566bda462d0c" + integrity sha512-81T99+mWLZnxX0LlZPYuafyFlxVVaWKQ0BDAbSrOqLO+v+gzCzu0GTAVNeVK8lucqjqo9L/1UcK9cpkem8Py4Q== node-api-version@^0.2.0: version "0.2.0"